Created September 29, 2014 20:23
Save calvinmetcalf/9c43e985ce9cb000c958 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!doctype html>
<html>
  <head>
    <meta charset="utf-8">
  </head>
  <body>
    <!-- Performance harness: results are written into this <pre>. -->
    <pre style="width: 100%; height: 100%;" id="output"></pre>
    <script src="pouchdb.js"></script>
    <script src="pouchdb.memory.js"></script>
    <script src="performance-bundle.js"></script>
  </body>
</html>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ | |
},{}],2:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// Browserified port of Node core's 'events' EventEmitter.
// `_events` maps event type -> a bare listener function (one listener)
// or an array of listener functions (two or more).
function EventEmitter() {
  this._events = this._events || {};
  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;

// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;

// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
  if (!isNumber(n) || n < 0 || isNaN(n))
    throw TypeError('n must be a positive number');
  this._maxListeners = n;
  return this;
};

// Invoke every listener registered for `type`, forwarding any additional
// arguments. Returns true if the event had listeners, false otherwise.
// An 'error' event with no listener throws (Node semantics).
EventEmitter.prototype.emit = function(type) {
  var er, handler, len, args, i, listeners;

  if (!this._events)
    this._events = {};

  // If there is no 'error' event listener then throw.
  if (type === 'error') {
    if (!this._events.error ||
        (isObject(this._events.error) && !this._events.error.length)) {
      er = arguments[1];
      if (er instanceof Error) {
        throw er; // Unhandled 'error' event
      }
      throw TypeError('Uncaught, unspecified "error" event.');
    }
  }

  handler = this._events[type];

  if (isUndefined(handler))
    return false;

  if (isFunction(handler)) {
    switch (arguments.length) {
      // fast cases
      case 1:
        handler.call(this);
        break;
      case 2:
        handler.call(this, arguments[1]);
        break;
      case 3:
        handler.call(this, arguments[1], arguments[2]);
        break;
      // slower
      default:
        len = arguments.length;
        args = new Array(len - 1);
        for (i = 1; i < len; i++)
          args[i - 1] = arguments[i];
        handler.apply(this, args);
    }
  } else if (isObject(handler)) {
    len = arguments.length;
    args = new Array(len - 1);
    for (i = 1; i < len; i++)
      args[i - 1] = arguments[i];

    // Copy the listener array so listeners removed mid-emit still fire.
    listeners = handler.slice();
    len = listeners.length;
    for (i = 0; i < len; i++)
      listeners[i].apply(this, args);
  }

  return true;
};
// Register `listener` for `type`. A single listener is stored bare; adding
// a second upgrades the slot to an array. Emits 'newListener' BEFORE the
// listener is added, and warns once per event type when the listener count
// exceeds the max-listeners threshold.
EventEmitter.prototype.addListener = function(type, listener) {
  var m;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  if (!this._events)
    this._events = {};

  // To avoid recursion in the case that type === "newListener"! Before
  // adding it to the listeners, first emit "newListener".
  if (this._events.newListener)
    this.emit('newListener', type,
              isFunction(listener.listener) ?
              listener.listener : listener);

  if (!this._events[type])
    // Optimize the case of one listener. Don't need the extra array object.
    this._events[type] = listener;
  else if (isObject(this._events[type]))
    // If we've already got an array, just append.
    this._events[type].push(listener);
  else
    // Adding the second element, need to change to array.
    this._events[type] = [this._events[type], listener];

  // Check for listener leak
  // (NOTE: the duplicate inner `var m;` declaration was removed; `m` is
  // already declared at the top of the function.)
  if (isObject(this._events[type]) && !this._events[type].warned) {
    if (!isUndefined(this._maxListeners)) {
      m = this._maxListeners;
    } else {
      m = EventEmitter.defaultMaxListeners;
    }

    if (m && m > 0 && this._events[type].length > m) {
      this._events[type].warned = true;
      console.error('(node) warning: possible EventEmitter memory ' +
                    'leak detected. %d listeners added. ' +
                    'Use emitter.setMaxListeners() to increase limit.',
                    this._events[type].length);
      if (typeof console.trace === 'function') {
        // not supported in IE 10
        console.trace();
      }
    }
  }

  return this;
};

EventEmitter.prototype.on = EventEmitter.prototype.addListener;

// Register a listener that is removed after its first invocation.
EventEmitter.prototype.once = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  var fired = false;

  function g() {
    this.removeListener(type, g);

    if (!fired) {
      fired = true;
      listener.apply(this, arguments);
    }
  }

  // Expose the original callback so removeListener(type, listener) matches.
  g.listener = listener;
  this.on(type, g);

  return this;
};
// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  if (!this._events || !this._events[type])
    return this;

  list = this._events[type];
  length = list.length;
  position = -1;

  // Bare-function slot: match the listener itself or a `once` wrapper
  // whose `.listener` property is the original callback.
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);

  } else if (isObject(list)) {
    // Array slot: scan backwards for the matching listener.
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }

    if (position < 0)
      return this;

    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }

    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }

  return this;
};

// Remove all listeners, or (when `type` is given) all listeners for `type`.
EventEmitter.prototype.removeAllListeners = function(type) {
  var key, listeners;

  if (!this._events)
    return this;

  // not listening for removeListener, no need to emit
  if (!this._events.removeListener) {
    if (arguments.length === 0)
      this._events = {};
    else if (this._events[type])
      delete this._events[type];
    return this;
  }

  // emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    for (key in this._events) {
      if (key === 'removeListener') continue;
      this.removeAllListeners(key);
    }
    this.removeAllListeners('removeListener');
    this._events = {};
    return this;
  }

  listeners = this._events[type];

  if (isFunction(listeners)) {
    this.removeListener(type, listeners);
  } else {
    // LIFO order
    while (listeners.length)
      this.removeListener(type, listeners[listeners.length - 1]);
  }
  delete this._events[type];

  return this;
};

// Return a copy of the listener list for `type` (always an array).
EventEmitter.prototype.listeners = function(type) {
  var ret;
  if (!this._events || !this._events[type])
    ret = [];
  else if (isFunction(this._events[type]))
    ret = [this._events[type]];
  else
    ret = this._events[type].slice();
  return ret;
};

// Static helper: number of listeners registered on `emitter` for `type`.
EventEmitter.listenerCount = function(emitter, type) {
  var ret;
  if (!emitter._events || !emitter._events[type])
    ret = 0;
  else if (isFunction(emitter._events[type]))
    ret = 1;
  else
    ret = emitter._events[type].length;
  return ret;
};
// Type-check helpers used throughout the EventEmitter implementation above.

function isFunction(arg) {
  return typeof arg === 'function';
}

function isNumber(arg) {
  return typeof arg === 'number';
}

// True for any non-null object, including arrays.
function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}

function isUndefined(arg) {
  return arg === void 0;
}
},{}],3:[function(require,module,exports){ | |
// shim for using process in browser | |
var process = module.exports = {}; | |
process.nextTick = (function () { | |
var canSetImmediate = typeof window !== 'undefined' | |
&& window.setImmediate; | |
var canPost = typeof window !== 'undefined' | |
&& window.postMessage && window.addEventListener | |
; | |
if (canSetImmediate) { | |
return function (f) { return window.setImmediate(f) }; | |
} | |
if (canPost) { | |
var queue = []; | |
window.addEventListener('message', function (ev) { | |
var source = ev.source; | |
if ((source === window || source === null) && ev.data === 'process-tick') { | |
ev.stopPropagation(); | |
if (queue.length > 0) { | |
var fn = queue.shift(); | |
fn(); | |
} | |
} | |
}, true); | |
return function nextTick(fn) { | |
queue.push(fn); | |
window.postMessage('process-tick', '*'); | |
}; | |
} | |
return function nextTick(fn) { | |
setTimeout(fn, 0); | |
}; | |
})(); | |
process.title = 'browser'; | |
process.browser = true; | |
process.env = {}; | |
process.argv = []; | |
process.binding = function (name) { | |
throw new Error('process.binding is not supported'); | |
} | |
// TODO(shtylman) | |
process.cwd = function () { return '/' }; | |
process.chdir = function (dir) { | |
throw new Error('process.chdir is not supported'); | |
}; | |
},{}],4:[function(require,module,exports){ | |
var base64 = require('base64-js') | |
var ieee754 = require('ieee754') | |
exports.Buffer = Buffer | |
exports.SlowBuffer = Buffer | |
exports.INSPECT_MAX_BYTES = 50 | |
Buffer.poolSize = 8192 | |
/** | |
* If `Buffer._useTypedArrays`: | |
* === true Use Uint8Array implementation (fastest) | |
* === false Use Object implementation (compatible down to IE6) | |
*/ | |
Buffer._useTypedArrays = (function () { | |
// Detect if browser supports Typed Arrays. Supported browsers are IE 10+, | |
// Firefox 4+, Chrome 7+, Safari 5.1+, Opera 11.6+, iOS 4.2+. | |
if (typeof Uint8Array === 'undefined' || typeof ArrayBuffer === 'undefined') | |
return false | |
// Does the browser support adding properties to `Uint8Array` instances? If | |
// not, then that's the same as no `Uint8Array` support. We need to be able to | |
// add all the node Buffer API methods. | |
// Relevant Firefox bug: https://bugzilla.mozilla.org/show_bug.cgi?id=695438 | |
try { | |
var arr = new Uint8Array(0) | |
arr.foo = function () { return 42 } | |
return 42 === arr.foo() && | |
typeof arr.subarray === 'function' // Chrome 9-10 lack `subarray` | |
} catch (e) { | |
return false | |
} | |
})() | |
/**
 * Class: Buffer
 * =============
 *
 * The Buffer constructor returns instances of `Uint8Array` that are augmented
 * with function properties for all the node `Buffer` API functions. We use
 * `Uint8Array` so that square bracket notation works as expected -- it returns
 * a single octet.
 *
 * By augmenting the instances, we can avoid modifying the `Uint8Array`
 * prototype.
 *
 * `subject` may be a byte count, a string, or an array-ish byte container;
 * `noZero` skips zero-filling in the non-typed-array fallback path.
 */
function Buffer (subject, encoding, noZero) {
  if (!(this instanceof Buffer))
    return new Buffer(subject, encoding, noZero)

  var type = typeof subject

  // Workaround: node's base64 implementation allows for non-padded strings
  // while base64-js does not.
  if (encoding === 'base64' && type === 'string') {
    subject = stringtrim(subject)
    while (subject.length % 4 !== 0) {
      subject = subject + '='
    }
  }

  // Find the length
  var length
  if (type === 'number')
    length = coerce(subject)
  else if (type === 'string')
    length = Buffer.byteLength(subject, encoding)
  else if (type === 'object')
    length = coerce(subject.length) // Assume object is an array
  else
    throw new Error('First argument needs to be a number, array or string.')

  var buf
  if (Buffer._useTypedArrays) {
    // Preferred: Return an augmented `Uint8Array` instance for best performance
    buf = augment(new Uint8Array(length))
  } else {
    // Fallback: Return THIS instance of Buffer (created by `new`)
    buf = this
    buf.length = length
    buf._isBuffer = true
  }

  var i
  if (Buffer._useTypedArrays && typeof Uint8Array === 'function' &&
      subject instanceof Uint8Array) {
    // Speed optimization -- use set if we're copying from a Uint8Array
    buf._set(subject)
  } else if (isArrayish(subject)) {
    // Treat array-ish objects as a byte array
    for (i = 0; i < length; i++) {
      if (Buffer.isBuffer(subject))
        buf[i] = subject.readUInt8(i)
      else
        buf[i] = subject[i]
    }
  } else if (type === 'string') {
    buf.write(subject, 0, encoding)
  } else if (type === 'number' && !Buffer._useTypedArrays && !noZero) {
    for (i = 0; i < length; i++) {
      buf[i] = 0
    }
  }

  return buf
}
// STATIC METHODS
// ==============

// True if `encoding` names a supported string encoding.
Buffer.isEncoding = function (encoding) {
  switch (String(encoding).toLowerCase()) {
    case 'hex':
    case 'utf8':
    case 'utf-8':
    case 'ascii':
    case 'binary':
    case 'base64':
    case 'raw':
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return true
    default:
      return false
  }
}

Buffer.isBuffer = function (b) {
  return !!(b !== null && b !== undefined && b._isBuffer)
}

// Number of bytes needed to encode `str` in the given encoding
// (defaults to utf8).
Buffer.byteLength = function (str, encoding) {
  var ret
  str = str + ''
  switch (encoding || 'utf8') {
    case 'hex':
      ret = str.length / 2
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8ToBytes(str).length
      break
    case 'ascii':
    case 'binary':
    case 'raw':
      ret = str.length
      break
    case 'base64':
      ret = base64ToBytes(str).length
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      ret = str.length * 2
      break
    default:
      throw new Error('Unknown encoding')
  }
  return ret
}

// Concatenate a list of Buffers into one; `totalLength` is optional and is
// computed from the list when omitted.
Buffer.concat = function (list, totalLength) {
  assert(isArray(list), 'Usage: Buffer.concat(list, [totalLength])\n' +
      'list should be an Array.')

  if (list.length === 0) {
    return new Buffer(0)
  } else if (list.length === 1) {
    return list[0]
  }

  var i
  if (typeof totalLength !== 'number') {
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }

  var buf = new Buffer(totalLength)
  var pos = 0
  for (i = 0; i < list.length; i++) {
    var item = list[i]
    item.copy(buf, pos)
    pos += item.length
  }
  return buf
}
// BUFFER INSTANCE METHODS
// =======================

// Write a hex string into `buf` starting at `offset`; returns bytes written.
function _hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0
  var remaining = buf.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }

  // must be an even number of digits
  var strLen = string.length
  assert(strLen % 2 === 0, 'Invalid hex string')

  if (length > strLen / 2) {
    length = strLen / 2
  }
  for (var i = 0; i < length; i++) {
    var byte = parseInt(string.substr(i * 2, 2), 16)
    assert(!isNaN(byte), 'Invalid hex string')
    buf[offset + i] = byte
  }
  Buffer._charsWritten = i * 2
  return i
}

function _utf8Write (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
      blitBuffer(utf8ToBytes(string), buf, offset, length)
  return charsWritten
}

function _asciiWrite (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
      blitBuffer(asciiToBytes(string), buf, offset, length)
  return charsWritten
}

// 'binary' encoding writes bytes exactly like ascii here.
function _binaryWrite (buf, string, offset, length) {
  return _asciiWrite(buf, string, offset, length)
}

function _base64Write (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
      blitBuffer(base64ToBytes(string), buf, offset, length)
  return charsWritten
}

// Write `string` into the buffer; returns the number of bytes written.
Buffer.prototype.write = function (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      encoding = length
      length = undefined
    }
  } else {  // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }

  offset = Number(offset) || 0
  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()

  switch (encoding) {
    case 'hex':
      return _hexWrite(this, string, offset, length)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Write(this, string, offset, length)
    case 'ascii':
      return _asciiWrite(this, string, offset, length)
    case 'binary':
      return _binaryWrite(this, string, offset, length)
    case 'base64':
      return _base64Write(this, string, offset, length)
    default:
      throw new Error('Unknown encoding')
  }
}
// Decode a range of the buffer to a string in the given encoding
// (defaults to utf8; range defaults to the whole buffer).
Buffer.prototype.toString = function (encoding, start, end) {
  var self = this

  encoding = String(encoding || 'utf8').toLowerCase()
  start = Number(start) || 0
  end = (end !== undefined)
    ? Number(end)
    : end = self.length

  // Fastpath empty strings
  if (end === start)
    return ''

  switch (encoding) {
    case 'hex':
      return _hexSlice(self, start, end)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Slice(self, start, end)
    case 'ascii':
      return _asciiSlice(self, start, end)
    case 'binary':
      return _binarySlice(self, start, end)
    case 'base64':
      return _base64Slice(self, start, end)
    default:
      throw new Error('Unknown encoding')
  }
}

Buffer.prototype.toJSON = function () {
  return {
    type: 'Buffer',
    data: Array.prototype.slice.call(this._arr || this, 0)
  }
}

// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
Buffer.prototype.copy = function (target, target_start, start, end) {
  var source = this

  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (!target_start) target_start = 0

  // Copy 0 bytes; we're done
  if (end === start) return
  if (target.length === 0 || source.length === 0) return

  // Fatal error conditions
  assert(end >= start, 'sourceEnd < sourceStart')
  assert(target_start >= 0 && target_start < target.length,
      'targetStart out of bounds')
  assert(start >= 0 && start < source.length, 'sourceStart out of bounds')
  assert(end >= 0 && end <= source.length, 'sourceEnd out of bounds')

  // Are we oob?
  if (end > this.length)
    end = this.length
  if (target.length - target_start < end - start)
    end = target.length - target_start + start

  // copy!
  for (var i = 0; i < end - start; i++)
    target[i + target_start] = this[i + start]
}

function _base64Slice (buf, start, end) {
  if (start === 0 && end === buf.length) {
    return base64.fromByteArray(buf)
  } else {
    return base64.fromByteArray(buf.slice(start, end))
  }
}

// Decode bytes as UTF-8, batching multi-byte sequences through
// decodeUtf8Char via percent-encoding.
function _utf8Slice (buf, start, end) {
  var res = ''
  var tmp = ''
  end = Math.min(buf.length, end)

  for (var i = start; i < end; i++) {
    if (buf[i] <= 0x7F) {
      res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
      tmp = ''
    } else {
      tmp += '%' + buf[i].toString(16)
    }
  }

  return res + decodeUtf8Char(tmp)
}
// Decode bytes in [start, end) as one-byte-per-char text.
function _asciiSlice (buf, start, end) {
  var ret = ''
  end = Math.min(buf.length, end)

  for (var i = start; i < end; i++)
    ret += String.fromCharCode(buf[i])
  return ret
}

// 'binary' decoding is byte-for-byte identical to ascii decoding here.
function _binarySlice (buf, start, end) {
  return _asciiSlice(buf, start, end)
}

// Decode bytes in [start, end) as a hex string; out-of-range bounds are
// clamped to the buffer.
function _hexSlice (buf, start, end) {
  var len = buf.length

  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len

  var out = ''
  for (var i = start; i < end; i++) {
    out += toHex(buf[i])
  }
  return out
}
// http://nodejs.org/api/buffer.html#buffer_buf_slice_start_end
Buffer.prototype.slice = function (start, end) {
  var len = this.length
  start = clamp(start, len, 0)
  end = clamp(end, len, len)

  if (Buffer._useTypedArrays) {
    // subarray shares memory with the parent, like node's Buffer#slice.
    return augment(this.subarray(start, end))
  } else {
    var sliceLen = end - start
    var newBuf = new Buffer(sliceLen, undefined, true)
    for (var i = 0; i < sliceLen; i++) {
      newBuf[i] = this[i + start]
    }
    return newBuf
  }
}

// `get` will be removed in Node 0.13+
Buffer.prototype.get = function (offset) {
  console.log('.get() is deprecated. Access using array indexes instead.')
  return this.readUInt8(offset)
}

// `set` will be removed in Node 0.13+
Buffer.prototype.set = function (v, offset) {
  console.log('.set() is deprecated. Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}

// Read one unsigned byte; returns undefined for an out-of-range offset
// when `noAssert` is set.
Buffer.prototype.readUInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }

  if (offset >= this.length)
    return

  return this[offset]
}
// Read an unsigned 16-bit integer from `buf` at `offset`. With `noAssert`,
// reads that run off the end are truncated (missing high byte treated as 0)
// and an offset past the end returns undefined.
function _readUInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len)
    return

  var val
  if (littleEndian) {
    val = buf[offset]
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
  } else {
    val = buf[offset] << 8
    if (offset + 1 < len)
      val |= buf[offset + 1]
  }
  return val
}
// Public 16-bit unsigned readers; thin wrappers around _readUInt16.
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
  return _readUInt16(this, offset, true, noAssert)
}

Buffer.prototype.readUInt16BE = function (offset, noAssert) {
  return _readUInt16(this, offset, false, noAssert)
}
// Read an unsigned 32-bit integer from `buf` at `offset`. With `noAssert`,
// partial reads at the end are truncated (missing high bytes treated as 0)
// and an offset past the end returns undefined. The `<< 24 >>> 0` keeps the
// top byte unsigned.
function _readUInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len)
    return

  var val
  if (littleEndian) {
    if (offset + 2 < len)
      val = buf[offset + 2] << 16
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
    val |= buf[offset]
    if (offset + 3 < len)
      val = val + (buf[offset + 3] << 24 >>> 0)
  } else {
    if (offset + 1 < len)
      val = buf[offset + 1] << 16
    if (offset + 2 < len)
      val |= buf[offset + 2] << 8
    if (offset + 3 < len)
      val |= buf[offset + 3]
    val = val + (buf[offset] << 24 >>> 0)
  }
  return val
}
// Public 32-bit unsigned readers; thin wrappers around _readUInt32.
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
  return _readUInt32(this, offset, true, noAssert)
}

Buffer.prototype.readUInt32BE = function (offset, noAssert) {
  return _readUInt32(this, offset, false, noAssert)
}

// Read a signed 8-bit integer (two's complement) at `offset`.
Buffer.prototype.readInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null,
        'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }

  if (offset >= this.length)
    return

  var neg = this[offset] & 0x80
  if (neg)
    return (0xff - this[offset] + 1) * -1
  else
    return this[offset]
}

// Signed 16-bit read: reuse the unsigned reader, then sign-extend.
function _readInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len)
    return

  var val = _readUInt16(buf, offset, littleEndian, true)
  var neg = val & 0x8000
  if (neg)
    return (0xffff - val + 1) * -1
  else
    return val
}

Buffer.prototype.readInt16LE = function (offset, noAssert) {
  return _readInt16(this, offset, true, noAssert)
}

Buffer.prototype.readInt16BE = function (offset, noAssert) {
  return _readInt16(this, offset, false, noAssert)
}

// Signed 32-bit read: reuse the unsigned reader, then sign-extend.
function _readInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  var len = buf.length
  if (offset >= len)
    return

  var val = _readUInt32(buf, offset, littleEndian, true)
  var neg = val & 0x80000000
  if (neg)
    return (0xffffffff - val + 1) * -1
  else
    return val
}

Buffer.prototype.readInt32LE = function (offset, noAssert) {
  return _readInt32(this, offset, true, noAssert)
}

Buffer.prototype.readInt32BE = function (offset, noAssert) {
  return _readInt32(this, offset, false, noAssert)
}

// IEEE 754 single-precision read (23 mantissa bits, 4 bytes).
function _readFloat (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 23, 4)
}

Buffer.prototype.readFloatLE = function (offset, noAssert) {
  return _readFloat(this, offset, true, noAssert)
}

Buffer.prototype.readFloatBE = function (offset, noAssert) {
  return _readFloat(this, offset, false, noAssert)
}

// IEEE 754 double-precision read (52 mantissa bits, 8 bytes).
function _readDouble (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 7 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 52, 8)
}

Buffer.prototype.readDoubleLE = function (offset, noAssert) {
  return _readDouble(this, offset, true, noAssert)
}

Buffer.prototype.readDoubleBE = function (offset, noAssert) {
  return _readDouble(this, offset, false, noAssert)
}
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) { | |
if (!noAssert) { | |
assert(value !== undefined && value !== null, 'missing value') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset < this.length, 'trying to write beyond buffer length') | |
verifuint(value, 0xff) | |
} | |
if (offset >= this.length) return | |
this[offset] = value | |
} | |
function _writeUInt16 (buf, value, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(value !== undefined && value !== null, 'missing value') | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset + 1 < buf.length, 'trying to write beyond buffer length') | |
verifuint(value, 0xffff) | |
} | |
var len = buf.length | |
if (offset >= len) | |
return | |
for (var i = 0, j = Math.min(len - offset, 2); i < j; i++) { | |
buf[offset + i] = | |
(value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>> | |
(littleEndian ? i : 1 - i) * 8 | |
} | |
} | |
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) { | |
_writeUInt16(this, value, offset, true, noAssert) | |
} | |
Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) { | |
_writeUInt16(this, value, offset, false, noAssert) | |
} | |
function _writeUInt32 (buf, value, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(value !== undefined && value !== null, 'missing value') | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset + 3 < buf.length, 'trying to write beyond buffer length') | |
verifuint(value, 0xffffffff) | |
} | |
var len = buf.length | |
if (offset >= len) | |
return | |
for (var i = 0, j = Math.min(len - offset, 4); i < j; i++) { | |
buf[offset + i] = | |
(value >>> (littleEndian ? i : 3 - i) * 8) & 0xff | |
} | |
} | |
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) { | |
_writeUInt32(this, value, offset, true, noAssert) | |
} | |
Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) { | |
_writeUInt32(this, value, offset, false, noAssert) | |
} | |
Buffer.prototype.writeInt8 = function (value, offset, noAssert) { | |
if (!noAssert) { | |
assert(value !== undefined && value !== null, 'missing value') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset < this.length, 'Trying to write beyond buffer length') | |
verifsint(value, 0x7f, -0x80) | |
} | |
if (offset >= this.length) | |
return | |
if (value >= 0) | |
this.writeUInt8(value, offset, noAssert) | |
else | |
this.writeUInt8(0xff + value + 1, offset, noAssert) | |
} | |
function _writeInt16 (buf, value, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(value !== undefined && value !== null, 'missing value') | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset + 1 < buf.length, 'Trying to write beyond buffer length') | |
verifsint(value, 0x7fff, -0x8000) | |
} | |
var len = buf.length | |
if (offset >= len) | |
return | |
if (value >= 0) | |
_writeUInt16(buf, value, offset, littleEndian, noAssert) | |
else | |
_writeUInt16(buf, 0xffff + value + 1, offset, littleEndian, noAssert) | |
} | |
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) { | |
_writeInt16(this, value, offset, true, noAssert) | |
} | |
Buffer.prototype.writeInt16BE = function (value, offset, noAssert) { | |
_writeInt16(this, value, offset, false, noAssert) | |
} | |
// Core signed 32-bit write. Negative values are converted to their
// two's-complement unsigned equivalent and delegated to _writeUInt32.
function _writeInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fffffff, -0x80000000)
  }
  var len = buf.length
  // Writing entirely past the end is a silent no-op.
  if (offset >= len)
    return
  if (value >= 0)
    _writeUInt32(buf, value, offset, littleEndian, noAssert)
  else
    _writeUInt32(buf, 0xffffffff + value + 1, offset, littleEndian, noAssert)
}
// Public LE/BE entry points over _writeInt32.
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, true, noAssert)
}
Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, false, noAssert)
}
// Core 32-bit IEEE-754 float write (23 mantissa bits, 4 bytes),
// delegated to ieee754.write.
function _writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifIEEE754(value, 3.4028234663852886e+38, -3.4028234663852886e+38)
  }
  var len = buf.length
  // Writing entirely past the end is a silent no-op.
  if (offset >= len)
    return
  ieee754.write(buf, value, offset, littleEndian, 23, 4)
}
// Public LE/BE entry points over _writeFloat.
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, true, noAssert)
}
Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, false, noAssert)
}
// Core 64-bit IEEE-754 double write (52 mantissa bits, 8 bytes),
// delegated to ieee754.write.
function _writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 7 < buf.length,
        'Trying to write beyond buffer length')
    verifIEEE754(value, 1.7976931348623157E+308, -1.7976931348623157E+308)
  }
  var len = buf.length
  // Writing entirely past the end is a silent no-op.
  if (offset >= len)
    return
  ieee754.write(buf, value, offset, littleEndian, 52, 8)
}
// Public LE/BE entry points over _writeDouble.
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, true, noAssert)
}
Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, false, noAssert)
}
// fill(value, start=0, end=buffer.length)
// Fill the byte range [start, end) with `value`. A string value is
// coerced to the char code of its first character.
Buffer.prototype.fill = function (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  if (!end) end = this.length
  if (typeof value === 'string') {
    value = value.charCodeAt(0)
  }
  assert(typeof value === 'number' && !isNaN(value), 'value is not a number')
  assert(end >= start, 'end < start')
  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return
  assert(start >= 0 && start < this.length, 'start out of bounds')
  assert(end >= 0 && end <= this.length, 'end out of bounds')
  for (var i = start; i < end; i++) {
    this[i] = value
  }
}
// Debug representation: '<Buffer de ad be ef ...>', truncated with '...'
// after INSPECT_MAX_BYTES bytes.
Buffer.prototype.inspect = function () {
  var out = []
  var len = this.length
  for (var i = 0; i < len; i++) {
    out[i] = toHex(this[i])
    // NOTE(review): the '...' marker is appended only after the byte at
    // index INSPECT_MAX_BYTES has itself been formatted, so one extra
    // byte is shown — confirm against upstream before changing.
    if (i === exports.INSPECT_MAX_BYTES) {
      out[i + 1] = '...'
      break
    }
  }
  return '<Buffer ' + out.join(' ') + '>'
}
/**
 * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
 * Added in Node 0.12. Only available in browsers that support ArrayBuffer.
 */
Buffer.prototype.toArrayBuffer = function () {
  if (typeof Uint8Array === 'function') {
    if (Buffer._useTypedArrays) {
      // Already typed-array backed: copy via the Buffer(buffer) constructor.
      return (new Buffer(this)).buffer
    } else {
      // Array-backed: copy byte by byte into a fresh Uint8Array.
      var buf = new Uint8Array(this.length)
      for (var i = 0, len = buf.length; i < len; i += 1)
        buf[i] = this[i]
      return buf.buffer
    }
  } else {
    throw new Error('Buffer.toArrayBuffer not supported in this browser')
  }
}
// HELPER FUNCTIONS | |
// ================ | |
/**
 * Trim leading and trailing whitespace, falling back to a regex
 * replacement for environments without String.prototype.trim.
 */
function stringtrim (str) {
  if (!str.trim) {
    return str.replace(/^\s+|\s+$/g, '')
  }
  return str.trim()
}
// Shorthand for Buffer.prototype, used by augment() below.
var BP = Buffer.prototype
/**
 * Augment the Uint8Array *instance* (not the class!) with Buffer methods
 * so a plain Uint8Array can be used where a Buffer is expected.
 */
function augment (arr) {
  arr._isBuffer = true
  // save reference to original Uint8Array get/set methods before overwriting
  arr._get = arr.get
  arr._set = arr.set
  // deprecated, will be removed in node 0.13+
  arr.get = BP.get
  arr.set = BP.set
  // string handling and serialization
  arr.write = BP.write
  arr.toString = BP.toString
  arr.toLocaleString = BP.toString
  arr.toJSON = BP.toJSON
  // copying and views
  arr.copy = BP.copy
  arr.slice = BP.slice
  // integer and float readers
  arr.readUInt8 = BP.readUInt8
  arr.readUInt16LE = BP.readUInt16LE
  arr.readUInt16BE = BP.readUInt16BE
  arr.readUInt32LE = BP.readUInt32LE
  arr.readUInt32BE = BP.readUInt32BE
  arr.readInt8 = BP.readInt8
  arr.readInt16LE = BP.readInt16LE
  arr.readInt16BE = BP.readInt16BE
  arr.readInt32LE = BP.readInt32LE
  arr.readInt32BE = BP.readInt32BE
  arr.readFloatLE = BP.readFloatLE
  arr.readFloatBE = BP.readFloatBE
  arr.readDoubleLE = BP.readDoubleLE
  arr.readDoubleBE = BP.readDoubleBE
  // integer and float writers
  arr.writeUInt8 = BP.writeUInt8
  arr.writeUInt16LE = BP.writeUInt16LE
  arr.writeUInt16BE = BP.writeUInt16BE
  arr.writeUInt32LE = BP.writeUInt32LE
  arr.writeUInt32BE = BP.writeUInt32BE
  arr.writeInt8 = BP.writeInt8
  arr.writeInt16LE = BP.writeInt16LE
  arr.writeInt16BE = BP.writeInt16BE
  arr.writeInt32LE = BP.writeInt32LE
  arr.writeInt32BE = BP.writeInt32BE
  arr.writeFloatLE = BP.writeFloatLE
  arr.writeFloatBE = BP.writeFloatBE
  arr.writeDoubleLE = BP.writeDoubleLE
  arr.writeDoubleBE = BP.writeDoubleBE
  // misc
  arr.fill = BP.fill
  arr.inspect = BP.inspect
  arr.toArrayBuffer = BP.toArrayBuffer
  return arr
}
/**
 * Clamp a slice(start, end) index into [0, len]. Non-numbers yield
 * defaultValue; negative indexes resolve relative to the end.
 */
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index; // Coerce to integer.
  if (index >= len) return len
  if (index < 0) {
    index += len
    return index >= 0 ? index : 0
  }
  return index
}
/**
 * Coerce an arbitrary value to a non-negative integer length.
 * `+length` coerces to a number (possibly NaN), Math.ceil rounds
 * fractional sizes up, and ~~ truncates while mapping NaN to 0.
 */
function coerce (length) {
  var n = ~~Math.ceil(+length)
  return n < 0 ? 0 : n
}
/**
 * Array test: prefer native Array.isArray, falling back to the
 * Object.prototype.toString check for very old engines.
 */
function isArray (subject) {
  if (Array.isArray) return Array.isArray(subject)
  return Object.prototype.toString.call(subject) === '[object Array]'
}
// True for anything the Buffer constructor can treat as a byte sequence:
// real arrays, Buffers, or any object with a numeric `length`.
// NOTE: may return a truthy non-boolean (the subject itself); callers
// only use the result in boolean context.
function isArrayish (subject) {
  return isArray(subject) || Buffer.isBuffer(subject) ||
      subject && typeof subject === 'object' &&
      typeof subject.length === 'number'
}
/** Format a byte value as exactly two lowercase hex characters. */
function toHex (n) {
  var hex = n.toString(16)
  return n < 16 ? '0' + hex : hex
}
/**
 * Encode a JS string as an array of UTF-8 byte values.
 * ASCII code units are pushed directly; anything else (including
 * surrogate pairs, which are consumed as two code units) is routed
 * through encodeURIComponent and its percent-escapes are parsed back
 * into bytes.
 */
function utf8ToBytes (str) {
  var bytes = []
  for (var i = 0; i < str.length; i++) {
    var code = str.charCodeAt(i)
    if (code <= 0x7F) {
      bytes.push(code)
    } else {
      var start = i
      // A lead/trail surrogate means the character spans two code units.
      if (code >= 0xD800 && code <= 0xDFFF) i++
      var hex = encodeURIComponent(str.slice(start, i + 1)).substr(1).split('%')
      for (var j = 0; j < hex.length; j++) {
        bytes.push(parseInt(hex[j], 16))
      }
    }
  }
  return bytes
}
/**
 * Convert a string to bytes by masking each UTF-16 code unit to 8 bits.
 * (Node masks with 0xFF rather than 0x7F, so we do too.)
 */
function asciiToBytes (str) {
  var bytes = []
  var n = str.length
  for (var i = 0; i < n; i++) {
    bytes.push(str.charCodeAt(i) & 0xFF)
  }
  return bytes
}
// Decode a base64 string into a byte array via the inlined base64-js
// module below.
function base64ToBytes (str) {
  return base64.toByteArray(str)
}
/**
 * Copy up to `length` bytes from `src` into `dst` starting at `offset`,
 * stopping early when either buffer runs out.
 * Returns the number of bytes actually copied.
 */
function blitBuffer (src, dst, offset, length) {
  // (removed unused local `pos` from the original)
  var i
  for (i = 0; i < length; i++) {
    // Stop at the end of either the destination or the source.
    if ((i + offset >= dst.length) || (i >= src.length))
      break
    dst[i + offset] = src[i]
  }
  return i
}
/**
 * Percent-decode a UTF-8 escape sequence, substituting U+FFFD (the
 * Unicode replacement character) when the sequence is malformed.
 */
function decodeUtf8Char (str) {
  var REPLACEMENT = String.fromCharCode(0xFFFD) // UTF 8 invalid char
  try {
    return decodeURIComponent(str)
  } catch (err) {
    return REPLACEMENT
  }
}
/*
 * We have to make sure that the value is a valid integer. This means that it
 * is non-negative. It has no fractional component and that it does not
 * exceed the maximum allowed value.
 */
// Assertion order matters: it determines which message a caller sees
// when a value violates more than one condition.
function verifuint (value, max) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value >= 0,
      'specified a negative value for writing an unsigned value')
  assert(value <= max, 'value is larger than maximum value for type')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate a signed integer: numeric, within [min, max], no fractional
// component. Assertion order matters for which message fires first.
function verifsint(value, max, min) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate a float: numeric and within [min, max]. No integrality check
// since IEEE-754 values may be fractional.
function verifIEEE754(value, max, min) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
}
/**
 * Minimal assertion helper: throw an Error carrying `message`
 * (or a generic fallback) whenever `test` is falsy.
 */
function assert (test, message) {
  if (test) return
  throw new Error(message || 'Failed assertion')
}
},{"base64-js":5,"ieee754":6}],5:[function(require,module,exports){ | |
// Inlined base64-js module: base64 <-> byte-array codec.
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
;(function (exports) {
  'use strict';
  // Prefer typed arrays for decoded output when available.
  var Arr = (typeof Uint8Array !== 'undefined')
    ? Uint8Array
    : Array
  var PLUS = '+'.charCodeAt(0)
  var SLASH = '/'.charCodeAt(0)
  var NUMBER = '0'.charCodeAt(0)
  var LOWER = 'a'.charCodeAt(0)
  var UPPER = 'A'.charCodeAt(0)
  // Map a single base64 character to its 6-bit value.
  // NOTE(review): characters that match none of the branches fall
  // through and return undefined — input is assumed to be valid base64.
  function decode (elt) {
    var code = elt.charCodeAt(0)
    if (code === PLUS)
      return 62 // '+'
    if (code === SLASH)
      return 63 // '/'
    if (code < NUMBER)
      return -1 //no match
    if (code < NUMBER + 10)
      return code - NUMBER + 26 + 26
    if (code < UPPER + 26)
      return code - UPPER
    if (code < LOWER + 26)
      return code - LOWER + 26
  }
  // Decode a base64 string into an Arr (Uint8Array or Array) of bytes.
  function b64ToByteArray (b64) {
    var i, j, l, tmp, placeHolders, arr
    if (b64.length % 4 > 0) {
      throw new Error('Invalid string. Length must be a multiple of 4')
    }
    // the number of equal signs (place holders)
    // if there are two placeholders, than the two characters before it
    // represent one byte
    // if there is only one, then the three characters before it represent 2 bytes
    // this is just a cheap hack to not do indexOf twice
    var len = b64.length
    placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0
    // base64 is 4/3 + up to two characters of the original data
    arr = new Arr(b64.length * 3 / 4 - placeHolders)
    // if there are placeholders, only get up to the last complete 4 chars
    l = placeHolders > 0 ? b64.length - 4 : b64.length
    var L = 0
    function push (v) {
      arr[L++] = v
    }
    // Each complete group of 4 characters yields 3 bytes.
    for (i = 0, j = 0; i < l; i += 4, j += 3) {
      tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3))
      push((tmp & 0xFF0000) >> 16)
      push((tmp & 0xFF00) >> 8)
      push(tmp & 0xFF)
    }
    // Handle the final, padded group.
    if (placeHolders === 2) {
      tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
      push(tmp & 0xFF)
    } else if (placeHolders === 1) {
      tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
      push((tmp >> 8) & 0xFF)
      push(tmp & 0xFF)
    }
    return arr
  }
  // Encode an array-like of byte values as a base64 string.
  function uint8ToBase64 (uint8) {
    var i,
      extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes
      output = "",
      temp, length
    function encode (num) {
      return lookup.charAt(num)
    }
    // Pack three bytes into four base64 characters.
    function tripletToBase64 (num) {
      return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F)
    }
    // go through the array every three bytes, we'll deal with trailing stuff later
    for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
      temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
      output += tripletToBase64(temp)
    }
    // pad the end with zeros, but make sure to not forget the extra bytes
    switch (extraBytes) {
      case 1:
        temp = uint8[uint8.length - 1]
        output += encode(temp >> 2)
        output += encode((temp << 4) & 0x3F)
        output += '=='
        break
      case 2:
        temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
        output += encode(temp >> 10)
        output += encode((temp >> 4) & 0x3F)
        output += encode((temp << 2) & 0x3F)
        output += '='
        break
    }
    return output
  }
  exports.toByteArray = b64ToByteArray
  exports.fromByteArray = uint8ToBase64
}(typeof exports === 'undefined' ? (this.base64js = {}) : exports))
},{}],6:[function(require,module,exports){ | |
// Read an IEEE-754 float from `buffer` at `offset`.
// mLen = mantissa bits, nBytes = total byte width, isLE = byte order.
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
  var e, m,
      eLen = nBytes * 8 - mLen - 1,   // exponent bit width
      eMax = (1 << eLen) - 1,         // all-ones exponent (Inf/NaN)
      eBias = eMax >> 1,
      nBits = -7,
      i = isLE ? (nBytes - 1) : 0,    // start at the sign-carrying byte
      d = isLE ? -1 : 1,              // direction to walk the bytes
      s = buffer[offset + i];
  i += d;
  // Peel the exponent bits out of the leading byte(s).
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);
  // The remaining bits belong to the mantissa.
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);
  if (e === 0) {
    // Subnormal number.
    e = 1 - eBias;
  } else if (e === eMax) {
    // All-ones exponent: NaN when mantissa non-zero, else +/-Infinity.
    return m ? NaN : ((s ? -1 : 1) * Infinity);
  } else {
    // Normal number: restore the implicit leading 1 bit.
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
// Write `value` as an IEEE-754 float into `buffer` at `offset`.
// mLen = mantissa bits, nBytes = total byte width, isLE = byte order.
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
  var e, m, c,
      eLen = nBytes * 8 - mLen - 1,   // exponent bit width
      eMax = (1 << eLen) - 1,         // all-ones exponent (Inf/NaN)
      eBias = eMax >> 1,
      // Rounding compensation, only applied for single precision.
      rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
      i = isLE ? 0 : (nBytes - 1),    // first byte index to write
      d = isLE ? 1 : -1,              // direction to walk the bytes
      s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0; // sign bit (1/value catches -0)
  value = Math.abs(value);
  if (isNaN(value) || value === Infinity) {
    // NaN encodes with a non-zero mantissa; Infinity with zero.
    m = isNaN(value) ? 1 : 0;
    e = eMax;
  } else {
    e = Math.floor(Math.log(value) / Math.LN2);
    // Math.log can be off by one; correct the exponent estimate.
    if (value * (c = Math.pow(2, -e)) < 1) {
      e--;
      c *= 2;
    }
    // Apply rounding compensation before extracting the mantissa.
    if (e + eBias >= 1) {
      value += rt / c;
    } else {
      value += rt * Math.pow(2, 1 - eBias);
    }
    // Compensation may have pushed the value over a power of two.
    if (value * c >= 2) {
      e++;
      c /= 2;
    }
    if (e + eBias >= eMax) {
      // Exponent overflow: encode Infinity.
      m = 0;
      e = eMax;
    } else if (e + eBias >= 1) {
      // Normal number: subtract the implicit leading 1 bit.
      m = (value * c - 1) * Math.pow(2, mLen);
      e = e + eBias;
    } else {
      // Subnormal number: exponent field is zero.
      m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
      e = 0;
    }
  }
  // Emit the mantissa bytes, then the combined exponent/mantissa bytes.
  for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);
  e = (e << mLen) | m;
  eLen += mLen;
  for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);
  // Fold the sign bit into the last byte written.
  buffer[offset + i - d] |= s * 128;
};
},{}],7:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// resolves . and .. elements in a path array with directory names there | |
// must be no slashes, empty elements, or device names (c:\) in the array | |
// (so also no leading and trailing slashes - it does not distinguish | |
// relative and absolute paths) | |
/**
 * Resolve '.' and '..' segments in an array of path parts (no slashes,
 * empty strings, or device names allowed). When allowAboveRoot is true,
 * '..' segments that would climb above the root are preserved as leading
 * '..' entries; otherwise they are dropped.
 * Mutates and returns `parts`.
 */
function normalizeArray(parts, allowAboveRoot) {
  // if the path tries to go above the root, `up` ends up > 0
  var up = 0;
  // Walk backwards so splices don't disturb unvisited indexes.
  for (var i = parts.length - 1; i >= 0; i--) {
    var last = parts[i];
    if (last === '.') {
      parts.splice(i, 1);
    } else if (last === '..') {
      parts.splice(i, 1);
      up++;
    } else if (up) {
      // A pending '..' cancels this normal segment.
      parts.splice(i, 1);
      up--;
    }
  }
  // if the path is allowed to go above the root, restore leading ..s
  // (the original `for (; up--; up)` had a dead no-op update expression)
  if (allowAboveRoot) {
    while (up--) {
      parts.unshift('..');
    }
  }
  return parts;
}
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
    /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;
// Apply the regex and drop the full-match element, leaving the four
// capture groups. The regex matches every string, so exec never returns
// null here.
var splitPath = function(filename) {
  return splitPathRe.exec(filename).slice(1);
};
// path.resolve([from ...], to)
// posix version
// Walk the arguments right-to-left, prepending each, until an absolute
// path is found; fall back to process.cwd() when none is absolute.
exports.resolve = function() {
  var resolvedPath = '',
      resolvedAbsolute = false;
  for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
    // i === -1 means no argument was absolute: use the cwd.
    var path = (i >= 0) ? arguments[i] : process.cwd();
    // Skip empty and invalid entries
    if (typeof path !== 'string') {
      throw new TypeError('Arguments to path.resolve must be strings');
    } else if (!path) {
      continue;
    }
    resolvedPath = path + '/' + resolvedPath;
    resolvedAbsolute = path.charAt(0) === '/';
  }
  // At this point the path should be resolved to a full absolute path, but
  // handle relative paths to be safe (might happen when process.cwd() fails)
  // Normalize the path
  resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) {
    return !!p;
  }), !resolvedAbsolute).join('/');
  return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
};
// path.normalize(path)
// posix version
// Collapse '.'/'..' segments and duplicate slashes while preserving
// absoluteness and any trailing slash.
exports.normalize = function(path) {
  var isAbsolute = exports.isAbsolute(path),
      trailingSlash = substr(path, -1) === '/';
  // Normalize the path
  path = normalizeArray(filter(path.split('/'), function(p) {
    return !!p;
  }), !isAbsolute).join('/');
  if (!path && !isAbsolute) {
    // Everything collapsed away; a relative result must not be empty.
    path = '.';
  }
  if (path && trailingSlash) {
    path += '/';
  }
  return (isAbsolute ? '/' : '') + path;
};
// posix version
// A path is absolute iff it begins with '/'.
exports.isAbsolute = function(path) {
  return path.charAt(0) === '/';
};
// posix version
// Join all string arguments with '/' and normalize the result.
// Throws TypeError on any non-string argument.
exports.join = function() {
  var paths = Array.prototype.slice.call(arguments, 0);
  return exports.normalize(filter(paths, function(p, index) {
    if (typeof p !== 'string') {
      throw new TypeError('Arguments to path.join must be strings');
    }
    return p;
  }).join('/'));
};
// path.relative(from, to) | |
// posix version | |
exports.relative = function(from, to) { | |
from = exports.resolve(from).substr(1); | |
to = exports.resolve(to).substr(1); | |
function trim(arr) { | |
var start = 0; | |
for (; start < arr.length; start++) { | |
if (arr[start] !== '') break; | |
} | |
var end = arr.length - 1; | |
for (; end >= 0; end--) { | |
if (arr[end] !== '') break; | |
} | |
if (start > end) return []; | |
return arr.slice(start, end - start + 1); | |
} | |
var fromParts = trim(from.split('/')); | |
var toParts = trim(to.split('/')); | |
var length = Math.min(fromParts.length, toParts.length); | |
var samePartsLength = length; | |
for (var i = 0; i < length; i++) { | |
if (fromParts[i] !== toParts[i]) { | |
samePartsLength = i; | |
break; | |
} | |
} | |
var outputParts = []; | |
for (var i = samePartsLength; i < fromParts.length; i++) { | |
outputParts.push('..'); | |
} | |
outputParts = outputParts.concat(toParts.slice(samePartsLength)); | |
return outputParts.join('/'); | |
}; | |
// POSIX path separator and PATH-style env-var delimiter.
exports.sep = '/';
exports.delimiter = ':';
// Return the directory portion of `path`, or '.' when there is none.
exports.dirname = function(path) {
  var result = splitPath(path),
      root = result[0],
      dir = result[1];
  if (!root && !dir) {
    // No dirname whatsoever
    return '.';
  }
  if (dir) {
    // It has a dirname, strip trailing slash
    dir = dir.substr(0, dir.length - 1);
  }
  return root + dir;
};
// Return the final path segment, optionally with a trailing `ext`
// suffix removed when it matches exactly.
exports.basename = function(path, ext) {
  var f = splitPath(path)[2];
  // TODO: make this comparison case-insensitive on windows?
  if (ext && f.substr(-1 * ext.length) === ext) {
    f = f.substr(0, f.length - ext.length);
  }
  return f;
};
// Return the extension of the last path segment, including the leading
// dot, or '' when there is none.
exports.extname = function(path) {
  return splitPath(path)[3];
};
/**
 * Array.prototype.filter shim: use the native method when present,
 * otherwise filter manually (supports plain array-likes too).
 */
function filter (xs, f) {
  if (xs.filter) return xs.filter(f);
  var kept = [];
  for (var i = 0; i < xs.length; i++) {
    var x = xs[i];
    if (f(x, i, xs)) kept.push(x);
  }
  return kept;
}
// String.prototype.substr - negative index don't work in IE8
var substr;
if ('ab'.substr(-1) === 'b') {
  // Native substr handles a negative start correctly.
  substr = function (str, start, len) {
    return str.substr(start, len);
  };
} else {
  // IE8 fallback: translate a negative start into an absolute index.
  substr = function (str, start, len) {
    if (start < 0) {
      start = str.length + start;
    }
    return str.substr(start, len);
  };
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3}],8:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
module.exports = Duplex;
var inherits = require('inherits');
// Browser shim: setImmediate here is really process.nextTick.
var setImmediate = require('process/browser.js').nextTick;
var Readable = require('./readable.js');
var Writable = require('./writable.js');
inherits(Duplex, Readable);
// Parasitic inheritance: copy the writable methods onto the prototype.
Duplex.prototype.write = Writable.prototype.write;
Duplex.prototype.end = Writable.prototype.end;
Duplex.prototype._write = Writable.prototype._write;
// Duplex constructor. Options may disable either side
// (readable/writable: false) or half-open behaviour
// (allowHalfOpen: false auto-ends the writable side on 'end').
function Duplex(options) {
  // Allow construction without `new`.
  if (!(this instanceof Duplex))
    return new Duplex(options);
  Readable.call(this, options);
  Writable.call(this, options);
  if (options && options.readable === false)
    this.readable = false;
  if (options && options.writable === false)
    this.writable = false;
  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false)
    this.allowHalfOpen = false;
  this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended)
    return;
  // no more data can be written.
  // But allow more writes to happen in this tick.
  var self = this;
  setImmediate(function () {
    self.end();
  });
}
},{"./readable.js":12,"./writable.js":14,"inherits":16,"process/browser.js":10}],9:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Stream;
var EE = require('events').EventEmitter;
var inherits = require('inherits');
inherits(Stream, EE);
// Re-export the stream classes on the Stream constructor itself,
// mirroring node's require('stream') surface.
Stream.Readable = require('./readable.js');
Stream.Writable = require('./writable.js');
Stream.Duplex = require('./duplex.js');
Stream.Transform = require('./transform.js');
Stream.PassThrough = require('./passthrough.js');
// Backwards-compat with node 0.4.x
Stream.Stream = Stream;
// old-style streams. Note that the pipe method (the only relevant
// part of this class) is overridden in the Readable class.
function Stream() {
  EE.call(this);
}
// Old-style pipe: wire `source` ('data'/'end'/'close'/'error') to
// `dest` ('drain'/'error'/'close'), with backpressure via pause/resume.
// Returns `dest` to allow chaining: A.pipe(B).pipe(C).
Stream.prototype.pipe = function(dest, options) {
  var source = this;
  // Forward each chunk, pausing the source when dest's buffer is full.
  function ondata(chunk) {
    if (dest.writable) {
      if (false === dest.write(chunk) && source.pause) {
        source.pause();
      }
    }
  }
  source.on('data', ondata);
  // Resume the source once dest has drained its buffer.
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume();
    }
  }
  dest.on('drain', ondrain);
  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend);
    source.on('close', onclose);
  }
  // Guard so dest is ended/destroyed at most once.
  var didOnEnd = false;
  function onend() {
    if (didOnEnd) return;
    didOnEnd = true;
    dest.end();
  }
  function onclose() {
    if (didOnEnd) return;
    didOnEnd = true;
    if (typeof dest.destroy === 'function') dest.destroy();
  }
  // don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup();
    // Rethrow only when nobody else is listening for 'error'.
    if (EE.listenerCount(this, 'error') === 0) {
      throw er; // Unhandled stream error in pipe.
    }
  }
  source.on('error', onerror);
  dest.on('error', onerror);
  // remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata);
    dest.removeListener('drain', ondrain);
    source.removeListener('end', onend);
    source.removeListener('close', onclose);
    source.removeListener('error', onerror);
    dest.removeListener('error', onerror);
    source.removeListener('end', cleanup);
    source.removeListener('close', cleanup);
    dest.removeListener('close', cleanup);
  }
  source.on('end', cleanup);
  source.on('close', cleanup);
  dest.on('close', cleanup);
  dest.emit('pipe', source);
  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest;
};
},{"./duplex.js":8,"./passthrough.js":11,"./readable.js":12,"./transform.js":13,"./writable.js":14,"events":2,"inherits":16}],10:[function(require,module,exports){ | |
module.exports=require(3) | |
},{}],11:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
module.exports = PassThrough;
var Transform = require('./transform.js');
var inherits = require('inherits');
inherits(PassThrough, Transform);
function PassThrough(options) {
  // Allow construction without `new`.
  if (!(this instanceof PassThrough))
    return new PassThrough(options);
  Transform.call(this, options);
}
// Identity transform: emit each chunk unchanged.
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
},{"./transform.js":13,"inherits":16}],12:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// Readable is this module's sole export; its per-instance state container
// is exposed as a static for the other stream classes in this bundle.
module.exports = Readable;
Readable.ReadableState = ReadableState;
var EE = require('events').EventEmitter;
var Stream = require('./index.js');
var Buffer = require('buffer').Buffer;
// In this browser build, "setImmediate" is actually process.nextTick from
// the browserify process shim — used everywhere below to defer emissions.
var setImmediate = require('process/browser.js').nextTick;
// Lazy-loaded on first use (setEncoding / options.encoding).
var StringDecoder;
var inherits = require('inherits');
inherits(Readable, Stream);
// Per-stream state container for Readable. One instance hangs off each
// Readable as `stream._readableState`; all of the functions in this file
// coordinate through its flags rather than through instance properties.
function ReadableState(options, stream) {
  options = options || {};
  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  // FIFO of buffered chunks, and the combined length (bytes/chars, or
  // chunk count in object mode) of everything buffered.
  this.buffer = [];
  this.length = 0;
  // pipe() destinations: null, a single dest, or an array of dests
  // (see the pipesCount switch in pipe()).
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;
  this.endEmitted = false;
  // true while a _read() is outstanding and hasn't pushed yet.
  this.reading = false;
  // In streams that never have any data, and do push(null) right away,
  // the consumer can miss the 'end' event if they do some I/O before
  // consuming the stream. So, we don't emit('end') until some reading
  // happens.
  this.calledRead = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;
  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;
  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;
  // StringDecoder support: when an encoding is configured, buffered
  // chunks are decoded strings rather than Buffers.
  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Construct a Readable stream. Callable with or without `new`. All of the
// interesting bookkeeping lives in the attached ReadableState.
function Readable(options) {
  // Allow plain function-call invocation.
  if (!(this instanceof Readable)) {
    return new Readable(options);
  }
  this._readableState = new ReadableState(options, this);
  // legacy flag consulted by old-style consumers and by wrap()/on().
  this.readable = true;
  Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;
  if (typeof chunk === 'string' && !state.objectMode) {
    encoding = encoding || state.defaultEncoding;
    // Strings are stored as Buffers unless they already match the
    // configured decoder encoding; clearing `encoding` tells
    // readableAddChunk the chunk still needs decoding.
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }
  return readableAddChunk(this, state, chunk, encoding, false);
};
// Put a chunk back at the front of the internal buffer. Intended only for
// data that was just pulled out via read() (e.g. parser look-ahead), so it
// is never re-decoded.
Readable.prototype.unshift = function(chunk) {
  return readableAddChunk(this, this._readableState, chunk, '', true);
};
// Core of push()/unshift(): validate a chunk and add it to the internal
// buffer, emitting 'error' for invalid input, treating null/undefined as
// the EOF sentinel, and waking up pending readers. Returns true while the
// caller may keep pushing (see needMoreData).
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // push(null)/unshift(null) signals end-of-stream.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // Only decode fresh push()es: unshifted data came out of the buffer
      // already decoded, and a nonempty `encoding` marks it pre-decoded.
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);
      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }
      if (state.needReadable)
        emitReadable(stream);
      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // Empty chunk: not an error, but it does terminate a pending _read.
    state.reading = false;
  }
  return needMoreData(state);
}
// Decide whether push()/unshift() callers should keep feeding us data.
// If the buffer is below the high water mark we can take more; we also
// accept data when the buffer is empty (works around hwm=0 cases such as
// the repl) or when a 'readable' emission is pending because the user
// called read(largeNumber) and needReadable got set.
function needMoreData(state) {
  // Nothing more is ever coming after EOF, so never ask for more.
  if (state.ended) {
    return false;
  }
  return state.needReadable ||
         state.length < state.highWaterMark ||
         state.length === 0;
}
// backwards compatibility: configure a StringDecoder so that read()
// returns strings in `enc` instead of Buffers.
Readable.prototype.setEncoding = function(enc) {
  // Lazy-load string_decoder the first time an encoding is requested.
  if (!StringDecoder)
    StringDecoder = require('string_decoder').StringDecoder;
  var state = this._readableState;
  state.decoder = new StringDecoder(enc);
  state.encoding = enc;
};
// Cap the high water mark at 0x800000 (8MB).
var MAX_HWM = 0x800000;
// Round n up to the next power of two, clamped to MAX_HWM. Used to grow
// the high water mark without thrashing it by tiny increments.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) {
    return MAX_HWM;
  }
  // Classic bit trick: smear the highest set bit of (n - 1) into every
  // lower position, then add one to land on a power of two.
  n--;
  for (var shift = 1; shift < 32; shift <<= 1) {
    n |= n >> shift;
  }
  return n + 1;
}
// Decide how many bytes (or objects) read(n) should actually pull out of
// the buffer given the requested n and the current state. May raise the
// high water mark or flag that a 'readable' event is owed.
function howMuchToRead(n, state) {
  // Drained and ended: there is definitively nothing left to hand out.
  if (state.length === 0 && state.ended) {
    return 0;
  }
  // Object mode ignores byte counts: reads are one object at a time.
  if (state.objectMode) {
    return n === 0 ? 0 : 1;
  }
  // No usable count requested: hand back everything buffered — except
  // while flowing, where only the first chunk goes out per call.
  if (n === null || isNaN(n)) {
    return (state.flowing && state.buffer.length) ?
        state.buffer[0].length : state.length;
  }
  if (n <= 0) {
    return 0;
  }
  // Asking for more than the target buffer level raises the water mark,
  // rounded up to a power of 2 to avoid growing it in tiny increments.
  if (n > state.highWaterMark) {
    state.highWaterMark = roundUpToNextPowerOf2(n);
  }
  if (n <= state.length) {
    return n;
  }
  // Not enough buffered: after EOF, return whatever remains; otherwise
  // return nothing and note that a 'readable' event is owed.
  if (state.ended) {
    return state.length;
  }
  state.needReadable = true;
  return 0;
}
// you can override either this method, or the async _read(n) below.
//
// Pull up to n bytes (or one object) out of the internal buffer, calling
// _read() first when the buffer should be (re)filled. Returns the data,
// or null when nothing can be returned yet. read(0) is a valid way to
// trigger buffer refill / 'readable' emission without consuming data.
Readable.prototype.read = function(n) {
  var state = this._readableState;
  // Unlocks the deferred 'end' emission (see endReadable).
  state.calledRead = true;
  var nOrig = n;
  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;
  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable(this);
    return null;
  }
  n = howMuchToRead(n, state);
  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0)
      endReadable(this);
    return null;
  }
  // All the actual chunk generation logic needs to be
  // *below* the call to _read.  The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.
  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;
  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;
  if (doRead) {
    state.reading = true;
    // sync=true makes any emitReadable during this _read defer a tick.
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }
  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);
  var ret;
  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;
  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }
  state.length -= n;
  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;
  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable(this);
  return ret;
};
// Validate a chunk handed to push()/unshift(). Returns a TypeError for
// disallowed values, or null when the chunk is acceptable. Object mode
// accepts anything; otherwise only Buffers, strings, and the EOF sentinel
// (null/undefined) are valid.
function chunkInvalid(state, chunk) {
  if (state.objectMode) {
    return null;
  }
  if (chunk === null || chunk === undefined) {
    return null;
  }
  if (typeof chunk === 'string' || Buffer.isBuffer(chunk)) {
    return null;
  }
  return new TypeError('Invalid non-string/buffer chunk');
}
// Handle push(null): flush any partial characters the StringDecoder is
// still holding, mark the stream ended, then either emit 'readable' (data
// remains buffered) or finish up right away.
function onEofChunk(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  if (state.length > 0) {
    // Ended with data still buffered: make sure consumers notice it.
    emitReadable(stream);
  } else {
    endReadable(stream);
  }
}
// Emit 'readable', deduplicated via state.emittedReadable. While _read is
// running synchronously the emission is deferred a tick to avoid the
// read() -> 'readable' -> read() stack-overflow recursion (a nextTick
// recursion warning is the lesser evil).
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (state.emittedReadable) {
    return;
  }
  state.emittedReadable = true;
  if (!state.sync) {
    emitReadable_(stream);
    return;
  }
  setImmediate(function() {
    emitReadable_(stream);
  });
}
// The actual emission, split out so emitReadable can schedule it.
function emitReadable_(src) {
  src.emit('readable');
}
// After data lands in the buffer, schedule (at most one) asynchronous
// attempt to pre-fill it up to the high water mark; the loop itself lives
// in maybeReadMore_. state.readingMore is the dedup flag.
function maybeReadMore(stream, state) {
  if (state.readingMore) {
    return;
  }
  state.readingMore = true;
  setImmediate(function() {
    maybeReadMore_(stream, state);
  });
}
// Synchronous pre-fill loop: keep issuing read(0) (which triggers _read)
// until the buffer reaches the high water mark or a read produces no new
// data, then clear the flag so maybeReadMore can schedule again.
function maybeReadMore_(stream, state) {
  var previous = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (previous === state.length) {
      // didn't get any data, stop spinning.
      break;
    }
    previous = state.length;
  }
  state.readingMore = false;
}
// abstract method: concrete Readable implementations must override this
// (calling cb(er, data) with data <= n in length; for virtual streams
// "length" is somewhat arbitrary). The base class only reports misuse.
Readable.prototype._read = function(size) {
  this.emit('error', new Error('not implemented'));
};
// Connect this stream to a writable destination: register dest in the
// pipe set, wire up end/error/close/finish/drain/unpipe handlers, and
// start the flow() pump if it isn't running. Returns dest for chaining.
// `pipeOpts.end === false` keeps dest open after this source ends.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;
  // pipes is null / single dest / array of dests depending on count.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  // Never auto-end stdout/stderr; they are shared process-wide.
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;
  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    setImmediate(endFn);
  else
    src.once('end', endFn);
  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    if (readable !== src) return;
    cleanup();
  }
  function onend() {
    dest.end();
  }
  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);
  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }
  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  // check for listeners before emit removes one-time listeners.
  var errListeners = EE.listenerCount(dest, 'error');
  function onerror(er) {
    unpipe();
    // Re-emit only if nobody else is listening, preserving the default
    // "unhandled 'error' throws" behavior.
    if (errListeners === 0 && EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  dest.once('error', onerror);
  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);
  function unpipe() {
    src.unpipe(dest);
  }
  // tell the dest that it's being piped to
  dest.emit('pipe', src);
  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable);
    state.flowing = true;
    setImmediate(function() {
      flow(src);
    });
  }
  return dest;
};
// Build the 'drain' handler attached to each pipe destination: one fewer
// writer is blocked, and once none remain the source resumes flowing.
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain--;
    if (state.awaitDrain === 0) {
      flow(src);
    }
  };
}
// The pump: repeatedly read() from src and write the chunk to every pipe
// destination until a destination reports back-pressure (write() returned
// false) or the source runs dry. Invoked from pipe(), pipeOnDrain and
// pipeOnReadable.
function flow(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;
  // Write the current chunk to one destination, counting how many
  // destinations report back-pressure so we know how many 'drain'
  // events must arrive before flowing resumes.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }
  while (state.pipesCount && null !== (chunk = src.read())) {
    if (state.pipesCount === 1)
      write(state.pipes, 0, null);
    else
      forEach(state.pipes, write);
    src.emit('data', chunk);
    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }
  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;
    // if there were data event listeners added, then switch to old mode.
    if (EE.listenerCount(src, 'data') > 0)
      emitDataEvents(src);
    return;
  }
  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
// 'readable' handler installed while piping: if flow() previously drained
// the buffer completely (ranOut), restart it now that data is available.
// Invoked with the source stream as `this`.
function pipeOnReadable() {
  var state = this._readableState;
  if (state.ranOut) {
    state.ranOut = false;
    flow(this);
  }
}
// Detach `dest` (or, with no argument, every destination) from this
// stream's pipe set, emitting 'unpipe' on each removed destination and
// stopping the flow when the set becomes empty. Returns `this`.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;
  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;
  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;
    if (!dest)
      dest = state.pipes;
    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }
  // slow case. multiple pipe destinations.
  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }
  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;
  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // Collapse back to the single-destination representation.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];
  dest.emit('unpipe', this);
  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
//
// Wraps EventEmitter#on: attaching a 'data' listener on a non-flowing
// stream switches to old (push) mode; the first 'readable' listener
// primes the buffer (or re-emits if data is already buffered).
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);
  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        // read(0) kicks off a _read without consuming anything.
        this.read(0);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }
  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
// (emitDataEvents replaces these methods with old-mode versions, so each
// body runs at most once per stream.)
Readable.prototype.resume = function() {
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};
Readable.prototype.pause = function() {
  // startPaused=true: convert to old mode but don't start emitting yet.
  emitDataEvents(this, true);
  this.emit('pause');
};
// Convert this stream into an old-style ('data'-event) stream: replace
// pipe/on/pause/resume with legacy versions and drain the buffer through
// 'data' events whenever not paused. Irreversible; illegal once the new
// style flow() has started.
function emitDataEvents(stream, startPaused) {
  var state = stream._readableState;
  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }
  // Closure state for the old-mode emulation.
  var paused = startPaused || false;
  var readable = false;
  // convert to an old-style stream.
  stream.readable = true;
  stream.pipe = Stream.prototype.pipe;
  stream.on = stream.addListener = Stream.prototype.on;
  stream.on('readable', function() {
    readable = true;
    var c;
    // Drain everything currently buffered unless paused.
    while (!paused && (null !== (c = stream.read())))
      stream.emit('data', c);
    if (c === null) {
      readable = false;
      stream._readableState.needReadable = true;
    }
  });
  stream.pause = function() {
    paused = true;
    this.emit('pause');
  };
  stream.resume = function() {
    paused = false;
    // If data was already available, re-fire 'readable' next tick to
    // restart draining; otherwise prime the buffer with read(0).
    if (readable)
      setImmediate(function() {
        stream.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };
  // now make it start, just in case it hadn't already.
  stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Takes a legacy ('data'-event) stream and republishes its output through
// this Readable, honoring back-pressure by pausing/resuming the source.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;
  // EOF from the source: flush any partial characters the StringDecoder
  // is holding, then signal end-of-stream with push(null).
  stream.on('end', function() {
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });
  // Forward each data chunk; when push() reports the buffer is full,
  // pause the source until _read() asks for more.
  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    // Skip empty chunks (but let any object through in object mode).
    if (!chunk || !state.objectMode && !chunk.length)
      return;
    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });
  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  // (The IIFE captures `i` per iteration so each proxy calls the right
  // underlying method.)
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }
  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, function() {
      // BUG FIX: Function.prototype.apply takes an *array-like argument
      // list* as its second parameter. The previous code did
      // `self.emit.apply(self, ev, x)`, which used the event-name string
      // itself as the argument list (spreading its characters as the
      // emitted arguments) and silently dropped the actual payload.
      // Build a proper [event, ...args] list so listeners receive the
      // event name plus all original arguments.
      return self.emit.apply(self,
          [ev].concat(Array.prototype.slice.call(arguments)));
    });
  });
  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };
  return self;
};
// exposed for testing purposes only; not part of the public stream API.
Readable._fromList = fromList;
// Pluck n bytes (or one object) off state.buffer. state.length is the
// combined length of everything buffered; the caller adjusts it after we
// return. Consumed chunks are removed from (or trimmed in) the buffer.
// Chunks are strings when a decoder is configured, Buffers otherwise.
function fromList(n, state) {
  var list = state.buffer;
  var stringMode = !!state.decoder;

  // nothing in the list, definitely empty.
  if (list.length === 0) {
    return null;
  }
  if (state.length === 0) {
    return null;
  }
  // object mode hands out exactly one buffered object per call.
  if (state.objectMode) {
    return list.shift();
  }
  // no count, or asking for everything: concatenate and empty the buffer.
  if (!n || n >= state.length) {
    var all = stringMode ? list.join('') : Buffer.concat(list, state.length);
    list.length = 0;
    return all;
  }
  var first = list[0];
  // entirely inside the first chunk: split it. slice() behaves the same
  // for Buffers and strings.
  if (n < first.length) {
    list[0] = first.slice(n);
    return first.slice(0, n);
  }
  // exactly the first chunk: a perfect match.
  if (n === first.length) {
    return list.shift();
  }
  // spans several chunks: accumulate n units from the front of the list.
  // (n < state.length here, so the list cannot run out first.)
  var ret = stringMode ? '' : new Buffer(n);
  var copied = 0;
  while (copied < n && list.length > 0) {
    var chunk = list[0];
    var take = Math.min(n - copied, chunk.length);
    if (stringMode) {
      ret += chunk.slice(0, take);
    } else {
      chunk.copy(ret, copied, 0, take);
    }
    if (take < chunk.length) {
      list[0] = chunk.slice(take);
    } else {
      list.shift();
    }
    copied += take;
  }
  return ret;
}
// Finish up an ended, fully drained stream: emit 'end' on a later tick,
// only once, and only after the consumer has actually called read()
// (state.calledRead) so a consumer doing other I/O first can't miss it.
function endReadable(stream) {
  var state = stream._readableState;
  // Getting here with bytes still buffered is a bug in the caller.
  if (state.length > 0) {
    throw new Error('endReadable called on non-empty stream');
  }
  if (state.endEmitted || !state.calledRead) {
    return;
  }
  state.ended = true;
  setImmediate(function() {
    // Check that we didn't get one last unshift in the meantime.
    if (!state.endEmitted && state.length === 0) {
      state.endEmitted = true;
      stream.readable = false;
      stream.emit('end');
    }
  });
}
// Minimal Array#forEach stand-in (for old environments): calls f(value,
// index) for each element, bounded by the length at entry.
function forEach(xs, f) {
  var total = xs.length;
  for (var idx = 0; idx < total; idx++) {
    f(xs[idx], idx);
  }
}
// Minimal Array#indexOf stand-in: first index whose element is strictly
// equal to x, or -1 when absent.
function indexOf(xs, x) {
  var total = xs.length;
  for (var idx = 0; idx < total; idx++) {
    if (xs[idx] === x) {
      return idx;
    }
  }
  return -1;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./index.js":9,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"buffer":4,"events":2,"inherits":16,"process/browser.js":10,"string_decoder":15}],13:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a transform stream is a readable/writable stream where you do | |
// something with the data. Sometimes it's called a "filter", | |
// but that's not a great name for it, since that implies a thing where | |
// some bits pass through, and others are simply ignored. (That would | |
// be a valid example of a transform, of course.) | |
// | |
// While the output is causally related to the input, it's not a | |
// necessarily symmetric or synchronous transformation. For example, | |
// a zlib stream might take multiple plain-text writes(), and then | |
// emit a single compressed chunk some time in the future. | |
// | |
// Here's how this works: | |
// | |
// The Transform stream has all the aspects of the readable and writable | |
// stream classes. When you write(chunk), that calls _write(chunk,cb) | |
// internally, and returns false if there's a lot of pending writes | |
// buffered up. When you call read(), that calls _read(n) until | |
// there's enough pending readable data buffered up. | |
// | |
// In a transform stream, the written data is placed in a buffer. When | |
// _read(n) is called, it transforms the queued up data, calling the | |
// buffered _write cb's as it consumes chunks. If consuming a single | |
// written chunk would result in multiple output chunks, then the first | |
// outputted bit calls the readcb, and subsequent chunks just go into | |
// the read buffer, and will cause it to emit 'readable' if necessary. | |
// | |
// This way, back-pressure is actually determined by the reading side, | |
// since _read has to be called to start processing a new chunk. However, | |
// a pathological inflate type of transform can cause excessive buffering | |
// here. For example, imagine a stream where every byte of input is | |
// interpreted as an integer from 0-255, and then results in that many | |
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in | |
// 1kb of data being output. In this case, you could write a very small | |
// amount of input, and end up with a very large amount of output. In | |
// such a pathological inflating mechanism, there'd be no way to tell | |
// the system to stop doing the transform. A single 4MB write could | |
// cause the system to run out of memory. | |
// | |
// However, even in such a pathological case, only a single written chunk | |
// would be consumed, and then the rest would wait (un-transformed) until | |
// the results of the previous transformed chunk were consumed. | |
// Transform is this module's sole export.
module.exports = Transform;
var Duplex = require('./duplex.js');
var inherits = require('inherits');
// A Transform is a Duplex whose write side feeds its read side through
// the user-supplied _transform method.
inherits(Transform, Duplex);
// Per-stream bookkeeping for Transform: the pending write (chunk plus its
// completion callback) and a trampoline handed to _transform as its
// "done" callback, pre-bound to the owning stream.
function TransformState(options, stream) {
  this.needTransform = false;
  this.transforming = false;
  this.writecb = null;
  this.writechunk = null;
  this.afterTransform = function(err, output) {
    return afterTransform(stream, err, output);
  };
}
// Runs when _transform finishes a chunk: release the pending write
// callback, push any produced data onto the read side, and immediately
// request the next chunk if the read side is still hungry.
function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;
  var cb = ts.writecb;
  if (!cb) {
    // _transform's callback fired with no write pending: misuse.
    return stream.emit('error', new Error('no writecb in Transform class'));
  }
  ts.writechunk = null;
  ts.writecb = null;
  if (data !== null && data !== undefined) {
    stream.push(data);
  }
  cb(er);
  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}
// Construct a Transform stream. Callable with or without `new`. Sets up
// the pending-write bookkeeping and arranges for _flush (if defined) to
// run when the writable side finishes, before signaling read-side EOF.
function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);
  Duplex.call(this, options);
  var ts = this._transformState = new TransformState(options, this);
  // when the writable side finishes, then flush out anything remaining.
  var stream = this;
  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;
  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;
  this.once('finish', function() {
    if ('function' === typeof this._flush)
      this._flush(function(er) {
        done(stream, er);
      });
    else
      done(stream);
  });
}
// Producing readable output means the pending demand has been satisfied,
// so clear needTransform before deferring to the normal Duplex push.
Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// Override _transform in implementation classes:
//  - `chunk` is an input chunk; call push(newChunk) zero or more times to
//    emit transformed output on the readable side.
//  - call cb(err) when done with this chunk. Passing an error puts the
//    hurt on the whole operation; never calling cb stalls the stream.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};
// Buffer one incoming write (Transform holds at most one pending chunk)
// and, unless a transform is already running, kick the read side so the
// chunk gets processed as soon as it's wanted.
Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (ts.transforming) {
    return;
  }
  var rs = this._readableState;
  if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) {
    this._read(rs.highWaterMark);
  }
};
// Readable-side pull. The argument is ignored; all real work happens in
// _transform. Reaching here means the readable side wants more data.
Transform.prototype._read = function(n) {
  var state = this._transformState;
  var ready = state.writechunk && state.writecb && !state.transforming;
  if (!ready) {
    // remember that a transform is wanted, so the next incoming chunk
    // gets processed immediately
    state.needTransform = true;
    return;
  }
  state.transforming = true;
  this._transform(state.writechunk, state.writeencoding, state.afterTransform);
};
// Final step once the writable side has finished and _flush (if any)
// has run: either surface the error, or end the readable side.
function done(stream, er) {
  if (er) {
    return stream.emit('error', er);
  }
  // nothing left in the write buffer means no more output can ever be
  // produced, so it is safe to push EOF
  var ws = stream._writableState;
  var rs = stream._readableState;
  var ts = stream._transformState;
  if (ws.length) {
    throw new Error('calling transform done when ws.length != 0');
  }
  if (ts.transforming) {
    throw new Error('calling transform done when still transforming');
  }
  return stream.push(null);
}
},{"./duplex.js":8,"inherits":16}],14:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// A bit simpler than readable streams. | |
// Implement an async ._write(chunk, cb), and it'll handle all | |
// the drain event emission and buffering. | |
module.exports = Writable; | |
Writable.WritableState = WritableState; | |
// Feature-detected type checks: use `instanceof` where the constructor
// exists, otherwise fall back to matching the constructor's name.
var isUint8Array;
if (typeof Uint8Array !== 'undefined') {
  isUint8Array = function (x) { return x instanceof Uint8Array };
} else {
  isUint8Array = function (x) {
    return x && x.constructor && x.constructor.name === 'Uint8Array'
  };
}
var isArrayBuffer;
if (typeof ArrayBuffer !== 'undefined') {
  isArrayBuffer = function (x) { return x instanceof ArrayBuffer };
} else {
  isArrayBuffer = function (x) {
    return x && x.constructor && x.constructor.name === 'ArrayBuffer'
  };
}
var inherits = require('inherits'); | |
var Stream = require('./index.js'); | |
var setImmediate = require('process/browser.js').nextTick; | |
var Buffer = require('buffer').Buffer; | |
inherits(Writable, Stream); | |
// One buffered write request: the chunk, how it is encoded, and the
// user callback to fire once the chunk has been flushed.
function WriteReq(chunk, encoding, cb) {
  this.callback = cb;
  this.encoding = encoding;
  this.chunk = chunk;
}
// Per-stream bookkeeping for a Writable. Holds the buffering state,
// high-water mark, and lifecycle flags (ending/ended/finished).
function WritableState(options, stream) {
  options = options || {};
  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  // object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!options.objectMode;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  // set when write() returns false; cleared when 'drain' fires
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;
  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;
  // Crypto is kind of old and crusty.  Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;
  // a flag to see when we're in the middle of a write.
  this.writing = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick.  We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;
  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function(er) {
    onwrite(stream, er);
  };
  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;
  // the amount that is being written when _write is called.
  this.writelen = 0;
  // queued WriteReq objects waiting for the in-flight write to finish
  this.buffer = [];
}
// Writable stream constructor (readable-stream fork).
function Writable(options) {
  // Writable ctor is applied to Duplexes, though they're not
  // instanceof Writable, they're instanceof Readable.
  if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
    return new Writable(options);
  this._writableState = new WritableState(options, this);
  // legacy.
  this.writable = true;
  Stream.call(this);
}
// A Writable is a sink; piping FROM one is a programmer error, so
// surface it loudly instead of silently doing nothing.
Writable.prototype.pipe = function() {
  var err = new Error('Cannot pipe. Not readable.');
  this.emit('error', err);
};
// Reject a write() arriving after end(): the 'error' event fires
// synchronously, while the user callback is deferred one tick.
function writeAfterEnd(stream, state, cb) {
  var error = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', error);
  setImmediate(function deliver() {
    cb(error);
  });
}
// Anything that is not a buffer, string, null, or undefined is invalid
// outside of objectMode. (In objectMode every chunk counts as length 1
// and the watermarks measure object counts, not bytes.) Invalid chunks
// emit 'error' synchronously and fail the callback on the next tick.
function validChunk(stream, state, chunk, cb) {
  var isBuf = Buffer.isBuffer(chunk);
  var isStr = typeof chunk === 'string';
  var isNil = chunk === null || chunk === undefined;
  if (isBuf || isStr || isNil || state.objectMode) {
    return true;
  }
  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  setImmediate(function() {
    cb(er);
  });
  return false;
}
// Public write(): normalize arguments, coerce typed-array chunks to
// Buffers, then either reject (after end), error (invalid chunk), or
// buffer/dispatch the chunk. Returns false when the caller should wait
// for 'drain' before writing more.
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  // write(chunk, cb) form
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  // accept Uint8Array / ArrayBuffer chunks by copying into a Buffer
  if (!Buffer.isBuffer(chunk) && isUint8Array(chunk))
    chunk = new Buffer(chunk);
  if (isArrayBuffer(chunk) && typeof Uint8Array !== 'undefined')
    chunk = new Buffer(new Uint8Array(chunk));
  // buffers carry the sentinel encoding 'buffer'; strings fall back to
  // the stream's default encoding
  if (Buffer.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;
  if (typeof cb !== 'function')
    cb = function() {};
  if (state.ended)
    writeAfterEnd(this, state, cb);
  else if (validChunk(this, state, chunk, cb))
    ret = writeOrBuffer(this, state, chunk, encoding, cb);
  return ret;
};
// Convert a string chunk into a Buffer, unless the stream is in
// objectMode or string decoding has been explicitly disabled.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
      state.decodeStrings !== false &&
      typeof chunk === 'string';
  if (shouldDecode) {
    chunk = new Buffer(chunk, encoding);
  }
  return chunk;
}
// Queue the chunk if a write is already in flight, otherwise hand it
// straight to _write. Returns false once the pending length reaches the
// high-water mark, signalling the caller to wait for 'drain'.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var belowHwm = state.length < state.highWaterMark;
  state.needDrain = !belowHwm;
  if (state.writing) {
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  } else {
    doWrite(stream, state, len, chunk, encoding, cb);
  }
  return belowHwm;
}
// Dispatch one chunk to the underlying _write, recording the in-flight
// length and user callback, and flagging the synchronous window so
// onwrite can tell whether _write completed on the same tick.
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writecb = cb;
  state.writelen = len;
  state.writing = true;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Deliver a write error: if _write failed synchronously, defer the user
// callback a tick so it never runs inside the write() call itself.
function onwriteError(stream, state, sync, er, cb) {
  if (sync) {
    setImmediate(function() {
      cb(er);
    });
  } else {
    cb(er);
  }
  stream.emit('error', er);
}
// Clear the in-flight write bookkeeping and shrink the pending length
// by the amount that was just flushed.
function onwriteStateUpdate(state) {
  var flushed = state.writelen;
  state.writing = false;
  state.writecb = null;
  state.writelen = 0;
  state.length -= flushed;
}
// Completion callback for _write. Snapshots sync/cb before clearing the
// in-flight state, then either surfaces the error or drains the buffer
// and fires the user callback (deferred a tick when _write completed
// synchronously, so callbacks never run inside write()).
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate(state);
  if (er)
    onwriteError(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(stream, state);
    if (!finished && !state.bufferProcessing && state.buffer.length)
      clearBuffer(stream, state);
    if (sync) {
      setImmediate(function() {
        afterWrite(stream, state, finished, cb);
      });
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}
// Runs after each completed write: maybe emit 'drain', fire the user
// callback, and finish the stream if this was the last pending write.
function afterWrite(stream, state, finished, cb) {
  if (finished) {
    cb();
    finishMaybe(stream, state);
  } else {
    onwriteDrain(stream, state);
    cb();
  }
}
// Emit 'drain' once the buffer has fully emptied after write() returned
// false. (Callers arrange for this to run on a later tick so consumers
// have a chance to attach a 'drain' listener first.)
function onwriteDrain(stream, state) {
  var shouldDrain = state.length === 0 && state.needDrain;
  if (!shouldDrain)
    return;
  state.needDrain = false;
  stream.emit('drain');
}
// Flush queued WriteReqs to _write. bufferProcessing guards against
// re-entrant onwrite calls on the same tick; if a write goes async
// (state.writing still true after doWrite), we stop and keep the rest
// of the buffer for the next onwrite.
function clearBuffer(stream, state) {
  state.bufferProcessing = true;
  // NOTE: `c` is intentionally `var`-scoped — it is read after the loop
  // to trim the processed entries off the buffer.
  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    var len = state.objectMode ? 1 : chunk.length;
    doWrite(stream, state, len, chunk, encoding, cb);
    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }
  state.bufferProcessing = false;
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
// Abstract: concrete writables must override _write; the base class
// fails every chunk via the callback.
Writable.prototype._write = function(chunk, encoding, cb) {
  cb(new Error('not implemented'));
};
// end([chunk[, encoding]][, cb]) — optionally write a final chunk, then
// begin shutting the stream down. Extra end() calls are ignored.
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  // shuffle optional arguments into place
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  if (chunk !== undefined && chunk !== null) {
    this.write(chunk, encoding);
  }
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished) {
    endWritable(this, state, cb);
  }
};
// True once end() has been called, the buffer has drained, no write is
// in flight, and 'finish' has not already been emitted.
function needFinish(stream, state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  return !state.finished && !state.writing;
}
// Emit 'finish' exactly once, when the stream has fully drained after
// end(). Returns whether the stream is (now) finished.
function finishMaybe(stream, state) {
  var ready = needFinish(stream, state);
  if (ready) {
    state.finished = true;
    stream.emit('finish');
  }
  return ready;
}
// Transition into the ending state and arrange for the user's end()
// callback to fire once 'finish' is emitted.
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    // already finished? fire on the next tick; otherwise wait for it
    if (state.finished)
      setImmediate(cb);
    else
      stream.once('finish', cb);
  }
  state.ended = true;
}
},{"./index.js":9,"buffer":4,"inherits":16,"process/browser.js":10}],15:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var Buffer = require('buffer').Buffer; | |
// Throw early on encodings Buffer cannot handle; falsy encodings are
// allowed through (the caller defaults them to utf8).
function assertEncoding(encoding) {
  if (!encoding) return;
  if (!Buffer.isEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
// Incremental decoder that never splits a multibyte character across
// write() calls. Unknown-but-valid encodings fall through to a plain
// pass-through write.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // normalize e.g. 'UTF-8' / 'utf_8' to 'utf8'
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  // NOTE(review): validates the raw argument, not the normalized
  // this.encoding — appears intentional upstream, but worth confirming.
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // single-byte or opaque encodings need no partial-char tracking
      this.write = passThroughWrite;
      return;
  }
  // charBuffer holds the bytes of one incomplete character (max 6);
  // charReceived/charLength track how many bytes we have vs. need.
  this.charBuffer = new Buffer(6);
  this.charReceived = 0;
  this.charLength = 0;
};
// Decode as much of `buffer` as possible, returning complete characters
// and stashing any trailing partial character for the next call.
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  var offset = 0;
  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var i = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;
    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, offset, i);
    this.charReceived += (i - offset);
    offset = i;
    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }
    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
    // lead surrogate (D800-DBFF) is also the incomplete character
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // the decoded char is only the first half of a surrogate pair:
      // keep accumulating until the trail surrogate's bytes arrive too
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;
    // if there are no more bytes in this buffer, just emit our char
    if (i == buffer.length) return charStr;
    // otherwise cut off the characters end from the beginning of this buffer
    buffer = buffer.slice(i, buffer.length);
    break;
  }
  var lenIncomplete = this.detectIncompleteChar(buffer);
  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
    this.charReceived = lenIncomplete;
    end -= lenIncomplete;
  }
  charStr += buffer.toString(this.encoding, 0, end);
  // NOTE: `end` is intentionally reused here — it now indexes the last
  // decoded character rather than the byte boundary above.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    // shift the buffered bytes over and re-buffer the lead surrogate
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
    return charStr.substring(0, end);
  }
  // or just emit the charStr
  return charStr;
};
// UTF-8 variant: scan the last up-to-3 bytes for a multibyte lead byte
// whose character would run past the end of the buffer. Sets
// this.charLength to the full character width and returns how many of
// its bytes are already present.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;
  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];
    // See http://en.wikipedia.org/wiki/UTF-8#Description
    // 110XXXXX — lead byte of a 2-byte char, only valid as the last byte
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }
    // 1110XXXX — lead byte of a 3-byte char
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }
    // 11110XXX — lead byte of a 4-byte char
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  return i;
};
// Flush the decoder: decode a final buffer if given, then emit whatever
// partial-character bytes are still held, decoded as-is.
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length) {
    res = this.write(buffer);
  }
  if (this.charReceived) {
    res += this.charBuffer.slice(0, this.charReceived).toString(this.encoding);
  }
  return res;
};
// write() used for encodings that need no partial-character tracking:
// just decode the whole buffer.
function passThroughWrite(buffer) {
  var encoding = this.encoding;
  return buffer.toString(encoding);
}
// UTF-16LE code units are 2 bytes: an odd byte count means the last
// byte belongs to an unfinished unit.
function utf16DetectIncompleteChar(buffer) {
  var trailing = buffer.length % 2;
  this.charReceived = trailing;
  this.charLength = trailing ? 2 : 0;
  return trailing;
}
// Base64 groups are 3 bytes: any remainder is an unfinished group.
function base64DetectIncompleteChar(buffer) {
  var trailing = buffer.length % 3;
  this.charReceived = trailing;
  this.charLength = trailing ? 3 : 0;
  return trailing;
}
},{"buffer":4}],16:[function(require,module,exports){ | |
// Minimal `util.inherits` shim: native Object.create when available,
// otherwise the classic temporary-constructor trick for old browsers.
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  // old school shim for old browsers
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    var TempCtor = function () {}
    TempCtor.prototype = superCtor.prototype
    ctor.prototype = new TempCtor()
    ctor.prototype.constructor = ctor
  }
}
},{}],17:[function(require,module,exports){ | |
'use strict';
// Sentinel resolver for the lie promise library: `new Promise(INTERNAL)`
// constructs a pending promise without invoking any user resolver.
module.exports = INTERNAL;
function INTERNAL() {}
},{}],18:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var reject = require('./reject'); | |
var resolve = require('./resolve'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = all; | |
// Promise.all for the `lie` library: fulfills with an array of results
// in input order, or rejects with the first error. Non-arrays reject
// with a TypeError; an empty array resolves immediately with [].
function all(iterable) {
  if (Object.prototype.toString.call(iterable) !== '[object Array]') {
    return reject(new TypeError('must be an array'));
  }
  var len = iterable.length;
  var called = false;
  if (!len) {
    return resolve([]);
  }
  var values = new Array(len);
  var resolved = 0;
  var i = -1;
  var promise = new Promise(INTERNAL);
  while (++i < len) {
    allResolver(iterable[i], i);
  }
  return promise;
  // Resolve one entry; settle the outer promise once every entry is in
  // or as soon as any entry rejects.
  function allResolver(value, i) {
    resolve(value).then(resolveFromAll, function (error) {
      if (!called) {
        called = true;
        handlers.reject(promise, error);
      }
    });
    function resolveFromAll(outValue) {
      values[i] = outValue;
      // BUGFIX: was bitwise `&`, which relied on 0/1 coercion of the two
      // comparisons; logical `&&` states the intended condition directly.
      if (++resolved === len && !called) {
        called = true;
        handlers.resolve(promise, values);
      }
    }
  }
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21,"./reject":24,"./resolve":25}],19:[function(require,module,exports){ | |
'use strict'; | |
var tryCatch = require('./tryCatch'); | |
var resolveThenable = require('./resolveThenable'); | |
var states = require('./states'); | |
// Fulfill `self` with `value`. If value is a thenable, adopt its state
// instead; otherwise transition to FULFILLED and flush the queue of
// pending .then() registrations. Returns `self` for chaining.
exports.resolve = function (self, value) {
  // getThen may throw (spec: accessing .then once); treat that as rejection
  var result = tryCatch(getThen, value);
  if (result.status === 'error') {
    return exports.reject(self, result.value);
  }
  var thenable = result.value;
  if (thenable) {
    resolveThenable.safely(self, thenable);
  } else {
    self.state = states.FULFILLED;
    self.outcome = value;
    var i = -1;
    var len = self.queue.length;
    while (++i < len) {
      self.queue[i].callFulfilled(value);
    }
  }
  return self;
};
exports.reject = function (self, error) { | |
self.state = states.REJECTED; | |
self.outcome = error; | |
var i = -1; | |
var len = self.queue.length; | |
while (++i < len) { | |
self.queue[i].callRejected(error); | |
} | |
return self; | |
}; | |
// Spec-compliant single access of `obj.then`: returns a function that
// applies the captured `then` with `obj` as receiver when obj is a
// thenable object, otherwise undefined.
function getThen(obj) {
  // Make sure we only access the accessor once as required by the spec
  var then = obj && obj.then;
  var isThenable = obj && typeof obj === 'object' && typeof then === 'function';
  if (!isThenable) {
    return;
  }
  return function applyThen() {
    then.apply(obj, arguments);
  };
}
},{"./resolveThenable":26,"./states":27,"./tryCatch":28}],20:[function(require,module,exports){ | |
module.exports = exports = require('./promise'); | |
exports.resolve = require('./resolve'); | |
exports.reject = require('./reject'); | |
exports.all = require('./all'); | |
exports.race = require('./race'); | |
},{"./all":18,"./promise":21,"./race":23,"./reject":24,"./resolve":25}],21:[function(require,module,exports){ | |
'use strict'; | |
var unwrap = require('./unwrap'); | |
var INTERNAL = require('./INTERNAL'); | |
var resolveThenable = require('./resolveThenable'); | |
var states = require('./states'); | |
var QueueItem = require('./queueItem'); | |
module.exports = Promise; | |
// lie's Promise constructor. A resolver of INTERNAL builds a pending
// promise without running user code; anything non-callable is rejected
// with a TypeError.
function Promise(resolver) {
  if (!(this instanceof Promise)) {
    return new Promise(resolver);
  }
  if (typeof resolver !== 'function') {
    // BUGFIX: message previously misspelled 'reslover'
    throw new TypeError('resolver must be a function');
  }
  this.state = states.PENDING;
  this.queue = [];
  this.outcome = void 0;
  if (resolver !== INTERNAL) {
    resolveThenable.safely(this, resolver);
  }
}
// .catch(handler) is sugar for .then(null, handler). Bracket access
// keeps old parsers happy with the reserved word.
Promise.prototype['catch'] = function (onRejected) {
  return this.then(null, onRejected);
};
// Register handlers. A settled promise with no matching handler is
// returned as-is (nothing to do). Otherwise a derived promise is either
// settled immediately (via deferred unwrap) or queued until this one
// settles.
Promise.prototype.then = function (onFulfilled, onRejected) {
  if (typeof onFulfilled !== 'function' && this.state === states.FULFILLED ||
    typeof onRejected !== 'function' && this.state === states.REJECTED) {
    return this;
  }
  var promise = new Promise(INTERNAL);
  if (this.state !== states.PENDING) {
    // already settled: run the relevant handler asynchronously
    var resolver = this.state === states.FULFILLED ? onFulfilled: onRejected;
    unwrap(promise, resolver, this.outcome);
  } else {
    this.queue.push(new QueueItem(promise, onFulfilled, onRejected));
  }
  return promise;
};
},{"./INTERNAL":17,"./queueItem":22,"./resolveThenable":26,"./states":27,"./unwrap":29}],22:[function(require,module,exports){ | |
'use strict'; | |
var handlers = require('./handlers'); | |
var unwrap = require('./unwrap'); | |
module.exports = QueueItem; | |
// One pending .then() registration: the derived promise plus optional
// fulfillment/rejection handlers. When a handler is supplied, the
// corresponding call* method is swapped for the otherCall* variant that
// routes through unwrap (async handler execution).
function QueueItem(promise, onFulfilled, onRejected) {
  this.promise = promise;
  if (typeof onFulfilled === 'function') {
    this.onFulfilled = onFulfilled;
    this.callFulfilled = this.otherCallFulfilled;
  }
  if (typeof onRejected === 'function') {
    this.onRejected = onRejected;
    this.callRejected = this.otherCallRejected;
  }
}
// Default (no handler): pass the value straight through to the promise.
QueueItem.prototype.callFulfilled = function (value) {
  handlers.resolve(this.promise, value);
};
// Handler present: run it on a later tick and settle with its result.
QueueItem.prototype.otherCallFulfilled = function (value) {
  unwrap(this.promise, this.onFulfilled, value);
};
// Default (no handler): propagate the rejection unchanged.
QueueItem.prototype.callRejected = function (value) {
  handlers.reject(this.promise, value);
};
QueueItem.prototype.otherCallRejected = function (value) {
  unwrap(this.promise, this.onRejected, value);
};
},{"./handlers":19,"./unwrap":29}],23:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var reject = require('./reject'); | |
var resolve = require('./resolve'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = race; | |
// Promise.race for `lie`: settles with whichever input settles first.
// (Note: this fork resolves an EMPTY array immediately with [].)
function race(iterable) {
  if (Object.prototype.toString.call(iterable) !== '[object Array]') {
    return reject(new TypeError('must be an array'));
  }
  var len = iterable.length;
  var called = false;
  if (!len) {
    return resolve([]);
  }
  var resolved = 0;
  var promise = new Promise(INTERNAL);
  for (var i = 0; i < len; i++) {
    resolver(iterable[i]);
  }
  return promise;
  // First settle wins; `called` makes every later settle a no-op.
  function resolver(value) {
    resolve(value).then(function (response) {
      if (!called) {
        called = true;
        handlers.resolve(promise, response);
      }
    }, function (error) {
      if (!called) {
        called = true;
        handlers.reject(promise, error);
      }
    });
  }
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21,"./reject":24,"./resolve":25}],24:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = reject; | |
// Build a promise that is already rejected with `reason`.
function reject(reason) {
  var rejected = new Promise(INTERNAL);
  return handlers.reject(rejected, reason);
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21}],25:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = resolve; | |
var FALSE = handlers.resolve(new Promise(INTERNAL), false); | |
var NULL = handlers.resolve(new Promise(INTERNAL), null); | |
var UNDEFINED = handlers.resolve(new Promise(INTERNAL), void 0); | |
var ZERO = handlers.resolve(new Promise(INTERNAL), 0); | |
var EMPTYSTRING = handlers.resolve(new Promise(INTERNAL), ''); | |
// Build a fulfilled promise for `value`. Existing lie promises pass
// through untouched; falsy primitives reuse pre-resolved singletons to
// avoid churning allocations for the common cases.
function resolve(value) {
  if (value) {
    if (value instanceof Promise) {
      return value;
    }
    return handlers.resolve(new Promise(INTERNAL), value);
  }
  switch (typeof value) {
  case 'boolean':
    return FALSE;
  case 'undefined':
    return UNDEFINED;
  case 'object':
    // the only falsy object is null
    return NULL;
  case 'number':
    // covers 0, -0 and NaN
    return ZERO;
  case 'string':
    return EMPTYSTRING;
  }
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21}],26:[function(require,module,exports){ | |
'use strict'; | |
var handlers = require('./handlers'); | |
var tryCatch = require('./tryCatch'); | |
// Run a resolver/thenable into `self` exactly once: the first call to
// either callback wins, later calls are ignored, and a synchronous
// throw is converted into a rejection.
function safelyResolveThenable(self, thenable) {
  var settled = false;
  function onError(value) {
    if (settled) {
      return;
    }
    settled = true;
    handlers.reject(self, value);
  }
  function onSuccess(value) {
    if (settled) {
      return;
    }
    settled = true;
    handlers.resolve(self, value);
  }
  function tryToUnwrap() {
    thenable(onSuccess, onError);
  }
  var result = tryCatch(tryToUnwrap);
  if (result.status === 'error') {
    onError(result.value);
  }
}
exports.safely = safelyResolveThenable;
},{"./handlers":19,"./tryCatch":28}],27:[function(require,module,exports){ | |
// Lazy man's symbols for states
// Each state is a unique array instance compared by identity — a cheap
// stand-in for Symbols in pre-ES6 environments.
exports.REJECTED = ['REJECTED'];
exports.FULFILLED = ['FULFILLED'];
exports.PENDING = ['PENDING'];
},{}],28:[function(require,module,exports){ | |
'use strict'; | |
module.exports = tryCatch; | |
// Invoke func(value), packaging the outcome as {status, value} so
// callers can branch on success/error without try/catch of their own.
function tryCatch(func, value) {
  var result = {};
  try {
    result.status = 'success';
    result.value = func(value);
  } catch (e) {
    result.status = 'error';
    result.value = e;
  }
  return result;
}
},{}],29:[function(require,module,exports){ | |
'use strict'; | |
var immediate = require('immediate'); | |
var handlers = require('./handlers'); | |
module.exports = unwrap; | |
// On a later tick, run func(value) and settle `promise` with the
// outcome; a throw rejects, and resolving a promise with itself is
// rejected per the Promises/A+ spec.
function unwrap(promise, func, value) {
  immediate(function () {
    var returnValue;
    try {
      returnValue = func(value);
    } catch (e) {
      // handlers could throw for big thenables, so keep this in a try/catch
      return handlers.reject(promise, e);
    }
    if (returnValue === promise) {
      handlers.reject(promise, new TypeError('Cannot resolve promise with itself'));
    } else {
      handlers.resolve(promise, returnValue);
    }
  });
}
},{"./handlers":19,"immediate":30}],30:[function(require,module,exports){ | |
'use strict'; | |
var types = [ | |
require('./nextTick'), | |
require('./mutation.js'), | |
require('./messageChannel'), | |
require('./stateChange'), | |
require('./timeout') | |
]; | |
// `draining` guards against scheduling a second drain while one runs;
// `queue` holds tasks awaiting the next tick.
var draining;
var queue = [];
// Run every queued task. Tasks scheduled DURING the drain land in a
// fresh queue and are executed in the same drain pass (the outer while),
// so the microtask-like queue always empties completely.
function drainQueue() {
  draining = true;
  var i, oldQueue;
  var len = queue.length;
  while (len) {
    oldQueue = queue;
    queue = [];
    i = -1;
    while (++i < len) {
      oldQueue[i]();
    }
    len = queue.length;
  }
  draining = false;
}
// Pick the first async-scheduling technique this environment supports
// (nextTick, MutationObserver, MessageChannel, script readystatechange,
// setTimeout). The timeout fallback's test() always returns true, so
// scheduleDrain is always assigned.
var scheduleDrain;
var i = -1;
var len = types.length;
while (++ i < len) {
  if (types[i] && types[i].test && types[i].test()) {
    scheduleDrain = types[i].install(drainQueue);
    break;
  }
}
module.exports = immediate;
// Queue `task` to run on the next tick. A drain is scheduled only when
// the queue transitions from empty and no drain is already in progress.
function immediate(task) {
  var isFirstTask = queue.push(task) === 1;
  if (isFirstTask && !draining) {
    scheduleDrain();
  }
}
},{"./messageChannel":31,"./mutation.js":32,"./nextTick":1,"./stateChange":33,"./timeout":34}],31:[function(require,module,exports){ | |
(function (global){
'use strict';
// Scheduler backed by MessageChannel: posting to port2 fires port1's
// onmessage handler asynchronously.
exports.test = function () {
  if (global.setImmediate) {
    // we can only get here in IE10
    // which doesn't handle postMessage well
    return false;
  }
  return typeof global.MessageChannel !== 'undefined';
};
exports.install = function (func) {
  var channel = new global.MessageChannel();
  channel.port1.onmessage = func;
  return function () {
    channel.port2.postMessage(0);
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],32:[function(require,module,exports){ | |
(function (global){
'use strict';
//based off rsvp https://github.com/tildeio/rsvp.js
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
var Mutation = global.MutationObserver || global.WebKitMutationObserver;
// Returns the constructor itself (or undefined) — callers only check
// truthiness, so no boolean coercion is needed.
exports.test = function () {
  return Mutation;
};
exports.install = function (handle) {
  var called = 0;
  var observer = new Mutation(handle);
  var element = global.document.createTextNode('');
  observer.observe(element, {
    characterData: true
  });
  // toggling the text node's data between "0" and "1" triggers the
  // observer callback asynchronously (a microtask)
  return function () {
    element.data = (called = ++called % 2);
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],33:[function(require,module,exports){ | |
(function (global){
'use strict';
// Scheduler for old IE: a <script> element's readystatechange event fires
// asynchronously once the element is inserted into the document.
exports.test = function () {
  return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
};
exports.install = function (handle) {
  return function () {
    // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
    // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
    var scriptEl = global.document.createElement('script');
    scriptEl.onreadystatechange = function () {
      handle();
      scriptEl.onreadystatechange = null;
      scriptEl.parentNode.removeChild(scriptEl);
      scriptEl = null;
    };
    global.document.documentElement.appendChild(scriptEl);
    // NOTE(review): this return value is never used — immediate() ignores
    // scheduleDrain()'s result. Kept for byte-compatibility.
    return handle;
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],34:[function(require,module,exports){ | |
'use strict'; | |
exports.test = function () { | |
return true; | |
}; | |
exports.install = function (t) { | |
return function () { | |
setTimeout(t, 0); | |
}; | |
}; | |
},{}],35:[function(require,module,exports){ | |
(function (process){ | |
var defined = require('defined'); | |
var createDefaultStream = require('./lib/default_stream'); | |
var Test = require('./lib/test'); | |
var createResult = require('./lib/results'); | |
var through = require('through'); | |
var canEmitExit = typeof process !== 'undefined' && process | |
&& typeof process.on === 'function' && process.browser !== true | |
; | |
var canExit = typeof process !== 'undefined' && process | |
&& typeof process.exit === 'function' | |
; | |
var nextTick = typeof setImmediate !== 'undefined' | |
? setImmediate | |
: process.nextTick | |
; | |
// The exported `test` function lazily creates a single exit-aware harness on
// first use, so merely requiring tape has no side effects.
exports = module.exports = (function () {
    var harness;
    var lazyLoad = function () {
        return getHarness().apply(this, arguments);
    };
    lazyLoad.only = function () {
        return getHarness().only.apply(this, arguments);
    };
    lazyLoad.createStream = function (opts) {
        if (!opts) opts = {};
        if (!harness) {
            // No harness yet: hand out an output stream now and wire the
            // harness to it when it is created.
            var output = through();
            getHarness({ stream: output, objectMode: opts.objectMode });
            return output;
        }
        return harness.createStream(opts);
    };
    return lazyLoad
    // Hoisted below the return; memoizes the singleton harness.
    function getHarness (opts) {
        if (!opts) opts = {};
        opts.autoclose = !canEmitExit;
        if (!harness) harness = createExitHarness(opts);
        return harness;
    }
})();
// Build a harness wired to the process lifecycle: pipes TAP output to
// conf.stream (or console), and on process exit flushes tests that never
// ended and sets a non-zero exit code on failure.
function createExitHarness (conf) {
    if (!conf) conf = {};
    var harness = createHarness({
        autoclose: defined(conf.autoclose, false)
    });
    var stream = harness.createStream({ objectMode: conf.objectMode });
    var es = stream.pipe(conf.stream || createDefaultStream());
    if (canEmitExit) {
        es.on('error', function (err) { harness._exitCode = 1 });
    }
    var ended = false;
    stream.on('end', function () { ended = true });
    if (conf.exit === false) return harness;
    // Browser bundles (process.browser === true) skip the exit wiring.
    if (!canEmitExit || !canExit) return harness;
    var _error;
    process.on('uncaughtException', function (err) {
        // Ignore EPIPE write errors (consumer went away); rethrow the rest
        // after remembering them so the exit handler stays quiet.
        if (err && err.code === 'EPIPE' && err.errno === 'EPIPE'
            && err.syscall === 'write') return;
        _error = err
        throw err
    })
    process.on('exit', function (code) {
        if (_error) {
            return
        }
        if (!ended) {
            // Stream never ended: some tests never completed; report them.
            var only = harness._results._only;
            for (var i = 0; i < harness._tests.length; i++) {
                var t = harness._tests[i];
                if (only && t.name !== only) continue;
                t._exit();
            }
        }
        harness.close();
        process.exit(code || harness._exitCode);
    });
    return harness;
}
exports.createHarness = createHarness;
exports.Test = Test;
exports.test = exports; // tap compat
exports.test.skip = Test.skip;
// NOTE(review): exitInterval is declared but never assigned or read in this
// chunk — likely vestigial.
var exitInterval;
// Build a standalone harness: a `test(name, conf, cb)` function that
// registers tests with a shared Results collector.
function createHarness (conf_) {
    if (!conf_) conf_ = {};
    var results = createResult();
    if (conf_.autoclose !== false) {
        results.once('done', function () { results.close() });
    }
    var test = function (name, conf, cb) {
        var t = new Test(name, conf, cb);
        test._tests.push(t);
        // Recursively watch subtests so any failing result anywhere in the
        // tree flips the harness exit code.
        (function inspectCode (st) {
            st.on('test', function sub (st_) {
                inspectCode(st_);
            });
            st.on('result', function (r) {
                if (!r.ok) test._exitCode = 1
            });
        })(t);
        results.push(t);
        return t;
    };
    test._results = results;
    test._tests = [];
    test.createStream = function (opts) {
        return results.createStream(opts);
    };
    var only = false;
    test.only = function (name) {
        if (only) throw new Error('there can only be one only test');
        results.only(name);
        only = true;
        return test.apply(null, arguments);
    };
    test._exitCode = 0;
    test.close = function () { results.close() };
    return test;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./lib/default_stream":36,"./lib/results":37,"./lib/test":38,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"defined":42,"through":45}],36:[function(require,module,exports){ | |
var through = require('through'); | |
module.exports = function () { | |
var line = ''; | |
var stream = through(write, flush); | |
return stream; | |
function write (buf) { | |
for (var i = 0; i < buf.length; i++) { | |
var c = typeof buf === 'string' | |
? buf.charAt(i) | |
: String.fromCharCode(buf[i]) | |
; | |
if (c === '\n') flush(); | |
else line += c; | |
} | |
} | |
function flush () { | |
try { console.log(line); } | |
catch (e) { stream.emit('error', e) } | |
line = ''; | |
} | |
}; | |
},{"through":45}],37:[function(require,module,exports){ | |
(function (process){ | |
var EventEmitter = require('events').EventEmitter; | |
var inherits = require('inherits'); | |
var through = require('through'); | |
var resumer = require('resumer'); | |
var inspect = require('object-inspect'); | |
var nextTick = typeof setImmediate !== 'undefined' | |
? setImmediate | |
: process.nextTick | |
; | |
module.exports = Results;
inherits(Results, EventEmitter);
// Aggregates test results and renders them as a TAP byte stream (or an
// object-mode row stream via createStream).
function Results () {
    if (!(this instanceof Results)) return new Results;
    this.count = 0;          // total assertions seen
    this.fail = 0;
    this.pass = 0;
    this._stream = through(); // raw TAP text output
    this.tests = [];          // tests queued but not yet run
}
// Create the consumer-facing output stream and kick off the test-run loop on
// the next tick. In objectMode, emits row objects (test/assert/end) instead
// of TAP text.
Results.prototype.createStream = function (opts) {
    if (!opts) opts = {};
    var self = this;
    var output, testId = 0;
    if (opts.objectMode) {
        output = through();
        // Translate each pushed test (and, recursively, its subtests) into
        // typed rows tagged with a numeric id / parent id.
        self.on('_push', function ontest (t, extra) {
            if (!extra) extra = {};
            var id = testId++;
            t.once('prerun', function () {
                var row = {
                    type: 'test',
                    name: t.name,
                    id: id
                };
                if (extra.parent) {
                    row.parent = extra.parent;
                }
                output.queue(row);
            });
            t.on('test', function (st) {
                ontest(st, { parent: id });
            });
            t.on('result', function (res) {
                res.test = id;
                res.type = 'assert';
                output.queue(res);
            });
            t.on('end', function () {
                output.queue({ type: 'end', test: id });
            });
        });
        self.on('done', function () { output.queue(null) });
    }
    else {
        // Text mode: a paused-then-resumed stream so the TAP header always
        // comes first.
        output = resumer();
        output.queue('TAP version 13\n');
        self._stream.pipe(output);
    }
    // Run queued tests one at a time; re-enter after each async test ends.
    nextTick(function next() {
        var t;
        while (t = getNextTest(self)) {
            t.run();
            if (!t.ended) return t.once('end', function(){ nextTick(next); });
        }
        self.emit('done');
    });
    return output;
};
// Queue a test for execution, subscribe to its result events, and notify
// object-mode streams via the internal '_push' event.
Results.prototype.push = function (t) {
    this.tests.push(t);
    this._watch(t);
    this.emit('_push', t);
};
// Record the single test name that should run. Calling .only() a second time
// is reported as a failing TAP assertion. (The original body referenced the
// undefined identifiers `self` and `write` here, so a second call threw a
// ReferenceError instead of reporting the problem.)
Results.prototype.only = function (name) {
    if (this._only) {
        this.count ++;
        this.fail ++;
        this._stream.queue('not ok ' + this.count + ' already called .only()\n');
    }
    this._only = name;
};
// Subscribe to a test's events and serialize them onto the TAP text stream;
// recurses into subtests. String results are emitted as TAP comments.
Results.prototype._watch = function (t) {
    var self = this;
    var write = function (s) { self._stream.queue(s) };
    t.once('prerun', function () {
        write('# ' + t.name + '\n');
    });
    t.on('result', function (res) {
        if (typeof res === 'string') {
            write('# ' + res + '\n');
            return;
        }
        write(encodeResult(res, self.count + 1));
        self.count ++;
        if (res.ok) self.pass ++
        else self.fail ++
    });
    t.on('test', function (st) { self._watch(st) });
};
// Emit the TAP trailer (plan line and totals) and end the stream. A second
// close() emits an error on the stream but still writes the trailer again.
Results.prototype.close = function () {
    var self = this;
    if (self.closed) self._stream.emit('error', new Error('ALREADY CLOSED'));
    self.closed = true;
    var write = function (s) { self._stream.queue(s) };
    write('\n1..' + self.count + '\n');
    write('# tests ' + self.count + '\n');
    write('# pass ' + self.pass + '\n');
    if (self.fail) write('# fail ' + self.fail + '\n')
    else write('\n# ok\n')
    self._stream.queue(null);
};
// Render one assertion result as TAP: the "ok/not ok" line, plus an indented
// YAML-ish diagnostic block (operator/expected/actual/at/stack) on failure.
function encodeResult (res, count) {
    var output = '';
    output += (res.ok ? 'ok ' : 'not ok ') + count;
    output += res.name ? ' ' + res.name.toString().replace(/\s+/g, ' ') : '';
    if (res.skip) output += ' # SKIP';
    else if (res.todo) output += ' # TODO';
    output += '\n';
    if (res.ok) return output;
    var outer = ' ';
    var inner = outer + ' ';
    output += outer + '---\n';
    output += inner + 'operator: ' + res.operator + '\n';
    if (has(res, 'expected') || has(res, 'actual')) {
        var ex = inspect(res.expected);
        var ac = inspect(res.actual);
        // Long values go on their own lines; short ones stay inline.
        if (Math.max(ex.length, ac.length) > 65) {
            output += inner + 'expected:\n' + inner + ' ' + ex + '\n';
            output += inner + 'actual:\n' + inner + ' ' + ac + '\n';
        }
        else {
            output += inner + 'expected: ' + ex + '\n';
            output += inner + 'actual: ' + ac + '\n';
        }
    }
    if (res.at) {
        output += inner + 'at: ' + res.at + '\n';
    }
    if (res.operator === 'error' && res.actual && res.actual.stack) {
        var lines = String(res.actual.stack).split('\n');
        output += inner + 'stack:\n';
        output += inner + ' ' + lines[0] + '\n';
        for (var i = 1; i < lines.length; i++) {
            output += inner + lines[i] + '\n';
        }
    }
    output += outer + '...\n';
    return output;
}
// Dequeue the next test to run. Without an .only() filter, simply take the
// head of the queue; with one, discard tests until the matching name is
// found (returns undefined when the queue is exhausted).
function getNextTest (results) {
    if (!results._only) {
        return results.tests.shift();
    }
    while (results.tests.length !== 0) {
        var candidate = results.tests.shift();
        if (candidate && results._only === candidate.name) {
            return candidate;
        }
    }
}
// Own-property check that ignores anything inherited from the prototype.
function has (obj, prop) {
    return {}.hasOwnProperty.call(obj, prop);
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"events":2,"inherits":16,"object-inspect":43,"resumer":44,"through":45}],38:[function(require,module,exports){ | |
(function (process,__dirname){ | |
var Stream = require('stream'); | |
var deepEqual = require('deep-equal'); | |
var defined = require('defined'); | |
var path = require('path'); | |
var inherits = require('inherits'); | |
var EventEmitter = require('events').EventEmitter; | |
module.exports = Test; | |
var nextTick = typeof setImmediate !== 'undefined' | |
? setImmediate | |
: process.nextTick | |
; | |
inherits(Test, EventEmitter); | |
// Normalize tape's flexible (name?, opts?, cb?) signature: arguments may
// appear in any order and any may be omitted; dispatch on typeof.
var getTestArgs = function (name_, opts_, cb_) {
    var parsed = { name: '(anonymous)', opts: {}, cb: undefined };
    for (var i = 0; i < arguments.length; i++) {
        var arg = arguments[i];
        switch (typeof arg) {
            case 'string':
                parsed.name = arg;
                break;
            case 'object':
                // null is typeof 'object'; keep the default opts then.
                parsed.opts = arg || parsed.opts;
                break;
            case 'function':
                parsed.cb = arg;
                break;
        }
    }
    return parsed;
};
// One test: an EventEmitter that emits 'prerun'/'run'/'test'/'result'/'end'.
// Accepts (name?, opts?, cb?) in any order via getTestArgs.
function Test (name_, opts_, cb_) {
    var self = this;
    var args = getTestArgs(name_, opts_, cb_);
    this.readable = true;
    this.name = args.name || '(anonymous)';
    this.assertCount = 0;
    this.pendingCount = 0;
    this._skip = args.opts.skip || false;
    this._plan = undefined;
    this._cb = args.cb;
    this._progeny = [];   // subtests queued to run after this body
    this._ok = true;
    // Bound so t.end can be passed directly as a callback.
    this.end = function () {
        return Test.prototype.end.apply(self, arguments);
    };
}
// Execute the test body. Skipped or body-less tests end immediately; a
// synchronous throw is reported as an 'error' assertion and ends the test.
Test.prototype.run = function () {
    if (!this._cb || this._skip) {
        return this._end();
    }
    this.emit('prerun');
    try {
        this._cb(this);
    }
    catch (err) {
        this.error(err);
        this._end();
        return;
    }
    this.emit('run');
};
// Register a named subtest. Each subtest counts toward the parent's plan;
// the parent auto-ends on the next tick once all progeny are accounted for
// (unless an explicit plan keeps it open).
Test.prototype.test = function (name, opts, cb) {
    var self = this;
    var t = new Test(name, opts, cb);
    this._progeny.push(t);
    this.pendingCount++;
    this.emit('test', t);
    t.on('prerun', function () {
        self.assertCount++;
    })
    if (!self._pendingAsserts()) {
        nextTick(function () {
            self._end();
        });
    }
    nextTick(function() {
        if (!self._plan && self.pendingCount == self._progeny.length) {
            self._end();
        }
    });
};
// Emit a TAP comment; any leading '#' in the message is stripped because the
// results writer re-adds it.
Test.prototype.comment = function (msg) {
    this.emit('result', msg.trim().replace(/^#\s*/, ''));
};
// Declare the exact number of assertions this test will run.
Test.prototype.plan = function (n) {
    this._plan = n;
    this.emit('plan', n);
};
// Finish the test. An argument is treated as an error value and asserted
// via ifError; calling end() twice is itself reported as a failure.
Test.prototype.end = function (err) {
    var self = this;
    if (arguments.length >= 1) {
        this.ifError(err);
    }
    if (this.calledEnd) {
        this.fail('.end() called twice');
    }
    this.calledEnd = true;
    this._end();
};
// Internal completion: drain queued subtests first (one per call, chained
// via their 'end' events), then emit 'end' and flag a plan mismatch if
// assertions are still outstanding.
Test.prototype._end = function (err) {
    var self = this;
    if (this._progeny.length) {
        var t = this._progeny.shift();
        t.on('end', function () { self._end() });
        t.run();
        return;
    }
    if (!this.ended) this.emit('end');
    var pendingAsserts = this._pendingAsserts();
    if (!this._planError && this._plan !== undefined && pendingAsserts) {
        this._planError = true;
        this.fail('plan != count', {
            expected : this._plan,
            actual : this.assertCount
        });
    }
    this.ended = true;
};
// Called at process exit for tests that never completed: report either a
// plan mismatch or an un-ended test as a failure.
Test.prototype._exit = function () {
    if (this._plan !== undefined &&
        !this._planError && this.assertCount !== this._plan) {
        this._planError = true;
        this.fail('plan != count', {
            expected : this._plan,
            actual : this.assertCount,
            exiting : true
        });
    }
    else if (!this.ended) {
        this.fail('test exited without ending', {
            exiting: true
        });
    }
};
// How many assertions the plan still expects. Without a plan, always report
// one pending assert so the test stays open until end() is called.
Test.prototype._pendingAsserts = function () {
    if (this._plan === undefined) {
        return 1;
    }
    return this._plan - (this._progeny.length + this.assertCount);
};
// Core assertion plumbing: build the result record, locate the user's call
// site from a synthetic stack trace, emit 'result', and auto-end the test
// when the plan is satisfied (or report an over-run plan).
Test.prototype._assert = function assert (ok, opts) {
    var self = this;
    var extra = opts.extra || {};
    // Values in `extra` (per-call overrides) win over the assertion's opts.
    var res = {
        id : self.assertCount ++,
        ok : Boolean(ok),
        skip : defined(extra.skip, opts.skip),
        name : defined(extra.message, opts.message, '(unnamed assert)'),
        operator : defined(extra.operator, opts.operator)
    };
    if (has(opts, 'actual') || has(extra, 'actual')) {
        res.actual = defined(extra.actual, opts.actual);
    }
    if (has(opts, 'expected') || has(extra, 'expected')) {
        res.expected = defined(extra.expected, opts.expected);
    }
    this._ok = Boolean(this._ok && ok);
    if (!ok) {
        res.error = defined(extra.error, opts.error, new Error(res.name));
    }
    // Walk our own synthetic stack to find the first frame outside this
    // library's directory — that is the user's assertion call site.
    var e = new Error('exception');
    var err = (e.stack || '').split('\n');
    var dir = path.dirname(__dirname) + '/';
    for (var i = 0; i < err.length; i++) {
        var m = /^\s*\bat\s+(.+)/.exec(err[i]);
        if (!m) continue;
        var s = m[1].split(/\s+/);
        var filem = /(\/[^:\s]+:(\d+)(?::(\d+))?)/.exec(s[1]);
        if (!filem) {
            filem = /(\/[^:\s]+:(\d+)(?::(\d+))?)/.exec(s[3]);
            if (!filem) continue;
        }
        if (filem[1].slice(0, dir.length) === dir) continue;
        res.functionName = s[0];
        res.file = filem[1];
        res.line = Number(filem[2]);
        if (filem[3]) res.column = filem[3];
        res.at = m[1];
        break;
    }
    self.emit('result', res);
    var pendingAsserts = self._pendingAsserts();
    if (!pendingAsserts) {
        if (extra.exiting) {
            // At process exit there is no next tick; end synchronously.
            self._end();
        } else {
            nextTick(function () {
                self._end();
            });
        }
    }
    // More assertions than planned: report once per test.
    if (!self._planError && pendingAsserts < 0) {
        self._planError = true;
        self.fail('plan != count', {
            expected : self._plan,
            actual : self._plan - pendingAsserts
        });
    }
};
// Unconditionally failing assertion.
Test.prototype.fail = function (msg, extra) {
    this._assert(false, {
        message : msg,
        operator : 'fail',
        extra : extra
    });
};
// Unconditionally passing assertion.
Test.prototype.pass = function (msg, extra) {
    this._assert(true, {
        message : msg,
        operator : 'pass',
        extra : extra
    });
};
// Assertion recorded as passing but marked "# SKIP" in the TAP output.
Test.prototype.skip = function (msg, extra) {
    this._assert(true, {
        message : msg,
        operator : 'skip',
        skip : true,
        extra : extra
    });
};
// Truthiness assertion (aliases: ok / true / assert).
Test.prototype.ok
    = Test.prototype['true']
    = Test.prototype.assert
    = function (value, msg, extra) {
    this._assert(value, {
        message : msg,
        operator : 'ok',
        expected : true,
        actual : value,
        extra : extra
    });
};
// Falsiness assertion (aliases: notOk / false / notok).
Test.prototype.notOk
    = Test.prototype['false']
    = Test.prototype.notok
    = function (value, msg, extra) {
    this._assert(!value, {
        message : msg,
        operator : 'notOk',
        expected : false,
        actual : value,
        extra : extra
    });
};
// Assert that `err` is falsy — passes errors from Node-style callbacks
// straight through (aliases: error / ifError / ifErr / iferror).
Test.prototype.error
    = Test.prototype.ifError
    = Test.prototype.ifErr
    = Test.prototype.iferror
    = function (err, msg, extra) {
    this._assert(!err, {
        message : defined(msg, String(err)),
        operator : 'error',
        actual : err,
        extra : extra
    });
};
// Strict (===) equality; all names are aliases of one implementation.
Test.prototype.equal
    = Test.prototype.equals
    = Test.prototype.isEqual
    = Test.prototype.is
    = Test.prototype.strictEqual
    = Test.prototype.strictEquals
    = function (a, b, msg, extra) {
    this._assert(a === b, {
        message : defined(msg, 'should be equal'),
        operator : 'equal',
        actual : a,
        expected : b,
        extra : extra
    });
};
// Strict (!==) inequality.
Test.prototype.notEqual
    = Test.prototype.notEquals
    = Test.prototype.notStrictEqual
    = Test.prototype.notStrictEquals
    = Test.prototype.isNotEqual
    = Test.prototype.isNot
    = Test.prototype.not
    = Test.prototype.doesNotEqual
    = Test.prototype.isInequal
    = function (a, b, msg, extra) {
    this._assert(a !== b, {
        message : defined(msg, 'should not be equal'),
        operator : 'notEqual',
        actual : a,
        notExpected : b,
        extra : extra
    });
};
// Recursive equality using strict (===) leaf comparison.
Test.prototype.deepEqual
    = Test.prototype.deepEquals
    = Test.prototype.isEquivalent
    = Test.prototype.same
    = function (a, b, msg, extra) {
    this._assert(deepEqual(a, b, { strict: true }), {
        message : defined(msg, 'should be equivalent'),
        operator : 'deepEqual',
        actual : a,
        expected : b,
        extra : extra
    });
};
// Recursive equality using loose (==) leaf comparison.
Test.prototype.deepLooseEqual
    = Test.prototype.looseEqual
    = Test.prototype.looseEquals
    = function (a, b, msg, extra) {
    this._assert(deepEqual(a, b), {
        message : defined(msg, 'should be equivalent'),
        operator : 'deepLooseEqual',
        actual : a,
        expected : b,
        extra : extra
    });
};
// Negated strict recursive equality.
Test.prototype.notDeepEqual
    = Test.prototype.notEquivalent
    = Test.prototype.notDeeply
    = Test.prototype.notSame
    = Test.prototype.isNotDeepEqual
    = Test.prototype.isNotDeeply
    = Test.prototype.isNotEquivalent
    = Test.prototype.isInequivalent
    = function (a, b, msg, extra) {
    this._assert(!deepEqual(a, b, { strict: true }), {
        message : defined(msg, 'should not be equivalent'),
        operator : 'notDeepEqual',
        actual : a,
        notExpected : b,
        extra : extra
    });
};
// Negated loose (==-based) recursive equality. The original passed
// deepEqual(a, b) unnegated, so notDeepLooseEqual asserted the exact
// opposite of its name; the assertion must hold when a and b differ.
// Message and notExpected now mirror the strict notDeepEqual variant.
Test.prototype.notDeepLooseEqual
    = Test.prototype.notLooseEqual
    = Test.prototype.notLooseEquals
    = function (a, b, msg, extra) {
    this._assert(!deepEqual(a, b), {
        message : defined(msg, 'should not be equivalent'),
        operator : 'notDeepLooseEqual',
        actual : a,
        notExpected : b,
        extra : extra
    });
};
// Assert that fn() throws. `expected` may be a RegExp matched against the
// thrown value, or may be omitted (a string second argument is the message).
Test.prototype['throws'] = function (fn, expected, msg, extra) {
    if (typeof expected === 'string') {
        msg = expected;
        expected = undefined;
    }
    var caught = undefined;
    try {
        fn();
    }
    catch (err) {
        caught = { error : err };
        // Make `message` an own enumerable property so it survives
        // serialization/inspection of the error object.
        var message = err.message;
        delete err.message;
        err.message = message;
    }
    var passed = caught;
    if (expected instanceof RegExp) {
        // NOTE(review): the regex is tested against the error object itself,
        // which coerces to its "Error: message" string form.
        passed = expected.test(caught && caught.error);
        expected = String(expected);
    }
    this._assert(passed, {
        message : defined(msg, 'should throw'),
        operator : 'throws',
        actual : caught && caught.error,
        expected : expected,
        error: !passed && caught && caught.error,
        extra : extra
    });
};
// Assert that fn() does not throw.
Test.prototype.doesNotThrow = function (fn, expected, msg, extra) {
    if (typeof expected === 'string') {
        msg = expected;
        expected = undefined;
    }
    var caught = undefined;
    try {
        fn();
    }
    catch (err) {
        caught = { error : err };
    }
    this._assert(!caught, {
        message : defined(msg, 'should not throw'),
        // NOTE(review): operator is 'throws' (not 'doesNotThrow') — kept as
        // upstream tape reports it.
        operator : 'throws',
        actual : caught && caught.error,
        expected : expected,
        error : caught && caught.error,
        extra : extra
    });
};
// Own-property check, safe even when `obj` shadows hasOwnProperty.
var hasOwnProp = Object.prototype.hasOwnProperty;
function has (obj, prop) {
    return hasOwnProp.call(obj, prop);
}
// Build a test whose skip flag is forced on. `new` is required here: the
// Test constructor has no instanceof guard, so the original bare call
// Test(...) ran with `this` bound to the global object (this module is not
// strict), clobbering globals and returning undefined instead of a Test.
Test.skip = function (name_, _opts, _cb) {
    var args = getTestArgs.apply(null, arguments);
    args.opts.skip = true;
    return new Test(args.name, args.opts, args.cb);
};
// vim: set softtabstop=4 shiftwidth=4: | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),"/../../node_modules/tape/lib") | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"deep-equal":39,"defined":42,"events":2,"inherits":16,"path":7,"stream":9}],39:[function(require,module,exports){ | |
var pSlice = Array.prototype.slice; | |
var objectKeys = require('./lib/keys.js'); | |
var isArguments = require('./lib/is_arguments.js'); | |
// Recursive structural equality in the style of Node's assert.deepEqual;
// opts.strict switches leaf comparison from == to ===. The numbered
// comments reference the CommonJS Unit Testing/1.0 spec.
var deepEqual = module.exports = function (actual, expected, opts) {
    if (!opts) opts = {};
    // 7.1. All identical values are equivalent, as determined by ===.
    if (actual === expected) {
        return true;
    } else if (actual instanceof Date && expected instanceof Date) {
        return actual.getTime() === expected.getTime();
    // 7.3. Other pairs that do not both pass typeof value == 'object',
    // equivalence is determined by ==.
    } else if (typeof actual != 'object' && typeof expected != 'object') {
        return opts.strict ? actual === expected : actual == expected;
    // 7.4. For all other Object pairs, including Array objects, equivalence is
    // determined by having the same number of owned properties (as verified
    // with Object.prototype.hasOwnProperty.call), the same set of keys
    // (although not necessarily the same order), equivalent values for every
    // corresponding key, and an identical 'prototype' property. Note: this
    // accounts for both named and indexed properties on Arrays.
    } else {
        return objEquiv(actual, expected, opts);
    }
}
// True only for null and undefined (loose == null matches exactly those two).
function isUndefinedOrNull(value) {
    return value == null;
}
// Duck-type check for Buffer-like objects: numeric length, copy/slice
// methods, and (when non-empty) numeric elements.
function isBuffer (x) {
    if (!x || typeof x !== 'object' || typeof x.length !== 'number') {
        return false;
    }
    var hasBufferMethods = typeof x.copy === 'function' && typeof x.slice === 'function';
    if (!hasBufferMethods) {
        return false;
    }
    return !(x.length > 0 && typeof x[0] !== 'number');
}
// Structural comparison for the object case of deepEqual: special-cases
// arguments objects and Buffer-likes, then compares sorted own-key sets and
// recurses into the values.
function objEquiv(a, b, opts) {
    var i, key;
    if (isUndefinedOrNull(a) || isUndefinedOrNull(b))
        return false;
    // an identical 'prototype' property.
    if (a.prototype !== b.prototype) return false;
    //~~~I've managed to break Object.keys through screwy arguments passing.
    //   Converting to array solves the problem.
    if (isArguments(a)) {
        if (!isArguments(b)) {
            return false;
        }
        a = pSlice.call(a);
        b = pSlice.call(b);
        return deepEqual(a, b, opts);
    }
    if (isBuffer(a)) {
        if (!isBuffer(b)) {
            return false;
        }
        if (a.length !== b.length) return false;
        // Byte-wise comparison; strict === is fine since elements are numbers.
        for (i = 0; i < a.length; i++) {
            if (a[i] !== b[i]) return false;
        }
        return true;
    }
    try {
        var ka = objectKeys(a),
            kb = objectKeys(b);
    } catch (e) {//happens when one is a string literal and the other isn't
        return false;
    }
    // having the same number of owned properties (keys incorporates
    // hasOwnProperty)
    if (ka.length != kb.length)
        return false;
    //the same set of keys (although not necessarily the same order),
    ka.sort();
    kb.sort();
    //~~~cheap key test
    for (i = ka.length - 1; i >= 0; i--) {
        if (ka[i] != kb[i])
            return false;
    }
    //equivalent values for every corresponding key, and
    //~~~possibly expensive deep test
    for (i = ka.length - 1; i >= 0; i--) {
        key = ka[i];
        if (!deepEqual(a[key], b[key], opts)) return false;
    }
    return true;
}
},{"./lib/is_arguments.js":40,"./lib/keys.js":41}],40:[function(require,module,exports){ | |
// Detect whether this engine tags arguments objects as '[object Arguments]';
// export the toString-based check when it does, a duck-typed fallback
// (callee own but non-enumerable) otherwise.
var supportsArgumentsClass = (function(){
    return Object.prototype.toString.call(arguments)
})() == '[object Arguments]';
exports = module.exports = supportsArgumentsClass ? supported : unsupported;
exports.supported = supported;
function supported(object) {
    return Object.prototype.toString.call(object) == '[object Arguments]';
};
exports.unsupported = unsupported;
function unsupported(object){
    return object &&
        typeof object == 'object' &&
        typeof object.length == 'number' &&
        Object.prototype.hasOwnProperty.call(object, 'callee') &&
        !Object.prototype.propertyIsEnumerable.call(object, 'callee') ||
        false;
};
},{}],41:[function(require,module,exports){ | |
// Prefer the native Object.keys; fall back to the for..in shim below.
exports = module.exports = typeof Object.keys === 'function'
    ? Object.keys : shim;
exports.shim = shim;
// Object.keys fallback. Note: unlike the native version, for..in also
// enumerates inherited enumerable keys.
function shim (obj) {
    var names = [];
    for (var name in obj) {
        names.push(name);
    }
    return names;
}
},{}],42:[function(require,module,exports){ | |
module.exports = function () { | |
for (var i = 0; i < arguments.length; i++) { | |
if (arguments[i] !== undefined) return arguments[i]; | |
} | |
}; | |
},{}],43:[function(require,module,exports){ | |
// Render an arbitrary value as a readable one-line string (object-inspect).
// opts.depth caps recursion (default 5); `seen` tracks ancestors to report
// '[Circular]' instead of recursing forever.
module.exports = function inspect_ (obj, opts, depth, seen) {
    if (!opts) opts = {};
    var maxDepth = opts.depth === undefined ? 5 : opts.depth;
    if (depth === undefined) depth = 0;
    if (depth > maxDepth && maxDepth > 0) return '...';
    if (seen === undefined) seen = [];
    else if (indexOf(seen, obj) >= 0) {
        return '[Circular]';
    }
    // Recurse one level deeper; `from` is the parent container, copied onto
    // a fresh seen-list so siblings don't pollute each other's ancestry.
    function inspect (value, from) {
        if (from) {
            seen = seen.slice();
            seen.push(from);
        }
        return inspect_(value, opts, depth + 1, seen);
    }
    if (typeof obj === 'string') {
        return inspectString(obj);
    }
    else if (typeof obj === 'function') {
        var name = nameOf(obj);
        return '[Function' + (name ? ': ' + name : '') + ']';
    }
    else if (obj === null) {
        return 'null';
    }
    else if (isElement(obj)) {
        // DOM elements render as an abbreviated opening/closing tag pair.
        var s = '<' + String(obj.nodeName).toLowerCase();
        var attrs = obj.attributes || [];
        for (var i = 0; i < attrs.length; i++) {
            s += ' ' + attrs[i].name + '="' + quote(attrs[i].value) + '"';
        }
        s += '>';
        if (obj.childNodes && obj.childNodes.length) s += '...';
        s += '</' + String(obj.tagName).toLowerCase() + '>';
        return s;
    }
    else if (isArray(obj)) {
        if (obj.length === 0) return '[]';
        var xs = Array(obj.length);
        for (var i = 0; i < obj.length; i++) {
            // Holes in sparse arrays render as empty slots.
            xs[i] = has(obj, i) ? inspect(obj[i], obj) : '';
        }
        return '[ ' + xs.join(', ') + ' ]';
    }
    else if (typeof obj === 'object' && typeof obj.inspect === 'function') {
        // Honor a custom inspect() method (Node convention of the era).
        return obj.inspect();
    }
    else if (typeof obj === 'object' && !isDate(obj) && !isRegExp(obj)) {
        var xs = [], keys = [];
        for (var key in obj) {
            if (has(obj, key)) keys.push(key);
        }
        keys.sort();
        for (var i = 0; i < keys.length; i++) {
            var key = keys[i];
            // Keys that aren't identifier-safe are quoted via inspect().
            if (/[^\w$]/.test(key)) {
                xs.push(inspect(key) + ': ' + inspect(obj[key], obj));
            }
            else xs.push(key + ': ' + inspect(obj[key], obj));
        }
        if (xs.length === 0) return '{}';
        return '{ ' + xs.join(', ') + ' }';
    }
    else return String(obj);
};
// Escape double quotes for use inside a rendered HTML attribute value.
// NOTE(review): upstream object-inspect replaces '"' with the HTML entity
// '&quot;'; in this copy the replacement string appears to have been
// HTML-unescaped back into a plain '"' (an identity replacement) — verify
// against the original package before relying on this.
function quote (s) {
    return String(s).replace(/"/g, '"');
}
// Object.prototype.toString-based type tags; these work across realms,
// unlike instanceof checks.
var toStr = {}.toString;
function isArray (obj) {
    return toStr.call(obj) === '[object Array]';
}
function isDate (obj) {
    return toStr.call(obj) === '[object Date]';
}
function isRegExp (obj) {
    return toStr.call(obj) === '[object RegExp]';
}
// Own-property check with a paranoid fallback kept from the original: if
// hasOwnProperty were somehow missing, degrade to the (inherited-including)
// `in` operator.
function has (obj, key) {
    var hasOwn = {}.hasOwnProperty;
    if (!hasOwn) return key in obj;
    return hasOwn.call(obj, key);
}
// Best-effort function name: prefer the .name property, else parse the
// source text; undefined for anonymous functions.
function nameOf (f) {
    if (f.name) {
        return f.name;
    }
    var match = /^function\s*([\w$]+)/.exec(String(f));
    return match ? match[1] : undefined;
}
// Strict-equality indexOf: prefer the native method, fall back to a linear
// scan for array-likes that lack one.
function indexOf (xs, x) {
    if (xs.indexOf) {
        return xs.indexOf(x);
    }
    var len = xs.length;
    for (var i = 0; i < len; i++) {
        if (xs[i] === x) {
            return i;
        }
    }
    return -1;
}
// DOM element check: instanceof HTMLElement where the global exists, else a
// duck-type test on nodeName/getAttribute.
function isElement (x) {
    if (!x || typeof x !== 'object') {
        return false;
    }
    if (typeof HTMLElement !== 'undefined') {
        return x instanceof HTMLElement;
    }
    return typeof x.nodeName === 'string'
        && typeof x.getAttribute === 'function';
}
// Single-quote a string, escaping quotes/backslashes and rendering control
// characters as \b \t \n \f \r or \xNN.
function inspectString (str) {
    var escaped = str.replace(/(['\\])/g, '\\$1').replace(/[\x00-\x1f]/g, lowbyte);
    return "'" + escaped + "'";
    function lowbyte (c) {
        var code = c.charCodeAt(0);
        var named = { 8: 'b', 9: 't', 10: 'n', 12: 'f', 13: 'r' }[code];
        if (named) {
            return '\\' + named;
        }
        return '\\x' + (code < 0x10 ? '0' : '') + code.toString(16);
    }
}
},{}],44:[function(require,module,exports){ | |
(function (process){ | |
var through = require('through'); | |
var nextTick = typeof setImmediate !== 'undefined' | |
? setImmediate | |
: process.nextTick | |
; | |
// A through stream that starts paused and auto-resumes on the next tick —
// unless the consumer explicitly paused it in the meantime. Lets producers
// queue data (e.g. a TAP header) before any consumer attaches.
module.exports = function (write, end) {
    var tr = through(write, end);
    tr.pause();
    var resume = tr.resume;
    var pause = tr.pause;
    var paused = false;
    // Wrap pause/resume to remember the consumer's most recent intent.
    tr.pause = function () {
        paused = true;
        return pause.apply(this, arguments);
    };
    tr.resume = function () {
        paused = false;
        return resume.apply(this, arguments);
    };
    nextTick(function () {
        if (!paused) tr.resume();
    });
    return tr;
};
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"through":45}],45:[function(require,module,exports){ | |
(function (process){ | |
var Stream = require('stream') | |
// through | |
// | |
// a stream that does nothing but re-emit the input. | |
// useful for aggregating a series of changing but not ending streams into one stream) | |
exports = module.exports = through | |
through.through = through | |
//create a readable writable stream. | |
// Create a readable/writable pass-through stream. `write` is invoked
// for each chunk written and `end` when the writable side finishes;
// both default to implementations that queue the data (or the null
// end-marker) for re-emission.
function through (write, end, opts) {
  write = write || function (data) { this.queue(data) }
  end = end || function () { this.queue(null) }
  // ended:     stream.end() has been called (writer finished)
  // destroyed: stream.destroy() has run
  // buffer:    chunks queued but not yet emitted (null marks stream end)
  // _ended:    the null end-marker has been queued (no more pushes allowed)
  var ended = false, destroyed = false, buffer = [], _ended = false
  var stream = new Stream()
  stream.readable = stream.writable = true
  stream.paused = false
//  stream.autoPause = !(opts && opts.autoPause === false)
  stream.autoDestroy = !(opts && opts.autoDestroy === false)
  stream.write = function (data) {
    write.call(this, data)
    // back-pressure signal: ask upstream to pause while we are paused
    return !stream.paused
  }
  // Flush queued chunks until the buffer empties or a 'data' listener
  // pauses us; a null chunk is the end-of-stream marker.
  function drain() {
    while(buffer.length && !stream.paused) {
      var data = buffer.shift()
      if(null === data)
        return stream.emit('end')
      else
        stream.emit('data', data)
    }
  }
  stream.queue = stream.push = function (data) {
//    console.error(ended)
    if(_ended) return stream
    if(data == null) _ended = true
    buffer.push(data)
    drain()
    return stream
  }
  //this will be registered as the first 'end' listener
  //must call destroy next tick, to make sure we're after any
  //stream piped from here.
  //this is only a problem if end is not emitted synchronously.
  //a nicer way to do this is to make sure this is the last listener for 'end'
  stream.on('end', function () {
    stream.readable = false
    if(!stream.writable && stream.autoDestroy)
      process.nextTick(function () {
        stream.destroy()
      })
  })
  // Finish the writable side: run the user `end` hook, and destroy once
  // the readable side is finished too.
  function _end () {
    stream.writable = false
    end.call(stream)
    if(!stream.readable && stream.autoDestroy)
      stream.destroy()
  }
  stream.end = function (data) {
    if(ended) return
    ended = true
    // a final chunk may be passed to end(), like node streams
    if(arguments.length) stream.write(data)
    _end() // will emit or queue
    return stream
  }
  stream.destroy = function () {
    if(destroyed) return
    destroyed = true
    ended = true
    buffer.length = 0
    stream.writable = stream.readable = false
    stream.emit('close')
    return stream
  }
  stream.pause = function () {
    if(stream.paused) return
    stream.paused = true
    return stream
  }
  stream.resume = function () {
    if(stream.paused) {
      stream.paused = false
      stream.emit('resume')
    }
    drain()
    //may have become paused again,
    //as drain emits 'data'.
    if(!stream.paused)
      stream.emit('drain')
    return stream
  }
  return stream
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"stream":9}],46:[function(require,module,exports){ | |
// UAParser.js v0.6.2 | |
// Lightweight JavaScript-based User-Agent string parser | |
// https://github.com/faisalman/ua-parser-js | |
// | |
// Copyright © 2012-2013 Faisalman <[email protected]> | |
// Dual licensed under GPLv2 & MIT | |
(function (window, undefined) { | |
'use strict'; | |
////////////// | |
// Constants | |
///////////// | |
// String constants: typeof-comparison strings plus the property-name
// keys used in every parsed result object.
var EMPTY = '',
    UNKNOWN = '?',
    FUNC_TYPE = 'function',
    UNDEF_TYPE = 'undefined',
    OBJ_TYPE = 'object',
    MAJOR = 'major',
    MODEL = 'model',
    NAME = 'name',
    TYPE = 'type',
    VENDOR = 'vendor',
    VERSION = 'version',
    ARCHITECTURE= 'architecture',
    CONSOLE = 'console',
    MOBILE = 'mobile',
    TABLET = 'tablet';
/////////// | |
// Helper | |
////////// | |
// Small string helpers shared by the parser.
var util = {
    // Case-insensitive containment test: is `needle` found in `haystack`?
    has : function (needle, haystack) {
      var h = haystack.toLowerCase();
      var n = needle.toLowerCase();
      return h.indexOf(n) !== -1;
    },
    // Lower-case a string (used as a capture post-processor).
    lowerize : function (str) {
      return str.toLowerCase();
    }
  };
/////////////// | |
// Map helper | |
////////////// | |
// Helpers that turn regex matches into result objects.
var mapper = {
  // rgx(regexList1, propsList1, regexList2, propsList2, ...)
  // Walks the (regex-array, props-array) pairs in order, running each
  // regex against this.getUA(); the first regex that matches wins and
  // its capture groups are assigned per the paired props description.
  rgx : function () {
    // loop through all regexes maps
    for (var result, i = 0, j, k, p, q, matches, match, args = arguments; i < args.length; i += 2) {
      var regex = args[i],       // even sequence (0,2,4,..)
          props = args[i + 1];   // odd sequence (1,3,5,..)
      // construct object barebones (all expected keys -> undefined),
      // done once on the first pair
      if (typeof(result) === UNDEF_TYPE) {
        result = {};
        for (p in props) {
          q = props[p];
          if (typeof(q) === OBJ_TYPE) {
            result[q[0]] = undefined;
          } else {
            result[q] = undefined;
          }
        }
      }
      // try matching uastring with regexes
      for (j = k = 0; j < regex.length; j++) {
        matches = regex[j].exec(this.getUA());
        if (!!matches) {
          for (p in props) {
            // k advances across props so each prop consumes the next
            // capture group of the winning regex
            match = matches[++k];
            q = props[p];
            // check if given property is actually array
            if (typeof(q) === OBJ_TYPE && q.length > 0) {
              if (q.length == 2) {
                if (typeof(q[1]) == FUNC_TYPE) {
                  // assign modified match
                  result[q[0]] = q[1].call(this, match);
                } else {
                  // assign given value, ignore regex match
                  result[q[0]] = q[1];
                }
              } else if (q.length == 3) {
                // check whether function or regex
                if (typeof(q[1]) === FUNC_TYPE && !(q[1].exec && q[1].test)) {
                  // call function (usually string mapper)
                  result[q[0]] = match ? q[1].call(this, match, q[2]) : undefined;
                } else {
                  // sanitize match using given regex
                  result[q[0]] = match ? match.replace(q[1], q[2]) : undefined;
                }
              } else if (q.length == 4) {
                // [key, sanitizeRegex, replacement, postProcessFn]
                result[q[0]] = match ? q[3].call(this, match.replace(q[1], q[2])) : undefined;
              }
            } else {
              result[q] = match ? match : undefined;
            }
          }
          break;
        }
      }
      if(!!matches) break; // break the loop immediately if match found
    }
    return result;
  },
  // str(str, map): reverse lookup of `str` in `map`. Returns the key
  // whose value (or one of whose array values) occurs in `str`,
  // undefined when that key is the '?' placeholder, or `str` itself
  // when nothing matches.
  str : function (str, map) {
    for (var i in map) {
      // check if array
      if (typeof(map[i]) === OBJ_TYPE && map[i].length > 0) {
        for (var j = 0; j < map[i].length; j++) {
          if (util.has(map[i][j], str)) {
            return (i === UNKNOWN) ? undefined : i;
          }
        }
      } else if (util.has(map[i], str)) {
        return (i === UNKNOWN) ? undefined : i;
      }
    }
    return str;
  }
};
/////////////// | |
// String map | |
////////////// | |
// Lookup tables consumed via mapper.str to normalise ambiguous regex
// captures into canonical names/versions (e.g. old Safari build
// numbers -> marketing versions, Windows NT versions -> product names).
var maps = {
  browser : {
    oldsafari : {
      major : {
        '1' : ['/8', '/1', '/3'],
        '2' : '/4',
        '?' : '/'
      },
      version : {
        '1.0' : '/8',
        '1.2' : '/1',
        '1.3' : '/3',
        '2.0' : '/412',
        '2.0.2' : '/416',
        '2.0.3' : '/417',
        '2.0.4' : '/419',
        '?' : '/'
      }
    }
  },
  device : {
    sprint : {
      model : {
        'Evo Shift 4G' : '7373KT'
      },
      vendor : {
        'HTC' : 'APA',
        'Sprint' : 'Sprint'
      }
    }
  },
  os : {
    windows : {
      version : {
        'ME' : '4.90',
        'NT 3.11' : 'NT3.51',
        'NT 4.0' : 'NT4.0',
        '2000' : 'NT 5.0',
        'XP' : ['NT 5.1', 'NT 5.2'],
        'Vista' : 'NT 6.0',
        '7' : 'NT 6.1',
        '8' : 'NT 6.2',
        'RT' : 'ARM'
      }
    }
  }
};
////////////// | |
// Regex map | |
///////////// | |
// Ordered regex tables consumed by mapper.rgx. Each category is a flat
// list of pairs: an array of regexes followed by the props describing
// how the capture groups map onto the result object. Order matters —
// the first matching regex in a category wins.
var regexes = {
  browser : [[
  // Presto based
  /(opera\smini)\/((\d+)?[\w\.-]+)/i,                                 // Opera Mini
  /(opera\s[mobiletab]+).+version\/((\d+)?[\w\.-]+)/i,                // Opera Mobi/Tablet
  /(opera).+version\/((\d+)?[\w\.]+)/i,                               // Opera > 9.80
  /(opera)[\/\s]+((\d+)?[\w\.]+)/i                                    // Opera < 9.80
  ], [NAME, VERSION, MAJOR], [
  /\s(opr)\/((\d+)?[\w\.]+)/i                                         // Opera Webkit
  ], [[NAME, 'Opera'], VERSION, MAJOR], [
  // Mixed
  /(kindle)\/((\d+)?[\w\.]+)/i,                                       // Kindle
  /(lunascape|maxthon|netfront|jasmine|blazer)[\/\s]?((\d+)?[\w\.]+)*/i,
                                                                      // Lunascape/Maxthon/Netfront/Jasmine/Blazer
  // Trident based
  /(avant\s|iemobile|slim|baidu)(?:browser)?[\/\s]?((\d+)?[\w\.]*)/i,
                                                                      // Avant/IEMobile/SlimBrowser/Baidu
  /(?:ms|\()(ie)\s((\d+)?[\w\.]+)/i,                                  // Internet Explorer
  // Webkit/KHTML based
  /(rekonq)((?:\/)[\w\.]+)*/i,                                        // Rekonq
  /(chromium|flock|rockmelt|midori|epiphany|silk|skyfire|ovibrowser|bolt|iron)\/((\d+)?[\w\.-]+)/i
                                                                      // Chromium/Flock/RockMelt/Midori/Epiphany/Silk/Skyfire/Bolt/Iron
  ], [NAME, VERSION, MAJOR], [
  /(trident).+rv[:\s]((\d+)?[\w\.]+).+like\sgecko/i                   // IE11
  ], [[NAME, 'IE'], VERSION, MAJOR], [
  /(yabrowser)\/((\d+)?[\w\.]+)/i                                     // Yandex
  ], [[NAME, 'Yandex'], VERSION, MAJOR], [
  /(comodo_dragon)\/((\d+)?[\w\.]+)/i                                 // Comodo Dragon
  ], [[NAME, /_/g, ' '], VERSION, MAJOR], [
  /(chrome|omniweb|arora|[tizenoka]{5}\s?browser)\/v?((\d+)?[\w\.]+)/i
                                                                      // Chrome/OmniWeb/Arora/Tizen/Nokia
  ], [NAME, VERSION, MAJOR], [
  /(dolfin)\/((\d+)?[\w\.]+)/i                                        // Dolphin
  ], [[NAME, 'Dolphin'], VERSION, MAJOR], [
  /((?:android.+)crmo|crios)\/((\d+)?[\w\.]+)/i                       // Chrome for Android/iOS
  ], [[NAME, 'Chrome'], VERSION, MAJOR], [
  /version\/((\d+)?[\w\.]+).+?mobile\/\w+\s(safari)/i                 // Mobile Safari
  ], [VERSION, MAJOR, [NAME, 'Mobile Safari']], [
  /version\/((\d+)?[\w\.]+).+?(mobile\s?safari|safari)/i              // Safari & Safari Mobile
  ], [VERSION, MAJOR, NAME], [
  /webkit.+?(mobile\s?safari|safari)((\/[\w\.]+))/i                   // Safari < 3.0
  ], [NAME, [MAJOR, mapper.str, maps.browser.oldsafari.major], [VERSION, mapper.str, maps.browser.oldsafari.version]], [
  /(konqueror)\/((\d+)?[\w\.]+)/i,                                    // Konqueror
  /(webkit|khtml)\/((\d+)?[\w\.]+)/i
  ], [NAME, VERSION, MAJOR], [
  // Gecko based
  /(navigator|netscape)\/((\d+)?[\w\.-]+)/i                           // Netscape
  ], [[NAME, 'Netscape'], VERSION, MAJOR], [
  /(swiftfox)/i,                                                      // Swiftfox
  /(icedragon|iceweasel|camino|chimera|fennec|maemo\sbrowser|minimo|conkeror)[\/\s]?((\d+)?[\w\.\+]+)/i,
                                                                      // IceDragon/Iceweasel/Camino/Chimera/Fennec/Maemo/Minimo/Conkeror
  /(firefox|seamonkey|k-meleon|icecat|iceape|firebird|phoenix)\/((\d+)?[\w\.-]+)/i,
                                                                      // Firefox/SeaMonkey/K-Meleon/IceCat/IceApe/Firebird/Phoenix
  /(mozilla)\/((\d+)?[\w\.]+).+rv\:.+gecko\/\d+/i,                    // Mozilla
  // Other
  /(uc\s?browser|polaris|lynx|dillo|icab|doris|amaya|w3m|netsurf|qqbrowser)[\/\s]?((\d+)?[\w\.]+)/i,
                                                                      // UCBrowser/Polaris/Lynx/Dillo/iCab/Doris/Amaya/w3m/NetSurf/QQBrowser
  /(links)\s\(((\d+)?[\w\.]+)/i,                                      // Links
  /(gobrowser)\/?((\d+)?[\w\.]+)*/i,                                  // GoBrowser
  /(ice\s?browser)\/v?((\d+)?[\w\._]+)/i,                             // ICE Browser
  /(mosaic)[\/\s]((\d+)?[\w\.]+)/i                                    // Mosaic
  ], [NAME, VERSION, MAJOR]
  ],
  cpu : [[
  /(?:(amd|x(?:(?:86|64)[_-])?|wow|win)64)[;\)]/i                     // AMD64
  ], [[ARCHITECTURE, 'amd64']], [
  /((?:i[346]|x)86)[;\)]/i                                            // IA32
  ], [[ARCHITECTURE, 'ia32']], [
  // PocketPC mistakenly identified as PowerPC
  /windows\s(ce|mobile);\sppc;/i
  ], [[ARCHITECTURE, 'arm']], [
  /((?:ppc|powerpc)(?:64)?)(?:\smac|;|\))/i                           // PowerPC
  ], [[ARCHITECTURE, /ower/, '', util.lowerize]], [
  /(sun4\w)[;\)]/i                                                    // SPARC
  ], [[ARCHITECTURE, 'sparc']], [
  /(ia64(?=;)|68k(?=\))|arm(?=v\d+;)|(?:irix|mips|sparc)(?:64)?(?=;)|pa-risc)/i
                                                                      // IA64, 68K, ARM, IRIX, MIPS, SPARC, PA-RISC
  ], [ARCHITECTURE, util.lowerize]
  ],
  device : [[
  /\((ipad|playbook);[\w\s\);-]+(rim|apple)/i                         // iPad/PlayBook
  ], [MODEL, VENDOR, [TYPE, TABLET]], [
  /(hp).+(touchpad)/i,                                                // HP TouchPad
  /(kindle)\/([\w\.]+)/i,                                             // Kindle
  /\s(nook)[\w\s]+build\/(\w+)/i,                                     // Nook
  /(dell)\s(strea[kpr\s\d]*[\dko])/i                                  // Dell Streak
  ], [VENDOR, MODEL, [TYPE, TABLET]], [
  /\((ip[honed]+);.+(apple)/i                                         // iPod/iPhone
  ], [MODEL, VENDOR, [TYPE, MOBILE]], [
  /(blackberry)[\s-]?(\w+)/i,                                         // BlackBerry
  /(blackberry|benq|palm(?=\-)|sonyericsson|acer|asus|dell|huawei|meizu|motorola)[\s_-]?([\w-]+)*/i,
                                                                      // BenQ/Palm/Sony-Ericsson/Acer/Asus/Dell/Huawei/Meizu/Motorola
  /(hp)\s([\w\s]+\w)/i,                                               // HP iPAQ
  /(asus)-?(\w+)/i                                                    // Asus
  ], [VENDOR, MODEL, [TYPE, MOBILE]], [
  /\((bb10);\s(\w+)/i                                                 // BlackBerry 10
  ], [[VENDOR, 'BlackBerry'], MODEL, [TYPE, MOBILE]], [
  /android.+((transfo[prime\s]{4,10}\s\w+|eeepc|slider\s\w+))/i       // Asus Tablets
  ], [[VENDOR, 'Asus'], MODEL, [TYPE, TABLET]], [
  /(sony)\s(tablet\s[ps])/i                                           // Sony Tablets
  ], [VENDOR, MODEL, [TYPE, TABLET]], [
  /(nintendo)\s([wids3u]+)/i                                          // Nintendo
  ], [VENDOR, MODEL, [TYPE, CONSOLE]], [
  /((playstation)\s[3portablevi]+)/i                                  // Playstation
  ], [[VENDOR, 'Sony'], MODEL, [TYPE, CONSOLE]], [
  /(sprint\s(\w+))/i                                                  // Sprint Phones
  ], [[VENDOR, mapper.str, maps.device.sprint.vendor], [MODEL, mapper.str, maps.device.sprint.model], [TYPE, MOBILE]], [
  /(htc)[;_\s-]+([\w\s]+(?=\))|\w+)*/i,                               // HTC
  /(zte)-(\w+)*/i,                                                    // ZTE
  /(alcatel|geeksphone|huawei|lenovo|nexian|panasonic|(?=;\s)sony)[_\s-]?([\w-]+)*/i
                                                                      // Alcatel/GeeksPhone/Huawei/Lenovo/Nexian/Panasonic/Sony
  ], [VENDOR, [MODEL, /_/g, ' '], [TYPE, MOBILE]], [
  // Motorola
  /\s((milestone|droid(?:[2-4x]|\s(?:bionic|x2|pro|razr))?(:?\s4g)?))[\w\s]+build\//i,
  /(mot)[\s-]?(\w+)*/i
  ], [[VENDOR, 'Motorola'], MODEL, [TYPE, MOBILE]], [
  /android.+\s((mz60\d|xoom[\s2]{0,2}))\sbuild\//i
  ], [[VENDOR, 'Motorola'], MODEL, [TYPE, TABLET]], [
  /android.+((sch-i[89]0\d|shw-m380s|gt-p\d{4}|gt-n8000|sgh-t8[56]9))/i
  ], [[VENDOR, 'Samsung'], MODEL, [TYPE, TABLET]], [                  // Samsung
  /((s[cgp]h-\w+|gt-\w+|galaxy\snexus))/i,
  /(sam[sung]*)[\s-]*(\w+-?[\w-]*)*/i,
  /sec-((sgh\w+))/i
  ], [[VENDOR, 'Samsung'], MODEL, [TYPE, MOBILE]], [
  /(sie)-(\w+)*/i                                                     // Siemens
  ], [[VENDOR, 'Siemens'], MODEL, [TYPE, MOBILE]], [
  /(maemo|nokia).*(n900|lumia\s\d+)/i,                                // Nokia
  /(nokia)[\s_-]?([\w-]+)*/i
  ], [[VENDOR, 'Nokia'], MODEL, [TYPE, MOBILE]], [
  /android\s3\.[\s\w-;]{10}((a\d{3}))/i                               // Acer
  ], [[VENDOR, 'Acer'], MODEL, [TYPE, TABLET]], [
  /android\s3\.[\s\w-;]{10}(lg?)-([06cv9]{3,4})/i                     // LG
  ], [[VENDOR, 'LG'], MODEL, [TYPE, TABLET]], [
  /((nexus\s4))/i,
  /(lg)[e;\s-\/]+(\w+)*/i
  ], [[VENDOR, 'LG'], MODEL, [TYPE, MOBILE]], [
  /(mobile|tablet);.+rv\:.+gecko\//i                                  // Unidentifiable
  ], [TYPE, VENDOR, MODEL]
  ],
  engine : [[
  /(presto)\/([\w\.]+)/i,                                             // Presto
  /(webkit|trident|netfront|netsurf|amaya|lynx|w3m)\/([\w\.]+)/i,     // WebKit/Trident/NetFront/NetSurf/Amaya/Lynx/w3m
  /(khtml|tasman|links)[\/\s]\(?([\w\.]+)/i,                          // KHTML/Tasman/Links
  /(icab)[\/\s]([23]\.[\d\.]+)/i                                      // iCab
  ], [NAME, VERSION], [
  /rv\:([\w\.]+).*(gecko)/i                                           // Gecko
  ], [VERSION, NAME]
  ],
  os : [[
  // Windows based
  /(windows)\snt\s6\.2;\s(arm)/i,                                     // Windows RT
  /(windows\sphone(?:\sos)*|windows\smobile|windows)[\s\/]?([ntce\d\.\s]+\w)/i
  ], [NAME, [VERSION, mapper.str, maps.os.windows.version]], [
  /(win(?=3|9|n)|win\s9x\s)([nt\d\.]+)/i
  ], [[NAME, 'Windows'], [VERSION, mapper.str, maps.os.windows.version]], [
  // Mobile/Embedded OS
  /\((bb)(10);/i                                                      // BlackBerry 10
  ], [[NAME, 'BlackBerry'], VERSION], [
  /(blackberry)\w*\/?([\w\.]+)*/i,                                    // Blackberry
  /(tizen)\/([\w\.]+)/i,                                              // Tizen
  /(android|webos|palm\os|qnx|bada|rim\stablet\sos|meego)[\/\s-]?([\w\.]+)*/i
                                                                      // Android/WebOS/Palm/QNX/Bada/RIM/MeeGo
  ], [NAME, VERSION], [
  /(symbian\s?os|symbos|s60(?=;))[\/\s-]?([\w\.]+)*/i                 // Symbian
  ], [[NAME, 'Symbian'], VERSION],[
  /mozilla.+\(mobile;.+gecko.+firefox/i                               // Firefox OS
  ], [[NAME, 'Firefox OS'], VERSION], [
  // Console
  /(nintendo|playstation)\s([wids3portablevu]+)/i,                    // Nintendo/Playstation
  // GNU/Linux based
  /(mint)[\/\s\(]?(\w+)*/i,                                           // Mint
  /(joli|[kxln]?ubuntu|debian|[open]*suse|gentoo|arch|slackware|fedora|mandriva|centos|pclinuxos|redhat|zenwalk)[\/\s-]?([\w\.-]+)*/i,
                                                                      // Joli/Ubuntu/Debian/SUSE/Gentoo/Arch/Slackware
                                                                      // Fedora/Mandriva/CentOS/PCLinuxOS/RedHat/Zenwalk
  /(hurd|linux)\s?([\w\.]+)*/i,                                       // Hurd/Linux
  /(gnu)\s?([\w\.]+)*/i                                               // GNU
  ], [NAME, VERSION], [
  /(cros)\s[\w]+\s([\w\.]+\w)/i                                       // Chromium OS
  ], [[NAME, 'Chromium OS'], VERSION],[
  // Solaris
  /(sunos)\s?([\w\.]+\d)*/i                                           // Solaris
  ], [[NAME, 'Solaris'], VERSION], [
  // BSD based
  /\s([frentopc-]{0,4}bsd|dragonfly)\s?([\w\.]+)*/i                   // FreeBSD/NetBSD/OpenBSD/PC-BSD/DragonFly
  ], [NAME, VERSION],[
  /(ip[honead]+)(?:.*os\s*([\w]+)*\slike\smac|;\sopera)/i             // iOS
  ], [[NAME, 'iOS'], [VERSION, /_/g, '.']], [
  /(mac\sos\sx)\s?([\w\s\.]+\w)*/i                                    // Mac OS
  ], [NAME, [VERSION, /_/g, '.']], [
  // Other
  /(haiku)\s(\w+)/i,                                                  // Haiku
  /(aix)\s((\d)(?=\.|\)|\s)[\w\.]*)*/i,                               // AIX
  /(macintosh|mac(?=_powerpc)|plan\s9|minix|beos|os\/2|amigaos|morphos|risc\sos)/i,
                                                                      // Plan9/Minix/BeOS/OS2/AmigaOS/MorphOS/RISCOS
  /(unix)\s?([\w\.]+)*/i                                              // UNIX
  ], [NAME, VERSION]
  ]
};
///////////////// | |
// Constructor | |
//////////////// | |
// UAParser constructor. `new UAParser(ua)` yields an instance with
// getBrowser/getCPU/getDevice/getEngine/getOS/getResult accessors;
// called without `new` it returns the fully parsed result object.
var UAParser = function (uastring) {
  // fall back to the live navigator.userAgent when none is supplied
  var ua = uastring || ((window && window.navigator && window.navigator.userAgent) ? window.navigator.userAgent : EMPTY);
  if (!(this instanceof UAParser)) {
    return new UAParser(uastring).getResult();
  }
  this.getBrowser = function () {
    return mapper.rgx.apply(this, regexes.browser);
  };
  this.getCPU = function () {
    return mapper.rgx.apply(this, regexes.cpu);
  };
  this.getDevice = function () {
    return mapper.rgx.apply(this, regexes.device);
  };
  this.getEngine = function () {
    return mapper.rgx.apply(this, regexes.engine);
  };
  this.getOS = function () {
    return mapper.rgx.apply(this, regexes.os);
  };
  // Parse every category at once.
  this.getResult = function() {
    return {
      ua : this.getUA(),
      browser : this.getBrowser(),
      engine : this.getEngine(),
      os : this.getOS(),
      device : this.getDevice(),
      cpu : this.getCPU()
    };
  };
  this.getUA = function () {
    return ua;
  };
  // Swap in a different user-agent string for subsequent calls.
  this.setUA = function (uastring) {
    ua = uastring;
    return this;
  };
  this.setUA(ua);
};
/////////// | |
// Export | |
////////// | |
// check js environment and export accordingly: CommonJS, plain
// browser global, AMD, and an optional jQuery plugin ($.ua).
if (typeof(exports) !== UNDEF_TYPE) {
  // nodejs env
  if (typeof(module) !== UNDEF_TYPE && module.exports) {
    exports = module.exports = UAParser;
  }
  exports.UAParser = UAParser;
} else {
  // browser env
  window.UAParser = UAParser;
  // requirejs env (optional)
  if (typeof(define) === FUNC_TYPE && define.amd) {
    define(function () {
      return UAParser;
    });
  }
  // jQuery specific (optional)
  if (typeof(window.jQuery) !== UNDEF_TYPE) {
    var $ = window.jQuery;
    var parser = new UAParser();
    // expose the current parse as $.ua, with get/set helpers that
    // re-parse in place when the UA string is swapped
    $.ua = parser.getResult();
    $.ua.get = function() {
      return parser.getUA();
    };
    $.ua.set = function (uastring) {
      parser.setUA(uastring);
      var result = parser.getResult();
      for (var prop in result) {
        $.ua[prop] = result[prop];
      }
    };
  }
}
})(this); | |
},{}],47:[function(require,module,exports){ | |
(function (global){ | |
'use strict';
// Adapter under test; 'memory' keeps everything in RAM.
var opts = {adapter: 'memory'};
// Run every performance suite against the supplied PouchDB build.
function runTestSuites(PouchDB) {
  var reporter = require('./perf.reporter');
  reporter.log('Testing PouchDB version ' + PouchDB.version +
    (opts.adapter ? (', using adapter: ' + opts.adapter) : '') +
    '\n\n');
  require('./perf.basics')(PouchDB, opts);
  require('./perf.views')(PouchDB, opts);
}
var startNow = true;
if (startNow) {
  // global.PouchDB is provided by the pouchdb.js <script> tag loaded
  // before this bundle
  runTestSuites(global.PouchDB);
}
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./perf.basics":48,"./perf.reporter":49,"./perf.views":50}],48:[function(require,module,exports){ | |
'use strict'; | |
// Basic CRUD benchmarks: inserts, bulk inserts, gets, and allDocs
// pagination. Each case supplies setup(db, cb) run once, and
// test(db, itr, setupObj, done) run `iterations` times.
module.exports = function (PouchDB, opts) {
  // need to use bluebird for promises everywhere, so we're comparing
  // apples to apples
  var Promise = require('bluebird');
  var utils = require('./utils');
  // Zero-pad i to 10 digits so doc ids collate in numeric order.
  function createDocId(i) {
    var intString = i.toString();
    while (intString.length < 10) {
      intString = '0' + intString;
    }
    return 'doc_' + intString;
  }
  var testCases = [
    {
      // one post() per iteration
      name: 'basic-inserts',
      assertions: 1,
      iterations: 1000,
      setup: function (db, callback) {
        callback(null, {'yo': 'dawg'});
      },
      test: function (db, itr, doc, done) {
        db.post(doc, done);
      }
    }, {
      // one bulkDocs() of 100 docs per iteration
      name: 'bulk-inserts',
      assertions: 1,
      iterations: 100,
      setup: function (db, callback) {
        var docs = [];
        for (var i = 0; i < 100; i++) {
          docs.push({much : 'docs', very : 'bulk'});
        }
        // setup result already wrapped as {docs: [...]}, the shape
        // bulkDocs expects
        callback(null, {docs : docs});
      },
      test: function (db, itr, docs, done) {
        db.bulkDocs(docs, done);
      }
    }, {
      // one get() per iteration against a 10k-doc database; itr also
      // selects which doc to fetch
      name: 'basic-gets',
      assertions: 1,
      iterations: 10000,
      setup: function (db, callback) {
        var docs = [];
        for (var i = 0; i < 10000; i++) {
          docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
        }
        db.bulkDocs({docs : docs}, callback);
      },
      test: function (db, itr, docs, done) {
        db.get(createDocId(itr), done);
      }
    }, {
      // ten parallel skip/limit allDocs() reads per iteration
      name: 'all-docs-skip-limit',
      assertions: 1,
      iterations: 50,
      setup: function (db, callback) {
        var docs = [];
        for (var i = 0; i < 1000; i++) {
          docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
        }
        db.bulkDocs({docs : docs}, callback);
      },
      test: function (db, itr, docs, done) {
        var tasks = [];
        for (var i = 0; i < 10; i++) {
          tasks.push(i);
        }
        Promise.all(tasks.map(function (doc, i) {
          return db.allDocs({skip : i * 100, limit : 10});
        })).then(function () {
          done();
        }, done);
      }
    }, {
      // ten parallel startkey/endkey range reads per iteration
      name: 'all-docs-startkey-endkey',
      assertions: 1,
      iterations: 50,
      setup: function (db, callback) {
        var docs = [];
        for (var i = 0; i < 1000; i++) {
          docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
        }
        db.bulkDocs({docs : docs}, callback);
      },
      test: function (db, itr, docs, done) {
        var tasks = [];
        for (var i = 0; i < 10; i++) {
          tasks.push(i);
        }
        Promise.all(tasks.map(function (doc, i) {
          return db.allDocs({
            startkey : createDocId(i * 100),
            endkey : createDocId((i * 100) + 10)
          });
        })).then(function () {
          done();
        }, done);
      }
    }
  ];
  utils.runTests(PouchDB, 'basics', testCases, opts);
};
},{"./utils":51,"bluebird":20}],49:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict';
// Minimal benchmark reporter: appends output to the page's
// <pre id="output"> in the browser or logs to the console in node,
// and accumulates timing data on global.results for harvesting.
var isNode = process && !process.browser;
var UAParser = require('ua-parser-js');
// only built in the browser, where navigator exists
var ua = !isNode && new UAParser(navigator.userAgent);
global.results = {};
var pre = !isNode && global.document.getElementById('output');
// Append a message to the output <pre> (browser) or stdout (node).
function log(msg) {
  if (pre) {
    // NOTE(review): innerHTML concat reparses the element each call;
    // fine for this volume of output, but textContent would be cheaper
    pre.innerHTML = pre.innerHTML + msg;
  } else {
    console.log(msg);
  }
}
exports.log = log;
exports.startSuite = function (suiteName) {
  log('Starting suite: ' + suiteName + '\n\n');
};
// Record the start time for a test case, keyed by its name.
exports.start = function (testCase) {
  var key = testCase.name;
  log('Starting test: ' + key + ' with ' + testCase.assertions +
    ' assertions and ' + testCase.iterations + ' iterations... ');
  global.results[key] = {
    start: Date.now()
  };
};
// Record the end time and wall-clock duration for a test case.
exports.end = function (testCase) {
  var key = testCase.name;
  var obj = global.results[key];
  obj.end = Date.now();
  obj.duration = obj.end - obj.start;
  log('done in ' + obj.duration + 'ms\n');
};
// Mark the whole run finished and attach client/environment info.
exports.complete = function (suiteName) {
  global.results.completed = true;
  if (isNode) {
    global.results.client = {node: process.version};
  } else {
    global.results.client = {
      browser: ua.getBrowser(),
      device: ua.getDevice(),
      engine: ua.getEngine(),
      cpu: ua.getCPU(),
      os : ua.getOS(),
      userAgent: navigator.userAgent
    };
  }
  console.log(global.results);
  log('\nTests Complete!\n\n');
};
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"ua-parser-js":46}],50:[function(require,module,exports){ | |
'use strict'; | |
// Map/reduce query benchmarks: temp views, persisted views, and
// persisted views queried with stale:'ok'.
module.exports = function (PouchDB, opts) {
  // need to use bluebird for promises everywhere, so we're comparing
  // apples to apples
  var Promise = require('bluebird');
  var utils = require('./utils');
  // Twelve docs whose keys span the collation order used by views:
  // null, booleans, numbers, then strings.
  function makeTestDocs() {
    return [
      {key: null},
      {key: true},
      {key: false},
      {key: -1},
      {key: 0},
      {key: 1},
      {key: 2},
      {key: 3},
      {key: Math.random()},
      {key: 'bar' + Math.random()},
      {key: 'foo' + Math.random()},
      {key: 'foobar' + Math.random()}
    ];
  }
  var testCases = [
    {
      // query with an on-the-fly map function (no persisted index)
      name: 'temp-views',
      assertions: 1,
      iterations: 10,
      setup: function (db, callback) {
        var tasks = [];
        for (var i = 0; i < 100; i++) {
          tasks.push(i);
        }
        // 100 parallel bulkDocs of 12 docs each = 1200 docs total
        Promise.all(tasks.map(function () {
          return db.bulkDocs({docs : makeTestDocs()});
        })).then(function () {
          callback();
        }, callback);
      },
      test: function (db, itr, doc, done) {
        // five parallel queries per iteration; 'foo' appears twice —
        // presumably deliberate to exercise repeated identical queries
        var tasks = [
          {startkey: 'foo', limit: 5},
          {startkey: 'foobar', limit: 5},
          {startkey: 'foo', limit: 5},
          {startkey: -1, limit: 5},
          {startkey: null, limit: 5}
        ];
        Promise.all(tasks.map(function (task) {
          return db.query(function (doc) {
            emit(doc.key);
          }, task);
        })).then(function (res) {
          done();
        }, done);
      }
    },
    {
      // same queries against a persisted design-doc view; the index is
      // built once during setup so iterations measure lookups only
      name: 'persisted-views',
      assertions: 1,
      iterations: 10,
      setup: function (db, callback) {
        var tasks = [];
        for (var i = 0; i < 100; i++) {
          tasks.push(i);
        }
        Promise.all(tasks.map(function () {
          return db.bulkDocs({docs : makeTestDocs()});
        })).then(function () {
          return db.put({
            _id : '_design/myview',
            views : {
              myview : {
                map : function (doc) {
                  emit(doc.key);
                }.toString()
              }
            }
          });
        }).then(function () {
          // prime the index so the timed runs hit a warm view
          return db.query('myview/myview');
        }).then(function () {
          callback();
        }, callback);
      },
      test: function (db, itr, doc, done) {
        var tasks = [
          {startkey: 'foo', limit: 5},
          {startkey: 'foobar', limit: 5},
          {startkey: 'foo', limit: 5},
          {startkey: -1, limit: 5},
          {startkey: null, limit: 5}
        ];
        Promise.all(tasks.map(function (task) {
          return db.query('myview/myview', task);
        })).then(function (res) {
          done();
        }, done);
      }
    },
    {
      // persisted view queried with stale:'ok' — reads skip the
      // index-freshness check entirely
      name: 'persisted-views-stale-ok',
      assertions: 1,
      iterations: 10,
      setup: function (db, callback) {
        var tasks = [];
        for (var i = 0; i < 100; i++) {
          tasks.push(i);
        }
        Promise.all(tasks.map(function () {
          return db.bulkDocs({docs : makeTestDocs()});
        })).then(function () {
          return db.put({
            _id : '_design/myview',
            views : {
              myview : {
                map : function (doc) {
                  emit(doc.key);
                }.toString()
              }
            }
          });
        }).then(function () {
          return db.query('myview/myview');
        }).then(function () {
          callback();
        }, callback);
      },
      test: function (db, itr, doc, done) {
        var tasks = [
          {startkey: 'foo', limit: 5, stale : 'ok'},
          {startkey: 'foobar', limit: 5, stale : 'ok'},
          {startkey: 'foo', limit: 5, stale : 'ok'},
          {startkey: -1, limit: 5, stale : 'ok'},
          {startkey: null, limit: 5, stale : 'ok'}
        ];
        Promise.all(tasks.map(function (task) {
          return db.query('myview/myview', task);
        })).then(function (res) {
          done();
        }, done);
      }
    }
  ];
  utils.runTests(PouchDB, 'views', testCases, opts);
};
},{"./utils":51,"bluebird":20}],51:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict';
var reporter = require('./perf.reporter');
var test = require('tape');
// Optional filter: ?grep=foo in the browser URL or GREP=foo in the
// node environment limits which suites/cases run.
var grep;
if (global.window && global.window.location && global.window.location.search) {
  grep = global.window.location.search.match(/[&?]grep=([^&]+)/);
  grep = grep && grep[1];
} else if (process && process.env) {
  grep = process.env.GREP;
}
// Run each test case against a fresh database, timing it through the
// reporter. A case provides setup(db, cb), test(db, itr, setupObj,
// done), an `iterations` count and an `assertions` count.
exports.runTests = function (PouchDB, suiteName, testCases, opts) {
  testCases.forEach(function (testCase, i) {
    // skip cases matched by neither the suite name nor the case name
    if (grep && suiteName.indexOf(grep) === -1 &&
        testCase.name.indexOf(grep) === -1) {
      return;
    }
    test('benchmarking', function (t) {
      var db;
      var setupObj;
      // random suffix keeps reruns from colliding with leftover dbs
      var randomizer = Math.random();
      t.test('setup', function (t) {
        db = new PouchDB('test' + randomizer, opts);
        testCase.setup(db, function (err, res) {
          // NOTE(review): `err` is ignored here — a failed setup still
          // starts the timed run; confirm that is intended
          setupObj = res;
          if (i === 0) {
            reporter.startSuite(suiteName);
          }
          reporter.start(testCase);
          t.end();
        });
      });
      t.test(testCase.name, function (t) {
        t.plan(testCase.assertions);
        var num = 0;
        // run the case `iterations` times, chaining through nextTick
        // to avoid growing the stack across iterations
        function after(err) {
          if (err) {
            t.error(err);
            reporter.log(testCase.name + ' errored: ' + err.message + '\n');
          }
          if (++num < testCase.iterations) {
            process.nextTick(function () {
              testCase.test(db, num, setupObj, after);
            });
          } else {
            // NOTE(review): t.ok receives the message string as its
            // value, so it always passes; t.ok(true, msg) was likely
            // intended
            t.ok(testCase.name + ' completed');
          }
        }
        testCase.test(db, num, setupObj, after);
      });
      t.test('teardown', function (t) {
        reporter.end(testCase);
        // shadows the outer `opts` on purpose: destroy only needs the
        // adapter name
        var opts = {adapter : db.adapter};
        PouchDB.destroy('test' + randomizer, opts, function () {
          t.end();
          if (i === testCases.length - 1) {
            reporter.complete(suiteName);
          }
        });
      });
    });
  });
};
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./perf.reporter":49,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"tape":35}]},{},[47,48,49,50,51]) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ | |
(function (process,Buffer){ | |
'use strict'; | |
var levelup = require('levelup'); | |
var originalLeveldown = require('leveldown'); | |
var sublevel = require('level-sublevel'); | |
var through = require('through2').obj; | |
var errors = require('../deps/errors'); | |
var merge = require('../merge'); | |
var utils = require('../utils'); | |
var migrate = require('../deps/migrate'); | |
var vuvuzela = require('vuvuzela'); | |
var DOC_STORE = 'document-store'; | |
var BY_SEQ_STORE = 'by-sequence'; | |
var ATTACHMENT_STORE = 'attach-store'; | |
var BINARY_STORE = 'attach-binary-store'; | |
var LOCAL_STORE = 'local-store'; | |
var META_STORE = 'meta-store'; | |
var BATCH_SIZE = 50; | |
// leveldb barks if we try to open a db multiple times | |
// so we cache opened connections here for initstore() | |
var dbStores = new utils.Map(); | |
// store the value of update_seq in the by-sequence store the key name will | |
// never conflict, since the keys in the by-sequence store are integers | |
var UPDATE_SEQ_KEY = '_local_last_update_seq'; | |
var DOC_COUNT_KEY = '_local_doc_count'; | |
var UUID_KEY = '_local_uuid'; | |
var MD5_PREFIX = 'md5-'; | |
var vuvuEncoding = { | |
encode: vuvuzela.stringify, | |
decode: vuvuzela.parse, | |
buffer: false, | |
type: 'cheap-json' | |
}; | |
function LevelPouch(opts, callback) { | |
opts = utils.clone(opts); | |
var api = this; | |
var instanceId; | |
var stores = {}; | |
var db; | |
var name = opts.name; | |
if (typeof opts.createIfMissing === 'undefined') { | |
opts.createIfMissing = true; | |
} | |
var leveldown = opts.db || originalLeveldown; | |
if (typeof leveldown.destroy !== 'function') { | |
leveldown.destroy = function (name, cb) { cb(); }; | |
} | |
var dbStore; | |
if (dbStores.has(leveldown.name)) { | |
dbStore = dbStores.get(leveldown.name); | |
} else { | |
dbStore = new utils.Map(); | |
dbStores.set(leveldown.name, dbStore); | |
} | |
if (dbStore.has(name)) { | |
db = dbStore.get(name); | |
afterDBCreated(); | |
} else { | |
dbStore.set(name, sublevel(levelup(name, opts, function (err) { | |
if (err) { | |
dbStore["delete"](name); | |
return callback(err); | |
} | |
db = dbStore.get(name); | |
db._locks = db._locks || new utils.Set(); | |
db._docCountQueue = { | |
queue : [], | |
running : false, | |
docCount : -1 | |
}; | |
if (opts.db || opts.noMigrate) { | |
afterDBCreated(); | |
} else { | |
migrate.toSublevel(name, db, afterDBCreated); | |
} | |
}))); | |
} | |
  // Wire up the sublevel stores and load persisted bookkeeping values
  // (update_seq, doc count, instance uuid) before reporting ready.
  function afterDBCreated() {
    stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: vuvuEncoding});
    stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'});
    stores.attachmentStore =
      db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'});
    stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'});
    stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'});
    stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'});
    migrate.localAndMetaStores(db, stores, function () {
      stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) {
        if (typeof db._updateSeq === 'undefined') {
          db._updateSeq = value || 0;
        }
        stores.metaStore.get(DOC_COUNT_KEY, function (err, value) {
          db._docCountQueue.docCount = !err ? value : 0;
          countDocs(function (err) { // notify queue that the docCount is ready
            if (err) {
              api.emit('error', err);
            }
            stores.metaStore.get(UUID_KEY, function (err, value) {
              // persist a uuid on first open so _id() is stable
              instanceId = !err ? value : utils.uuid();
              stores.metaStore.put(UUID_KEY, instanceId, function (err, value) {
                process.nextTick(function () {
                  callback(null, api);
                });
              });
            });
          });
        });
      });
    });
  }
function countDocs(callback) { | |
if (db._docCountQueue.running || !db._docCountQueue.queue.length || | |
db._docCountQueue.docCount === -1) { | |
return incrementDocCount(0, callback); // wait for fresh data | |
} | |
return db._docCountQueue.docCount; // use cached value | |
} | |
function applyNextDocCountDelta() { | |
if (db._docCountQueue.running || !db._docCountQueue.queue.length || | |
db._docCountQueue.docCount === -1) { | |
return; | |
} | |
db._docCountQueue.running = true; | |
var item = db._docCountQueue.queue.shift(); | |
if (db.isClosed()) { | |
return item.callback(new Error('database is closed')); | |
} | |
stores.metaStore.get(DOC_COUNT_KEY, function (err, docCount) { | |
docCount = !err ? docCount : 0; | |
function complete(err) { | |
db._docCountQueue.docCount = docCount; | |
item.callback(err, docCount); | |
db._docCountQueue.running = false; | |
applyNextDocCountDelta(); | |
} | |
if (item.delta === 0) { | |
complete(); | |
} else { | |
stores.metaStore.put(DOC_COUNT_KEY, docCount + item.delta, complete); | |
} | |
}); | |
} | |
  // Queue a doc-count adjustment (delta may be 0 to just read the count);
  // callback fires once the delta has been applied in order.
  function incrementDocCount(delta, callback) {
    db._docCountQueue.queue.push({delta : delta, callback : callback});
    applyNextDocCountDelta();
  }
  api.type = function () {
    return 'leveldb';
  };
  // Persistent per-database uuid, generated on first open (see UUID_KEY).
  api._id = function (callback) {
    callback(null, instanceId);
  };
  // doc_count comes from the maintained counter; update_seq is read from
  // the meta store, falling back to the in-memory value if the read fails.
  api._info = function (callback) {
    countDocs(function (err, docCount) {
      if (err) {
        return callback(err);
      }
      stores.metaStore.get(UPDATE_SEQ_KEY, function (err, otherUpdateSeq) {
        if (err) {
          otherUpdateSeq = db._updateSeq;
        }
        return callback(null, {
          doc_count: docCount,
          update_seq: otherUpdateSeq
        });
      });
    });
  };
function formatSeq(n) { | |
return ('0000000000000000' + n).slice(-16); | |
} | |
function parseSeq(s) { | |
return parseInt(s, 10); | |
} | |
function makeDoc(rawDoc, callback) { | |
var doc = rawDoc.data; | |
doc._id = rawDoc.metadata.id; | |
if ('_rev' in doc) { | |
if (doc._rev !== rawDoc.metadata.rev) { | |
return callback(new Error('wrong doc returned')); | |
} | |
} else { | |
// we didn't always store rev | |
doc._rev = rawDoc.metadata.rev; | |
} | |
callback(null, {doc: doc, metadata: rawDoc.metadata}); | |
} | |
  // Fetch a single document. Listens for in-flight writes ('pouchdb-id-*'
  // and 'pouchdb-<seq>' events emitted by writeDoc) so a read that races a
  // write can return the just-written doc instead of a stale/missing one.
  api._get = function (id, opts, callback) {
    opts = utils.clone(opts);
    var docChanged = [];
    function didDocChange(doc) {
      docChanged.push(doc);
    }
    db.on('pouchdb-id-' + id, didDocChange);
    stores.docStore.get(id, function (err, metadata) {
      db.removeListener('pouchdb-id-' + id, didDocChange);
      if (err || !metadata) {
        return callback(errors.MISSING_DOC);
      }
      if (utils.isDeleted(metadata) && !opts.rev) {
        return callback(errors.error(errors.MISSING_DOC, "deleted"));
      }
      var updated;
      function ifUpdate(doc) {
        updated = doc;
      }
      // default to the winning rev unless the caller pinned one
      var rev = merge.winningRev(metadata);
      rev = opts.rev ? opts.rev : rev;
      var seq = metadata.rev_map[rev];
      // a write for this exact seq may have raced us; use it if so
      var anyChanged = docChanged.filter(function (doc) {
        return doc.metadata.seq === seq;
      });
      if (anyChanged.length) {
        return makeDoc(anyChanged.pop(), callback);
      }
      db.on('pouchdb-' + seq, ifUpdate);
      stores.bySeqStore.get(formatSeq(seq), function (err, doc) {
        db.removeListener('pouchdb-' + seq, ifUpdate);
        if (updated) {
          return makeDoc(updated, callback);
        }
        if (!doc) {
          return callback(errors.MISSING_DOC);
        }
        if ('_id' in doc && doc._id !== metadata.id) {
          // this failing implies something very wrong
          return callback(new Error('wrong doc returned'));
        }
        doc._id = metadata.id;
        if ('_rev' in doc) {
          if (doc._rev !== rev) {
            // this failing implies something very wrong
            return callback(new Error('wrong doc returned'));
          }
        } else {
          // we didn't always store this
          doc._rev = rev;
        }
        return callback(null, {doc: doc, metadata: metadata});
      });
    });
  };
  // not technically part of the spec, but if putAttachment has its own
  // method...
  // Read raw attachment data by digest; returns a base64 string when
  // opts.encode is set, otherwise a Blob (browser) or Buffer (node).
  api._getAttachment = function (attachment, opts, callback) {
    var digest = attachment.digest;
    stores.binaryStore.get(digest, function (err, attach) {
      var data;
      if (err && err.name === 'NotFoundError') {
        // Empty attachment
        data = opts.encode ? '' : process.browser ?
          utils.createBlob([''], {type: attachment.content_type}) :
          new Buffer('');
        return callback(null, data);
      }
      if (err) {
        return callback(err);
      }
      if (process.browser) {
        if (opts.encode) {
          data = utils.btoa(attach);
        } else {
          data = utils.createBlob([utils.fixBinary(attach)],
            {type: attachment.content_type});
        }
      } else {
        data = opts.encode ? utils.btoa(attach) : attach;
      }
      callback(null, data);
    });
  };
api.lock = function (id) { | |
if (db._locks.has(id)) { | |
return false; | |
} else { | |
db._locks.add(id); | |
return true; | |
} | |
}; | |
api.unlock = function (id) { | |
if (db._locks.has(id)) { | |
db._locks["delete"](id); | |
return true; | |
} | |
return false; | |
}; | |
  // Write a batch of documents. Each doc is parsed into {data, metadata},
  // tagged with its position (_bulk_seq) so results can be re-ordered, and
  // then processed with bounded concurrency by processDocs().
  api._bulkDocs = function (req, opts, callback) {
    var newEdits = opts.new_edits;
    var results = new Array(req.docs.length);
    // parse the docs and give each a sequence number
    var userDocs = req.docs;
    var info = userDocs.map(function (doc, i) {
      if (doc._id && utils.isLocalId(doc._id)) {
        // _local docs skip parseDoc and the rev-tree machinery
        return doc;
      }
      var newDoc = utils.parseDoc(doc, newEdits);
      newDoc._bulk_seq = i;
      if (newDoc.metadata && !newDoc.metadata.rev_map) {
        newDoc.metadata.rev_map = {};
      }
      return newDoc;
    });
    var current = 0;
    // any parse failure aborts the whole batch up front
    var infoErrors = info.filter(function (doc) {
      return doc.error;
    });
    if (infoErrors.length) {
      return callback(infoErrors[0]);
    }
    var inProgress = 0;
    // Pull docs off `info` one at a time, keeping at most BATCH_SIZE+1 in
    // flight; calls complete() once every doc has been handled.
    function processDocs() {
      var index = current;
      if (inProgress > BATCH_SIZE) {
        return;
      }
      if (index >= info.length) {
        if (inProgress === 0) {
          return complete();
        } else {
          return;
        }
      }
      var currentDoc = info[index];
      current++;
      inProgress++;
      if (currentDoc._id && utils.isLocalId(currentDoc._id)) {
        // _local docs are routed straight to the local store
        api[currentDoc._deleted ? '_removeLocal' : '_putLocal'](
          currentDoc, function (err, resp) {
            if (err) {
              results[index] = err;
            } else {
              results[index] = {};
            }
            inProgress--;
            processDocs();
          });
        return;
      }
      // per-doc lock so two docs with the same id in one batch (or two
      // concurrent batches) cannot interleave their read/merge/write
      if (!api.lock(currentDoc.metadata.id)) {
        results[index] = makeErr(errors.REV_CONFLICT,
          'someobody else is accessing this');
        inProgress--;
        return processDocs();
      }
      stores.docStore.get(currentDoc.metadata.id, function (err, oldDoc) {
        if (err) {
          if (err.name === 'NotFoundError') {
            insertDoc(currentDoc, index, function () {
              api.unlock(currentDoc.metadata.id);
              inProgress--;
              processDocs();
            });
          } else {
            err.error = true;
            results[index] = err;
            api.unlock(currentDoc.metadata.id);
            inProgress--;
            processDocs();
          }
        } else {
          updateDoc(oldDoc, currentDoc, index, function () {
            api.unlock(currentDoc.metadata.id);
            inProgress--;
            processDocs();
          });
        }
      });
      if (newEdits) {
        // with new_edits the docs are independent, so fan out eagerly
        processDocs();
      }
    }
    // Write a doc that has no existing metadata entry, bumping the doc
    // count only for live (non-deleted) documents.
    function insertDoc(doc, index, callback) {
      // Can't insert new deleted documents
      if ('was_delete' in opts && utils.isDeleted(doc.metadata)) {
        results[index] = makeErr(errors.MISSING_DOC, doc._bulk_seq);
        return callback();
      }
      writeDoc(doc, index, function (err) {
        if (err) {
          return callback(err);
        }
        if (utils.isDeleted(doc.metadata)) {
          return callback();
        }
        incrementDocCount(1, callback);
      });
    }
    // Merge an incoming doc into an existing rev tree, rejecting conflicts
    // under new_edits and adjusting the doc count when deletion flips.
    function updateDoc(oldDoc, docInfo, index, callback) {
      var merged =
        merge.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000);
      var conflict = (utils.isDeleted(oldDoc) &&
                      utils.isDeleted(docInfo.metadata) &&
                      newEdits) ||
        (!utils.isDeleted(oldDoc) &&
         newEdits && merged.conflicts !== 'new_leaf');
      if (conflict) {
        results[index] = makeErr(errors.REV_CONFLICT, docInfo._bulk_seq);
        return callback();
      }
      docInfo.metadata.rev_tree = merged.tree;
      // carry forward the existing rev -> seq map
      docInfo.metadata.rev_map = oldDoc.rev_map;
      var delta = 0;
      var oldDeleted = utils.isDeleted(oldDoc);
      var newDeleted = utils.isDeleted(docInfo.metadata);
      // -1 when a live doc is deleted, +1 when a deleted doc is revived
      delta = (oldDeleted === newDeleted) ? 0 :
        oldDeleted < newDeleted ? -1 : 1;
      incrementDocCount(delta, function (err) {
        if (err) {
          return callback(err);
        }
        writeDoc(docInfo, index, callback);
      });
    }
    // Persist one doc: hash and store each inline attachment, then write
    // the body (by-seq store) and metadata (doc store) in a single batch
    // and bump the persisted update_seq.
    function writeDoc(doc, index, callback2) {
      var err = null;
      var recv = 0;
      doc.data._id = doc.metadata.id;
      doc.data._rev = doc.metadata.rev;
      if (utils.isDeleted(doc.metadata)) {
        doc.data._deleted = true;
      }
      var attachments = doc.data._attachments ?
        Object.keys(doc.data._attachments) :
        [];
      // fires after each attachment settles; first error wins, and finish()
      // runs once every attachment has been accounted for
      function collectResults(attachmentErr) {
        if (!err) {
          if (attachmentErr) {
            err = attachmentErr;
            callback2(err);
          } else if (recv === attachments.length) {
            finish();
          }
        }
      }
      function attachmentSaved(err) {
        recv++;
        collectResults(err);
      }
      // factories capture loop variables so the async callbacks below do
      // not all close over the last iteration's key/data
      function onMD5Load(doc, key, data, attachmentSaved) {
        return function (result) {
          saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved);
        };
      }
      function onLoadEnd(doc, key, attachmentSaved) {
        return function (e) {
          var data = utils.arrayBufferToBinaryString(e.target.result);
          utils.MD5(data).then(
            onMD5Load(doc, key, data, attachmentSaved)
          );
        };
      }
      for (var i = 0; i < attachments.length; i++) {
        var key = attachments[i];
        if (doc.data._attachments[key].stub) {
          // stubs reference already-stored data; nothing to write
          recv++;
          collectResults();
          continue;
        }
        var att = doc.data._attachments[key];
        var data;
        if (typeof att.data === 'string') {
          try {
            data = utils.atob(att.data);
          } catch (e) {
            callback(utils.extend({}, errors.BAD_ARG,
              {reason: "Attachments need to be base64 encoded"}));
            return;
          }
        } else if (!process.browser) {
          data = att.data;
        } else { // browser
          var reader = new FileReader();
          reader.onloadend = onLoadEnd(doc, key, attachmentSaved);
          reader.readAsArrayBuffer(att.data);
          continue;
        }
        utils.MD5(data).then(
          onMD5Load(doc, key, data, attachmentSaved)
        );
      }
      function finish() {
        var seq = doc.metadata.rev_map[doc.metadata.rev];
        if (!seq) {
          // check that there aren't any existing revisions with the same
          // revision id, else we shouldn't increment updateSeq
          seq = ++db._updateSeq;
        }
        doc.metadata.rev_map[doc.metadata.rev] = doc.metadata.seq = seq;
        var seqKey = formatSeq(seq);
        // emitted before AND after the batch so concurrent _get calls can
        // pick up the in-flight doc (see the listeners in api._get)
        db.emit('pouchdb-id-' + doc.metadata.id, doc);
        db.emit('pouchdb-' + seqKey, doc);
        db.batch([{
          key: seqKey,
          value: doc.data,
          prefix: stores.bySeqStore,
          type: 'put',
          valueEncoding: 'json'
        }, {
          key: doc.metadata.id,
          value: doc.metadata,
          prefix: stores.docStore,
          type: 'put',
          valueEncoding: vuvuEncoding
        }], function (err) {
          if (!err) {
            db.emit('pouchdb-id-' + doc.metadata.id, doc);
            db.emit('pouchdb-' + seqKey, doc);
          }
          return stores.metaStore.put(UPDATE_SEQ_KEY, db._updateSeq,
            function (err) {
              if (err) {
                results[index] = err;
              } else {
                results[index] = doc;
              }
              return callback2();
            });
        });
      }
      if (!attachments.length) {
        finish();
      }
    }
    // Store an attachment under its digest and record a back-reference
    // from "docId@rev" so compaction can tell when the binary is unused.
    function saveAttachment(docInfo, digest, key, data, callback) {
      delete docInfo.data._attachments[key].data;
      docInfo.data._attachments[key].digest = digest;
      stores.attachmentStore.get(digest, function (err, oldAtt) {
        if (err && err.name !== 'NotFoundError') {
          return callback(err);
        }
        var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@');
        var newAtt = {};
        if (oldAtt) {
          if (oldAtt.refs) {
            // only update references if this attachment already has them
            // since we cannot migrate old style attachments here without
            // doing a full db scan for references
            newAtt.refs = oldAtt.refs;
            newAtt.refs[ref] = true;
          }
        } else {
          newAtt.refs = {};
          newAtt.refs[ref] = true;
        }
        stores.attachmentStore.put(digest, newAtt, function (err) {
          // do not try to store empty attachments
          if (data.length === 0) {
            return callback(err);
          }
          // doing this in batch causes a test to fail, wtf?
          stores.binaryStore.put(digest, data, function (err) {
            callback(err);
          });
        });
      });
    }
    // Shape the per-doc outcomes into the bulkDocs response format and
    // notify continuous-changes listeners for this database.
    function complete() {
      results.sort(function (a, b) { return a._bulk_seq - b._bulk_seq; });
      var aresults = results.map(function (result) {
        if (result._bulk_seq) {
          delete result._bulk_seq;
        } else if (!Object.keys(result).length) {
          // _local doc writes report a bare {ok: true}
          return {
            ok: true
          };
        }
        if (result.error) {
          return result;
        }
        var metadata = result.metadata;
        var rev = merge.winningRev(metadata);
        return {
          ok: true,
          id: metadata.id,
          rev: rev
        };
      });
      LevelPouch.Changes.notify(name);
      process.nextTick(function () { callback(null, aresults); });
    }
    // Tag an error object with the position of the doc that caused it so
    // complete() can sort results back into request order.
    function makeErr(err, seq) {
      err._bulk_seq = seq;
      return err;
    }
    processDocs();
  };
  // Stream the doc store in key order, applying startkey/endkey/key,
  // descending, skip and limit, optionally joining in the winning doc body.
  api._allDocs = function (opts, callback) {
    opts = utils.clone(opts);
    countDocs(function (err, docCount) {
      if (err) {
        return callback(err);
      }
      var readstreamOpts = {};
      var skip = opts.skip || 0;
      if (opts.startkey) {
        readstreamOpts.start = opts.startkey;
      }
      if (opts.endkey) {
        readstreamOpts.end = opts.endkey;
      }
      if (opts.key) {
        readstreamOpts.start = readstreamOpts.end = opts.key;
      }
      if (opts.descending) {
        readstreamOpts.reverse = true;
        // switch start and ends
        var tmp = readstreamOpts.start;
        readstreamOpts.start = readstreamOpts.end;
        readstreamOpts.end = tmp;
      }
      var limit;
      if (typeof opts.limit === 'number') {
        limit = opts.limit;
      } else {
        limit = -1;
      }
      if (limit === 0 ||
          ('start' in readstreamOpts && 'end' in readstreamOpts &&
          readstreamOpts.start > readstreamOpts.end)) {
        // should return 0 results when start is greater than end.
        // normally level would "fix" this for us by reversing the order,
        // so short-circuit instead
        return callback(null, {
          total_rows: docCount,
          offset: opts.skip,
          rows: []
        });
      }
      var results = [];
      var docstream = stores.docStore.readStream(readstreamOpts);
      var throughStream = through(function (entry, _, next) {
        if (!utils.isDeleted(entry.value)) {
          if (skip-- > 0) {
            next();
            return;
          } else if (limit-- === 0) {
            // limit exhausted: stop pulling from the doc store entirely
            docstream.unpipe();
            docstream.destroy();
            next();
            return;
          }
        } else if (opts.deleted !== 'ok') {
          next();
          return;
        }
        function allDocsInner(metadata, data) {
          var doc = {
            id: metadata.id,
            key: metadata.id,
            value: {
              rev: merge.winningRev(metadata)
            }
          };
          if (opts.include_docs) {
            doc.doc = data;
            doc.doc._rev = doc.value.rev;
            if (opts.conflicts) {
              doc.doc._conflicts = merge.collectConflicts(metadata);
            }
            // attachment bodies are never inlined here; mark them as stubs
            for (var att in doc.doc._attachments) {
              if (doc.doc._attachments.hasOwnProperty(att)) {
                doc.doc._attachments[att].stub = true;
              }
            }
          }
          if (opts.inclusive_end === false && metadata.id === opts.endkey) {
            return next();
          } else if (utils.isDeleted(metadata)) {
            if (opts.deleted === 'ok') {
              doc.value.deleted = true;
              doc.doc = null;
            } else {
              return next();
            }
          }
          results.push(doc);
          next();
        }
        var metadata = entry.value;
        if (opts.include_docs) {
          // join in the winning revision's body from the by-seq store
          var seq = metadata.rev_map[merge.winningRev(metadata)];
          stores.bySeqStore.get(formatSeq(seq), function (err, data) {
            allDocsInner(metadata, data);
          });
        }
        else {
          allDocsInner(metadata);
        }
      }, function (next) {
        callback(null, {
          total_rows: docCount,
          offset: opts.skip,
          rows: results
        });
        next();
      }).on('unpipe', function () {
        throughStream.end();
      });
      docstream.on('error', callback);
      docstream.pipe(throughStream);
    });
  };
  // Changes feed. Continuous mode registers with the shared Changes hub and
  // returns immediately; one-shot mode streams the by-seq store from
  // opts.since and reports matching changes.
  api._changes = function (opts) {
    opts = utils.clone(opts);
    if (opts.continuous) {
      var id = name + ':' + utils.uuid();
      LevelPouch.Changes.addListener(name, id, api, opts);
      LevelPouch.Changes.notify(name);
      return {
        cancel: function () {
          LevelPouch.Changes.removeListener(name, id);
        }
      };
    }
    var descending = opts.descending;
    var results = [];
    var last_seq = 0;
    var called = 0;
    var streamOpts = {
      reverse: descending
    };
    var limit;
    if ('limit' in opts && opts.limit > 0) {
      limit = opts.limit;
    }
    if (!streamOpts.reverse) {
      // since is exclusive: start one past the caller's last seen seq
      streamOpts.start = formatSeq(opts.since ? opts.since + 1 : 0);
    }
    var filter = utils.filterChange(opts);
    var returnDocs;
    if ('returnDocs' in opts) {
      returnDocs = opts.returnDocs;
    } else {
      returnDocs = true;
    }
    function complete() {
      opts.done = true;
      if (returnDocs && opts.limit) {
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }
      changeStream.unpipe(throughStream);
      changeStream.destroy();
      if (!opts.continuous && !opts.cancelled) {
        opts.complete(null, {results: results, last_seq: last_seq});
      }
    }
    var changeStream = stores.bySeqStore.readStream(streamOpts);
    var throughStream = through(function (data, _, next) {
      if (limit && called >= limit) {
        complete();
        return next();
      }
      if (opts.cancelled || opts.done) {
        return next();
      }
      stores.docStore.get(data.value._id, function (err, metadata) {
        if (opts.cancelled || opts.done || db.isClosed() ||
          utils.isLocalId(metadata.id)) {
          return next();
        }
        var doc = data.value;
        doc._rev = merge.winningRev(metadata);
        var change = opts.processChange(doc, metadata, opts);
        change.seq = metadata.seq;
        if (last_seq < metadata.seq) {
          last_seq = metadata.seq;
        }
        // Ensure duplicated dont overwrite winning rev
        if (parseSeq(data.key) === metadata.rev_map[change.doc._rev] &&
          filter(change)) {
          called++;
          utils.call(opts.onChange, change);
          if (returnDocs) {
            results.push(change);
          }
        }
        next();
      });
    }, function (next) {
      if (opts.cancelled) {
        return next();
      }
      if (returnDocs && opts.limit) {
        if (opts.limit < results.length) {
          results.length = opts.limit;
        }
      }
      next();
    }).on('unpipe', function () {
      throughStream.end();
      complete();
    });
    changeStream.pipe(throughStream);
    return {
      cancel: function () {
        opts.cancelled = true;
        complete();
      }
    };
  };
api._close = function (callback) { | |
if (db.isClosed()) { | |
return callback(errors.NOT_OPEN); | |
} | |
db.close(function (err) { | |
if (err) { | |
callback(err); | |
} else { | |
dbStore["delete"](name); | |
callback(); | |
} | |
}); | |
}; | |
api._getRevisionTree = function (docId, callback) { | |
stores.docStore.get(docId, function (err, metadata) { | |
if (err) { | |
callback(errors.MISSING_DOC); | |
} else { | |
callback(null, metadata.rev_tree); | |
} | |
}); | |
}; | |
  // Compaction: replace the doc's rev tree and delete the bodies of the
  // compacted revisions from the by-seq store in one atomic batch.
  api._doCompaction = function (docId, rev_tree, revs, callback) {
    stores.docStore.get(docId, function (err, metadata) {
      if (err) {
        return callback(err);
      }
      var seqs = metadata.rev_map; // map from rev to seq
      metadata.rev_tree = rev_tree;
      if (!revs.length) {
        return callback();
      }
      var batch = [];
      batch.push({
        key: metadata.id,
        value: metadata,
        type: 'put',
        valueEncoding: vuvuEncoding,
        prefix: stores.docStore
      });
      revs.forEach(function (rev) {
        var seq = seqs[rev];
        // revs with no recorded seq have nothing stored to delete
        if (!seq) {
          return;
        }
        batch.push({
          key: formatSeq(seq),
          type: 'del',
          prefix: stores.bySeqStore
        });
      });
      db.batch(batch, callback);
    });
  };
api._getLocal = function (id, callback) { | |
stores.localStore.get(id, function (err, doc) { | |
if (err) { | |
callback(errors.MISSING_DOC); | |
} else { | |
callback(null, doc); | |
} | |
}); | |
}; | |
  // Write a _local doc, enforcing the simple '0-N' rev scheme used for
  // local documents (no rev tree).
  api._putLocal = function (doc, callback) {
    delete doc._revisions; // ignore this, trust the rev
    var oldRev = doc._rev;
    var id = doc._id;
    stores.localStore.get(id, function (err, resp) {
      if (err) {
        // doc is missing: only a conflict if the caller claimed a prior rev
        if (oldRev) {
          return callback(errors.REV_CONFLICT);
        }
      }
      if (resp && resp._rev !== oldRev) {
        return callback(errors.REV_CONFLICT);
      }
      if (!oldRev) {
        doc._rev = '0-1';
      } else {
        doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
      }
      stores.localStore.put(id, doc, function (err) {
        if (err) {
          return callback(err);
        }
        var ret = {ok: true, id: doc._id, rev: doc._rev};
        callback(null, ret);
      });
    });
  };
  // Delete a _local doc; the caller must present the currently stored rev.
  api._removeLocal = function (doc, callback) {
    stores.localStore.get(doc._id, function (err, resp) {
      if (err) {
        return callback(err);
      }
      if (resp._rev !== doc._rev) {
        return callback(errors.REV_CONFLICT);
      }
      stores.localStore.del(doc._id, function (err) {
        if (err) {
          return callback(err);
        }
        var ret = {ok: true, id: doc._id, rev: '0-0'};
        callback(null, ret);
      });
    });
  };
} | |
// The raw leveldown adapter only works outside the browser.
LevelPouch.valid = function () {
  var isServerSide = process && !process.browser;
  return isServerSide;
};
// close and delete open leveldb stores
LevelPouch.destroy = utils.toPromise(function (name, opts, callback) {
  opts = utils.clone(opts);
  var leveldown = opts.db || originalLeveldown;
  // Invoke the backend's destroy if it has one, otherwise call back
  // asynchronously so callers always observe async completion.
  function callDestroy(name, cb) {
    if (typeof leveldown.destroy === 'function') {
      leveldown.destroy(name, cb);
    } else {
      // BUG FIX: previously scheduled the outer `callback` instead of the
      // `cb` parameter. Identical at the current call sites (all pass
      // `callback`), but wrong for any other continuation.
      process.nextTick(cb);
    }
  }
  var dbStore;
  if (dbStores.has(leveldown.name)) {
    dbStore = dbStores.get(leveldown.name);
  } else {
    return callDestroy(name, callback);
  }
  if (dbStore.has(name)) {
    // stop change feeds and close the cached handle before destroying
    LevelPouch.Changes.removeAllListeners(name);
    dbStore.get(name).close(function () {
      dbStore["delete"](name);
      callDestroy(name, callback);
    });
  } else {
    callDestroy(name, callback);
  }
});
// this adapter stores keys directly, without an extra string prefix
LevelPouch.use_prefix = false;
// shared notification hub used by continuous _changes listeners
LevelPouch.Changes = new utils.Changes();
module.exports = LevelPouch;
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"../deps/errors":5,"../deps/migrate":"6ZjBAW","../merge":8,"../utils":15,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"level-sublevel":35,"leveldown":"1XEYOF","levelup":49,"through2":112,"vuvuzela":113}],2:[function(require,module,exports){ | |
"use strict"; | |
var createBlob = require('./blob.js'); | |
var errors = require('./errors'); | |
var utils = require("../utils"); | |
var hasUpload; | |
// XHR-based HTTP helper used by the http adapter. Normalizes options,
// guards the callback against double invocation, and returns {abort}.
function ajax(options, adapterCallback) {
  var requestCompleted = false;
  // wrap the caller's callback so it can fire at most once
  var callback = utils.getArguments(function (args) {
    if (requestCompleted) {
      return;
    }
    adapterCallback.apply(this, args);
    requestCompleted = true;
  });
  if (typeof options === "function") {
    callback = options;
    options = {};
  }
  options = utils.clone(options);
  var defaultOptions = {
    method : "GET",
    headers: {},
    json: true,
    processData: true,
    timeout: 10000,
    cache: false
  };
  options = utils.extend(true, defaultOptions, options);
  // cache-buster, specifically designed to work around IE's aggressive caching
  // see http://www.dashbay.com/2011/05/internet-explorer-caches-ajax/
  if (options.method === 'GET' && !options.cache) {
    var hasArgs = options.url.indexOf('?') !== -1;
    options.url += (hasArgs ? '&' : '?') + '_nonce=' + utils.uuid(16);
  }
  // Normalize a 2xx response body (stringify/parse per options) and map
  // per-row bulk errors onto the shared PouchDB error constants.
  function onSuccess(obj, resp, cb) {
    if (!options.binary && !options.json && options.processData &&
      typeof obj !== 'string') {
      obj = JSON.stringify(obj);
    } else if (!options.binary && options.json && typeof obj === 'string') {
      try {
        obj = JSON.parse(obj);
      } catch (e) {
        // Probably a malformed JSON from server
        return cb(e);
      }
    }
    if (Array.isArray(obj)) {
      obj = obj.map(function (v) {
        var obj;
        if (v.ok) {
          return v;
        } else if (v.error && v.error === 'conflict') {
          // NOTE(review): these assign onto shared singleton error objects
          obj = errors.REV_CONFLICT;
          obj.id = v.id;
          return obj;
        } else if (v.error && v.error === 'forbidden') {
          obj = errors.FORBIDDEN;
          obj.id = v.id;
          obj.reason = v.reason;
          return obj;
        } else if (v.missing) {
          obj = errors.MISSING_DOC;
          obj.missing = v.missing;
          return obj;
        } else {
          return v;
        }
      });
    }
    cb(null, obj, resp);
  }
  // Translate an XHR failure into a PouchDB error object, matching first
  // on the server-reported error name, then on the HTTP status code.
  function onError(err, cb) {
    var errParsed, errObj, errType, key;
    try {
      errParsed = JSON.parse(err.responseText);
      //would prefer not to have a try/catch clause
      for (key in errors) {
        if (errors.hasOwnProperty(key) &&
            errors[key].name === errParsed.error) {
          errType = errors[key];
          break;
        }
      }
      if (!errType) {
        errType = errors.UNKNOWN_ERROR;
        if (err.status) {
          errType.status = err.status;
        }
        if (err.statusText) {
          err.name = err.statusText;
        }
      }
      errObj = errors.error(errType, errParsed.reason);
    } catch (e) {
      // response body was not JSON; fall back to matching the status code
      for (var key in errors) {
        if (errors.hasOwnProperty(key) && errors[key].status === err.status) {
          errType = errors[key];
          break;
        }
      }
      if (!errType) {
        errType = errors.UNKNOWN_ERROR;
        if (err.status) {
          errType.status = err.status;
        }
        if (err.statusText) {
          err.name = err.statusText;
        }
      }
      errObj = errors.error(errType);
    }
    if (err.withCredentials && err.status === 0) {
      // apparently this is what we get when the method
      // is reported as not allowed by CORS. so fudge it
      errObj.status = 405;
      errObj.statusText = "Method Not Allowed";
    }
    cb(errObj);
  }
  var timer;
  var xhr;
  if (options.xhr) {
    // caller-supplied XHR constructor (e.g. for CORS shims / testing)
    xhr = new options.xhr();
  } else {
    xhr = new XMLHttpRequest();
  }
  xhr.open(options.method, options.url);
  xhr.withCredentials = true;
  if (options.json) {
    options.headers.Accept = 'application/json';
    options.headers['Content-Type'] = options.headers['Content-Type'] ||
      'application/json';
    if (options.body &&
        options.processData &&
        typeof options.body !== "string") {
      options.body = JSON.stringify(options.body);
    }
  }
  if (options.binary) {
    xhr.responseType = 'arraybuffer';
  }
  // write a cookie directly; used for the Cookie pseudo-header below,
  // since XHR forbids setting the Cookie request header itself
  var createCookie = function (name, value, days) {
    var expires = "";
    if (days) {
      var date = new Date();
      date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000));
      expires = "; expires=" + date.toGMTString();
    }
    document.cookie = name + "=" + value + expires + "; path=/";
  };
  for (var key in options.headers) {
    if (key === 'Cookie') {
      var cookie = options.headers[key].split('=');
      createCookie(cookie[0], cookie[1], 10);
    } else {
      xhr.setRequestHeader(key, options.headers[key]);
    }
  }
  if (!("body" in options)) {
    options.body = null;
  }
  // Abort on timeout and surface the failure through onError (unless the
  // request already completed).
  var abortReq = function () {
    if (requestCompleted) {
      return;
    }
    xhr.abort();
    onError(xhr, callback);
  };
  xhr.onreadystatechange = function () {
    if (xhr.readyState !== 4 || requestCompleted) {
      return;
    }
    clearTimeout(timer);
    if (xhr.status >= 200 && xhr.status < 300) {
      var data;
      if (options.binary) {
        data = createBlob([xhr.response || ''], {
          type: xhr.getResponseHeader('Content-Type')
        });
      } else {
        data = xhr.responseText;
      }
      onSuccess(data, xhr, callback);
    } else {
      onError(xhr, callback);
    }
  };
  if (options.timeout > 0) {
    // idle timeout: reset the timer on every progress event so long
    // transfers are not killed while data is still flowing
    timer = setTimeout(abortReq, options.timeout);
    xhr.onprogress = function () {
      clearTimeout(timer);
      timer = setTimeout(abortReq, options.timeout);
    };
    if (typeof hasUpload === 'undefined') {
      // IE throws an error if you try to access it directly
      hasUpload = Object.keys(xhr).indexOf('upload') !== -1;
    }
    if (hasUpload) { // does not exist in ie9
      xhr.upload.onprogress = xhr.onprogress;
    }
  }
  if (options.body && (options.body instanceof Blob)) {
    // read the Blob into a binary string before sending
    var reader = new FileReader();
    reader.onloadend = function (e) {
      var binary = "";
      var bytes = new Uint8Array(this.result);
      var length = bytes.byteLength;
      for (var i = 0; i < length; i++) {
        binary += String.fromCharCode(bytes[i]);
      }
      binary = utils.fixBinary(binary);
      xhr.send(binary);
    };
    reader.readAsArrayBuffer(options.body);
  } else {
    xhr.send(options.body);
  }
  return {abort: abortReq};
}
module.exports = ajax;
},{"../utils":15,"./blob.js":3,"./errors":5}],3:[function(require,module,exports){ | |
(function (global){ | |
"use strict"; | |
//Abstracts constructing a Blob object, so it also works in older | |
//browsers that don't support the native Blob constructor. (i.e. | |
//old QtWebKit versions, at least). | |
// Build a Blob from `parts`, falling back to the deprecated vendor
// BlobBuilder API on engines whose Blob constructor is unusable.
function createBlob(parts, properties) {
  parts = parts || [];
  properties = properties || {};
  try {
    return new Blob(parts, properties);
  } catch (err) {
    if (err.name !== "TypeError") {
      throw err;
    }
    // old QtWebKit and friends only expose the builder API
    var Builder = global.BlobBuilder ||
      global.MSBlobBuilder ||
      global.MozBlobBuilder ||
      global.WebKitBlobBuilder;
    var builder = new Builder();
    parts.forEach(function (part) {
      builder.append(part);
    });
    return builder.getBlob(properties.type);
  }
}
module.exports = createBlob; | |
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],4:[function(require,module,exports){ | |
'use strict';
// Minimal string-keyed Map/Set shims (ES5-safe); see LazyMap/LazySet below.
exports.Map = LazyMap; // TODO: use ES6 map
exports.Set = LazySet; // TODO: use ES6 set
// based on https://github.com/montagejs/collections
// String-keyed map backed by a plain object. Keys are mangled with a '$'
// prefix so they can never collide with Object.prototype properties.
function LazyMap() {
  this.store = {};
}
LazyMap.prototype.mangle = function (key) {
  if (typeof key !== "string") {
    throw new TypeError("key must be a string but Got " + key);
  }
  return '$' + key;
};
LazyMap.prototype.unmangle = function (key) {
  return key.substring(1);
};
LazyMap.prototype.get = function (key) {
  var prop = this.mangle(key);
  return (prop in this.store) ? this.store[prop] : void 0;
};
LazyMap.prototype.set = function (key, value) {
  this.store[this.mangle(key)] = value;
  return true;
};
LazyMap.prototype.has = function (key) {
  return this.mangle(key) in this.store;
};
LazyMap.prototype["delete"] = function (key) {
  var prop = this.mangle(key);
  var present = prop in this.store;
  if (present) {
    delete this.store[prop];
  }
  return present;
};
LazyMap.prototype.forEach = function (cb) {
  var self = this;
  Object.keys(self.store).forEach(function (prop) {
    cb(self.store[prop], self.unmangle(prop));
  });
};
// Set built on LazyMap: membership only, values are just `true`.
function LazySet() {
  this.store = new LazyMap();
}
LazySet.prototype.add = function (key) {
  return this.store.set(key, true);
};
LazySet.prototype.has = function (key) {
  return this.store.has(key);
};
LazySet.prototype["delete"] = function (key) {
  return this.store["delete"](key);
};
},{}],5:[function(require,module,exports){ | |
"use strict"; | |
// Custom error type carrying CouchDB-style status/name/reason fields.
function PouchError(opts) {
  this.status = opts.status;
  this.name = opts.error;
  this.message = opts.reason;
  this.error = true;
}
// BUG FIX: this previously read `PouchError.prototype__proto__ = ...`
// (missing dot), which only created an unused static property and left
// instances NOT `instanceof Error`. Link the prototype chain properly.
PouchError.prototype.__proto__ = Error.prototype;
// Serialize as JSON so HTTP-style handlers can relay the error body.
PouchError.prototype.toString = function () {
  return JSON.stringify({
    status: this.status,
    name: this.name,
    message: this.message
  });
};
// Singleton error instances mirroring CouchDB's HTTP error responses.
// Callers compare against these or derive one-offs via exports.error().
exports.UNAUTHORIZED = new PouchError({
  status: 401,
  error: 'unauthorized',
  reason: "Name or password is incorrect."
});
exports.MISSING_BULK_DOCS = new PouchError({
  status: 400,
  error: 'bad_request',
  reason: "Missing JSON list of 'docs'"
});
exports.MISSING_DOC = new PouchError({
  status: 404,
  error: 'not_found',
  reason: 'missing'
});
exports.REV_CONFLICT = new PouchError({
  status: 409,
  error: 'conflict',
  reason: 'Document update conflict'
});
exports.INVALID_ID = new PouchError({
  status: 400,
  error: 'invalid_id',
  reason: '_id field must contain a string'
});
exports.MISSING_ID = new PouchError({
  status: 412,
  error: 'missing_id',
  reason: '_id is required for puts'
});
exports.RESERVED_ID = new PouchError({
  status: 400,
  error: 'bad_request',
  reason: 'Only reserved document ids may start with underscore.'
});
exports.NOT_OPEN = new PouchError({
  status: 412,
  error: 'precondition_failed',
  reason: 'Database not open'
});
exports.UNKNOWN_ERROR = new PouchError({
  status: 500,
  error: 'unknown_error',
  reason: 'Database encountered an unknown error'
});
exports.BAD_ARG = new PouchError({
  status: 500,
  error: 'badarg',
  reason: 'Some query argument is invalid'
});
exports.INVALID_REQUEST = new PouchError({
  status: 400,
  error: 'invalid_request',
  reason: 'Request was invalid'
});
exports.QUERY_PARSE_ERROR = new PouchError({
  status: 400,
  error: 'query_parse_error',
  reason: 'Some query parameter is invalid'
});
exports.DOC_VALIDATION = new PouchError({
  status: 500,
  error: 'doc_validation',
  reason: 'Bad special document member'
});
exports.BAD_REQUEST = new PouchError({
  status: 400,
  error: 'bad_request',
  reason: 'Something wrong with the request'
});
exports.NOT_AN_OBJECT = new PouchError({
  status: 400,
  error: 'bad_request',
  reason: 'Document must be a JSON object'
});
exports.DB_MISSING = new PouchError({
  status: 404,
  error: 'not_found',
  reason: 'Database not found'
});
// Backend-specific failures (IndexedDB / WebSQL / LevelDB adapters).
exports.IDB_ERROR = new PouchError({
  status: 500,
  error: 'indexed_db_went_bad',
  reason: 'unknown'
});
exports.WSQ_ERROR = new PouchError({
  status: 500,
  error: 'web_sql_went_bad',
  reason: 'unknown'
});
exports.LDB_ERROR = new PouchError({
  status: 500,
  // NOTE(review): 'went_went' reads like a typo, but it is the
  // identifier this code ships — renaming could break error matchers.
  error: 'levelDB_went_went_bad',
  reason: 'unknown'
});
exports.FORBIDDEN = new PouchError({
  status: 403,
  error: 'forbidden',
  reason: 'Forbidden by design doc validate_doc_update function'
});
exports.error = function (error, reason, name) { | |
function CustomPouchError(msg) { | |
this.message = reason; | |
if (name) { | |
this.name = name; | |
} | |
} | |
CustomPouchError.prototype = error; | |
return new CustomPouchError(reason); | |
}; | |
},{}],6:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict'; | |
var crypto = require('crypto'); | |
var Md5 = require('spark-md5'); | |
var setImmediateShim = global.setImmediate || global.setTimeout; | |
// ArrayBuffer#slice with a manual fallback for engines that lack the
// native method (old IE). Mirrors Chrome's semantics: missing args
// default to 0 and are floored, negative indices count from the end,
// and the resulting range is clamped to the buffer.
function sliceShim(arrayBuffer, begin, end) {
  if (typeof arrayBuffer.slice === 'function') {
    // Fast path: delegate to the native implementation, preserving the
    // distinction between zero, one, and two supplied arguments.
    if (!begin) {
      return arrayBuffer.slice();
    }
    if (!end) {
      return arrayBuffer.slice(begin);
    }
    return arrayBuffer.slice(begin, end);
  }
  // Manual copy path, courtesy of http://stackoverflow.com/a/21440217
  var from = Math.floor(begin || 0);
  var to = Math.floor(end || 0);
  var total = arrayBuffer.byteLength;
  from = from < 0 ? Math.max(from + total, 0) : Math.min(total, from);
  to = to < 0 ? Math.max(to + total, 0) : Math.min(total, to);
  var count = to - from;
  // A non-positive computed length yields an empty buffer.
  if (count <= 0) {
    return new ArrayBuffer(0);
  }
  var target = new ArrayBuffer(count);
  new Uint8Array(target).set(new Uint8Array(arrayBuffer, from, count));
  return target;
}
// convert a 64-bit int to a binary string | |
// Encode the low 32 bits of `int` as four binary characters,
// little-endian byte order.
function intToString(int) {
  var out = '';
  for (var shift = 0; shift < 32; shift += 8) {
    out += String.fromCharCode((int >>> shift) & 0xff);
  }
  return out;
}
// Concatenate the 4-byte encodings of every int in `raw` and
// base64-encode the resulting binary string.
function rawToBase64(raw) {
  var parts = [];
  for (var i = 0; i < raw.length; i++) {
    parts.push(intToString(raw[i]));
  }
  return global.btoa(parts.join(''));
}
module.exports = function (data, callback) { | |
if (!process.browser) { | |
var base64 = crypto.createHash('md5').update(data).digest('base64'); | |
callback(null, base64); | |
return; | |
} | |
var inputIsString = typeof data === 'string'; | |
var len = inputIsString ? data.length : data.byteLength; | |
var chunkSize = Math.min(524288, len); | |
var chunks = Math.ceil(len / chunkSize); | |
var currentChunk = 0; | |
var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer(); | |
function append(buffer, data, start, end) { | |
if (inputIsString) { | |
buffer.appendBinary(data.substring(start, end)); | |
} else { | |
buffer.append(sliceShim(data, start, end)); | |
} | |
} | |
function loadNextChunk() { | |
var start = currentChunk * chunkSize; | |
var end = start + chunkSize; | |
if ((start + chunkSize) >= data.size) { | |
end = data.size; | |
} | |
currentChunk++; | |
if (currentChunk < chunks) { | |
append(buffer, data, start, end); | |
setImmediateShim(loadNextChunk); | |
} else { | |
append(buffer, data, start, end); | |
var raw = buffer.end(true); | |
var base64 = rawToBase64(raw); | |
callback(null, base64); | |
buffer.destroy(); | |
} | |
} | |
loadNextChunk(); | |
}; | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"crypto":17,"spark-md5":97}],7:[function(require,module,exports){ | |
"use strict"; | |
// BEGIN Math.uuid.js | |
/*! | |
Math.uuid.js (v1.4) | |
http://www.broofa.com | |
mailto:[email protected] | |
Copyright (c) 2010 Robert Kieffer | |
Dual licensed under the MIT and GPL licenses. | |
*/ | |
/* | |
* Generate a random uuid. | |
* | |
* USAGE: Math.uuid(length, radix) | |
* length - the desired number of characters | |
* radix - the number of allowable values for each character. | |
* | |
* EXAMPLES: | |
* // No arguments - returns RFC4122, version 4 ID | |
* >>> Math.uuid() | |
* "92329D39-6F5C-4520-ABFC-AAB64544E172" | |
* | |
* // One argument - returns ID of the specified length | |
* >>> Math.uuid(15) // 15 character ID (default base=62) | |
* "VcydxgltxrVZSTV" | |
* | |
* // Two arguments - returns ID of the specified length, and radix. | |
* // (Radix must be <= 62) | |
* >>> Math.uuid(8, 2) // 8 character ID (base=2) | |
* "01001010" | |
* >>> Math.uuid(8, 10) // 8 character ID (base=10) | |
* "47473046" | |
* >>> Math.uuid(8, 16) // 8 character ID (base=16) | |
* "098F4D35" | |
*/ | |
// 62-symbol alphabet: digits, then upper-case, then lower-case letters.
var chars = (
  '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
  'abcdefghijklmnopqrstuvwxyz'
).split('');
// Pick a uniform random index in [0, radix).
function getValue(radix) {
  return Math.floor(Math.random() * radix);
}
// Generate a random id:
//   uuid()           -> 36-char RFC4122-style id (hex + dashes)
//   uuid(len)        -> `len` chars drawn from the base-62 alphabet
//   uuid(len, radix) -> `len` chars from the first `radix` symbols
function uuid(len, radix) {
  radix = radix || chars.length;
  if (len) {
    // Compact form: `len` independent random symbols.
    var compact = '';
    for (var i = 0; i < len; i++) {
      compact += chars[getValue(radix)];
    }
    return compact;
  }
  // RFC4122-style form: hex digits with dashes at fixed offsets; the
  // high bits of position 19 are forced to the 10xx variant as per
  // rfc4122 sec. 4.1.5.
  var out = '';
  for (var pos = 0; pos < 36; pos++) {
    if (pos === 8 || pos === 13 || pos === 18 || pos === 23) {
      out += '-';
    } else if (pos === 19) {
      out += chars[(getValue(16) & 0x3) | 0x8];
    } else {
      out += chars[getValue(16)];
    }
  }
  return out;
}
module.exports = uuid; | |
},{}],8:[function(require,module,exports){ | |
'use strict'; | |
var extend = require('pouchdb-extend'); | |
// for a better overview of what this is doing, read: | |
// https://github.com/apache/couchdb/blob/master/src/couchdb/couch_key_tree.erl | |
// | |
// But for a quick intro: CouchDB uses a revision tree to store a document's
// history, A -> B -> C. When a document has conflicts, that is a branch in the
// tree, A -> (B1 | B2 -> C). We store these as a nested array in the format
// | |
// KeyTree = [Path ... ] | |
// Path = {pos: position_from_root, ids: Tree} | |
// Tree = [Key, Opts, [Tree, ...]], in particular single node: [Key, []] | |
// Turn a path as a flat array into a tree with a single branch | |
// Convert a flat path (array of {id, opts}) into a single-branch tree
// in the [id, opts, children] node format. NOTE: consumes `path` via
// shift(), emptying the caller's array (preserved behavior).
function pathToTree(path) {
  var node = path.shift();
  var root = [node.id, node.opts, []];
  var tail = root;
  while (path.length > 0) {
    node = path.shift();
    var next = [node.id, node.opts, []];
    tail[2].push(next);
    tail = next;
  }
  return root;
}
// Merge two trees together | |
// The roots of tree1 and tree2 must be the same revision | |
// Merges `in_tree2` into `in_tree1` IN PLACE (both in the [id, opts,
// children] node format; roots must be the same revision) and reports
// whether new leaves or branches appeared.
// Returns {conflicts: false | 'new_leaf' | 'new_branch', tree: in_tree1}.
function mergeTree(in_tree1, in_tree2) {
  // Iterative traversal: each queue item pairs up the nodes that
  // represent the same revision in both trees.
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;
    // Revision status: 'available' from either side wins.
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
         tree2[1].status === 'available') ? 'available' : 'missing';
    }
    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children here: adopt tree2's child wholesale.
        // NOTE(review): only slot [0] is checked, which assumes a
        // childless node — confirm against callers.
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }
      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        // Same child revision id on both sides: descend via the queue.
        if (tree1[2][j][0] === tree2[2][i][0]) {
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // Unmatched child: a brand-new branch, i.e. a conflict.
        conflicts = 'new_branch';
        tree1[2].push(tree2[2][i]);
        tree1[2].sort();
      }
    }
  }
  return {conflicts: conflicts, tree: in_tree1};
}
// Merge the single-branch `path` ({pos, ids}) into the forest `tree`
// (an array of {pos, ids} branches). When `dontExpand` is true,
// branches rooted at different depths are NOT spliced together (used
// after stemming). Returns {tree, conflicts} where conflicts is
// 'new_leaf', 'new_branch' or 'internal_node'.
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;
  // Empty forest: the path becomes the only branch.
  if (!tree.length) {
    return {tree: [path], conflicts: 'new_leaf'};
  }
  tree.forEach(function (branch) {
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // to be merged node-by-node.
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths start at different positions: take the earlier-rooted
      // path (t1) and walk it down until it reaches the same distance
      // from the root as the later one (t2). If the ids match there, we
      // keep the longer path with the other merged into it. (After
      // stemming we don't want to expand the trees, hence dontExpand.)
      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      var diff = t2.pos - t1.pos;
      var candidateParents = [];
      // Depth-first walk recording, for each node exactly `diff` levels
      // down, where it hangs off its parent so the merged subtree can
      // be spliced back in below.
      var trees = [];
      trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        if (!item.ids) {
          continue;
        }
        /*jshint loopfunc:true */
        item.ids[2].forEach(function (el, idx) {
          trees.push(
            {ids: el, diff: item.diff - 1, parent: item.ids, parentIdx: idx});
        });
      }
      var el = candidateParents[0];
      if (!el) {
        // No splice point found: keep this branch untouched.
        restree.push(branch);
      } else {
        // Merge t2 into the matching subtree and splice it in place.
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  });
  // We didn't find anything to merge `path` into: it becomes a new branch.
  if (!merged) {
    restree.push(path);
  }
  // Keep branches ordered by their root position.
  restree.sort(function (a, b) {
    return a.pos - b.pos;
  });
  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
// To ensure we dont grow the revision tree infinitely, we stem old revisions:
// keep only the last `depth` revisions of every root-to-leaf path and
// re-merge the truncated paths into a fresh forest.
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths,
  // we cut off the start of the path and generate a new set of flat trees
  var stemmedPaths = PouchMerge.rootToLeaf(tree).map(function (path) {
    var stemmed = path.ids.slice(-depth);
    return {
      pos: path.pos + (path.ids.length - stemmed.length),
      ids: pathToTree(stemmed)
    };
  });
  // Then we remerge all those flat trees together, ensuring that we dont
  // connect trees that would go beyond the depth limit (dontExpand=true).
  // The shift() seeds the accumulator with the first path, so reduce
  // only processes the remaining paths.
  return stemmedPaths.reduce(function (prev, current, i, arr) {
    return doMerge(prev, current, true).tree;
  }, [stemmedPaths.shift()]);
}
// Public API for revision-tree manipulation.
var PouchMerge = {};
// Merge `path` into `tree`, then stem the result to `depth` revisions.
// Inputs are deep-cloned first so the caller's structures are untouched.
PouchMerge.merge = function (tree, path, depth) {
  var treeCopy = extend(true, [], tree);
  var pathCopy = extend(true, {}, path);
  var merged = doMerge(treeCopy, pathCopy);
  return {
    tree: stem(merged.tree, depth),
    conflicts: merged.conflicts
  };
};
// Pick the winning revision: deleted leaves lose, then deeper leaves
// (most edits) win, then the lexicographically larger id breaks ties.
// The sort rationale is sketched in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
PouchMerge.winningRev = function (metadata) {
  var candidates = [];
  PouchMerge.traverseRevTree(metadata.rev_tree,
    function (isLeaf, pos, id, something, opts) {
      if (isLeaf) {
        candidates.push({pos: pos, id: id, deleted: !!opts.deleted});
      }
    });
  candidates.sort(function (a, b) {
    if (a.deleted !== b.deleted) {
      return a.deleted > b.deleted ? 1 : -1;
    }
    if (a.pos !== b.pos) {
      return b.pos - a.pos;
    }
    return a.id < b.id ? 1 : -1;
  });
  var winner = candidates[0];
  return winner.pos + '-' + winner.id;
};
// Depth-first walk over every node of every branch in `revs`, calling
// callback(isLeaf, pos, id, ctx, opts). The callback's return value is
// threaded down as `ctx` to that node's children.
PouchMerge.traverseRevTree = function (revs, callback) {
  var stack = revs.slice();
  var entry;
  while ((entry = stack.pop())) {
    var pos = entry.pos;
    var node = entry.ids;
    var children = node[2];
    var childCtx =
      callback(children.length === 0, pos, node[0], entry.ctx, node[1]);
    for (var i = 0, len = children.length; i < len; i++) {
      stack.push({pos: pos + 1, ids: children[i], ctx: childCtx});
    }
  }
};
// List every leaf as {rev, opts}, deepest first.
PouchMerge.collectLeaves = function (revs) {
  var leaves = [];
  PouchMerge.traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.unshift({rev: pos + "-" + id, pos: pos, opts: opts});
    }
  });
  leaves.sort(function (a, b) {
    return b.pos - a.pos;
  });
  // `pos` was only needed for sorting; strip it from the results.
  leaves.forEach(function (leaf) {
    delete leaf.pos;
  });
  return leaves;
};
// Leaf revisions that are neither deleted nor the winning revision.
PouchMerge.collectConflicts = function (metadata) {
  var winner = PouchMerge.winningRev(metadata);
  var conflicts = [];
  PouchMerge.collectLeaves(metadata.rev_tree).forEach(function (leaf) {
    if (leaf.rev !== winner && !leaf.opts.deleted) {
      conflicts.push(leaf.rev);
    }
  });
  return conflicts;
};
// Expand the tree into one flat root-to-leaf path per leaf, each as
// {pos: depth_of_root, ids: [{id, opts}, ...]}.
PouchMerge.rootToLeaf = function (tree) {
  var paths = [];
  PouchMerge.traverseRevTree(tree, function (isLeaf, pos, id, history, opts) {
    history = history ? history.slice(0) : [];
    history.push({id: id, opts: opts});
    if (isLeaf) {
      paths.unshift({pos: pos + 1 - history.length, ids: history});
    }
    return history;
  });
  return paths;
};
module.exports = PouchMerge; | |
},{"pouchdb-extend":96}],"adapter-config":[function(require,module,exports){ | |
module.exports=require('GfW1tc'); | |
},{}],"GfW1tc":[function(require,module,exports){ | |
'use strict'; | |
module.exports = {
  // Adapter registration info for the in-memory backend.
  name: 'memory',
  // The memory adapter has no environment requirements, so it is
  // always valid.
  valid: function () {
    return true;
  },
  // Database names are used as-is (no adapter-specific prefix).
  use_prefix: false
};
},{}],11:[function(require,module,exports){ | |
"use strict"; | |
var adapterConfig = require('adapter-config'); | |
var adapterName = adapterConfig.name; | |
var adapter = require('./levelalt'); | |
window.PouchDB.adapter(adapterName, adapter); | |
window.PouchDB.preferredAdapters.push(adapterName); | |
},{"./levelalt":12,"adapter-config":"GfW1tc"}],12:[function(require,module,exports){ | |
'use strict'; | |
var LevelPouch = require('../adapters/leveldb'); | |
var leveldown = require('leveldown'); | |
var adapterConfig = require('adapter-config'); | |
var utils = require('../utils'); | |
// A LevelPouch preconfigured with the LevelDOWN-compatible backend
// supplied by the `leveldown` shim for this build.
function LevelPouchAlt(opts, callback) {
  var options = utils.extend({
    db: leveldown
  }, opts);
  LevelPouch.call(this, options, callback);
}
// overrides for normal LevelDB behavior on Node
// Validity is delegated to the adapter config.
LevelPouchAlt.valid = function () {
  return adapterConfig.valid();
};
LevelPouchAlt.use_prefix = adapterConfig.use_prefix;
// destroy(name[, opts][, callback]) — promise- or callback-style.
LevelPouchAlt.destroy = utils.toPromise(function (name, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  var options = utils.extend({
    db: leveldown
  }, opts);
  return LevelPouch.destroy(name, options, callback);
});
module.exports = LevelPouchAlt; | |
},{"../adapters/leveldb":1,"../utils":15,"adapter-config":"GfW1tc","leveldown":"1XEYOF"}],"../adapters/../deps/migrate":[function(require,module,exports){ | |
module.exports=require('6ZjBAW'); | |
},{}],"6ZjBAW":[function(require,module,exports){ | |
(function (process){ | |
'use strict'; | |
// LevelAlt doesn't need the pre-2.2.0 LevelDB-specific migrations | |
exports.toSublevel = function (name, db, callback) { | |
process.nextTick(function () { | |
callback(); | |
}); | |
}; | |
exports.localAndMetaStores = function (db, stores, callback) { | |
process.nextTick(function () { | |
callback(); | |
}); | |
}; | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],15:[function(require,module,exports){ | |
(function (process,global){ | |
/*jshint strict: false */ | |
/*global chrome */ | |
var merge = require('./merge'); | |
exports.extend = require('pouchdb-extend'); | |
exports.ajax = require('./deps/ajax'); | |
exports.createBlob = require('./deps/blob'); | |
exports.uuid = require('./deps/uuid'); | |
exports.getArguments = require('argsarray'); | |
var buffer = require('./deps/buffer'); | |
var errors = require('./deps/errors'); | |
var EventEmitter = require('events').EventEmitter; | |
var collections = require('./deps/collections'); | |
exports.Map = collections.Map; | |
exports.Set = collections.Set; | |
if (typeof global.Promise === 'function') { | |
exports.Promise = global.Promise; | |
} else { | |
exports.Promise = require('bluebird'); | |
} | |
var Promise = exports.Promise; | |
// Build a lookup table: each array element becomes a key mapped to true.
function toObject(array) {
  var lookup = {};
  for (var i = 0; i < array.length; i++) {
    lookup[array[i]] = true;
  }
  return lookup;
}
// List of top level reserved words for doc: the only underscore-
// prefixed keys parseDoc() accepts; any other '_' key is rejected.
var reservedWords = toObject([
  '_id',
  '_rev',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  //replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
// List of reserved words that should end up in the document body
// (rather than being stripped into metadata by parseDoc()).
var dataWords = toObject([
  '_attachments',
  //replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
exports.clone = function (obj) { | |
return exports.extend(true, {}, obj); | |
}; | |
exports.inherits = require('inherits'); | |
// Determine if an ID is valid
// - invalid IDs begin with an underscore but do not start with '_design' or
// '_local'
// - any other string value is a valid id
// Throws the specific error object for each case
exports.invalidIdError = function (id) { | |
var err; | |
if (!id) { | |
err = new TypeError(errors.MISSING_ID.message); | |
err.status = 412; | |
} else if (typeof id !== 'string') { | |
err = new TypeError(errors.INVALID_ID.message); | |
err.status = 400; | |
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { | |
err = new TypeError(errors.RESERVED_ID.message); | |
err.status = 400; | |
} | |
if (err) { | |
throw err; | |
} | |
}; | |
// True when running inside a Chrome packaged app (the only environment
// exposing chrome.storage.local).
function isChromeApp() {
  return typeof chrome !== "undefined" &&
    typeof chrome.storage !== "undefined" &&
    typeof chrome.storage.local !== "undefined";
}
// Pretty dumb name for a function: it just wraps callback calls so we don't
// have to write `if (callback) callback()` everywhere
exports.call = exports.getArguments(function (args) { | |
if (!args.length) { | |
return; | |
} | |
var fun = args.shift(); | |
if (typeof fun === 'function') { | |
fun.apply(this, args); | |
} | |
}); | |
exports.isLocalId = function (id) { | |
return (/^_local/).test(id); | |
}; | |
// check if a specific revision of a doc has been deleted | |
// - metadata: the metadata object from the doc store | |
// - rev: (optional) the revision to check. defaults to winning revision | |
exports.isDeleted = function (metadata, rev) { | |
if (!rev) { | |
rev = merge.winningRev(metadata); | |
} | |
var dashIndex = rev.indexOf('-'); | |
if (dashIndex !== -1) { | |
rev = rev.substring(dashIndex + 1); | |
} | |
var deleted = false; | |
merge.traverseRevTree(metadata.rev_tree, | |
function (isLeaf, pos, id, acc, opts) { | |
if (id === rev) { | |
deleted = !!opts.deleted; | |
} | |
}); | |
return deleted; | |
}; | |
exports.filterChange = function (opts) { | |
return function (change) { | |
var req = {}; | |
var hasFilter = opts.filter && typeof opts.filter === 'function'; | |
req.query = opts.query_params; | |
if (opts.filter && hasFilter && !opts.filter.call(this, change.doc, req)) { | |
return false; | |
} | |
if (opts.doc_ids && opts.doc_ids.indexOf(change.id) === -1) { | |
return false; | |
} | |
if (!opts.include_docs) { | |
delete change.doc; | |
} else { | |
for (var att in change.doc._attachments) { | |
if (change.doc._attachments.hasOwnProperty(att)) { | |
change.doc._attachments[att].stub = true; | |
} | |
} | |
} | |
return true; | |
}; | |
}; | |
// Preprocess documents, parse their revisions, assign an id and a | |
// revision for new writes that are missing them, etc | |
exports.parseDoc = function (doc, newEdits) { | |
var nRevNum; | |
var newRevId; | |
var revInfo; | |
var error; | |
var opts = {status: 'available'}; | |
if (doc._deleted) { | |
opts.deleted = true; | |
} | |
if (newEdits) { | |
if (!doc._id) { | |
doc._id = exports.uuid(); | |
} | |
newRevId = exports.uuid(32, 16).toLowerCase(); | |
if (doc._rev) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
if (!revInfo) { | |
var err = new TypeError("invalid value for property '_rev'"); | |
err.status = 400; | |
} | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], {status: 'missing'}, [[newRevId, opts, []]]] | |
}]; | |
nRevNum = parseInt(revInfo[1], 10) + 1; | |
} else { | |
doc._rev_tree = [{ | |
pos: 1, | |
ids : [newRevId, opts, []] | |
}]; | |
nRevNum = 1; | |
} | |
} else { | |
if (doc._revisions) { | |
doc._rev_tree = [{ | |
pos: doc._revisions.start - doc._revisions.ids.length + 1, | |
ids: doc._revisions.ids.reduce(function (acc, x) { | |
if (acc === null) { | |
return [x, opts, []]; | |
} else { | |
return [x, {status: 'missing'}, [acc]]; | |
} | |
}, null) | |
}]; | |
nRevNum = doc._revisions.start; | |
newRevId = doc._revisions.ids[0]; | |
} | |
if (!doc._rev_tree) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
if (!revInfo) { | |
error = new TypeError(errors.BAD_ARG.message); | |
error.status = errors.BAD_ARG.status; | |
throw error; | |
} | |
nRevNum = parseInt(revInfo[1], 10); | |
newRevId = revInfo[2]; | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], opts, []] | |
}]; | |
} | |
} | |
exports.invalidIdError(doc._id); | |
doc._rev = [nRevNum, newRevId].join('-'); | |
var result = {metadata : {}, data : {}}; | |
for (var key in doc) { | |
if (doc.hasOwnProperty(key)) { | |
var specialKey = key[0] === '_'; | |
if (specialKey && !reservedWords[key]) { | |
error = new Error(errors.DOC_VALIDATION.message + ': ' + key); | |
error.status = errors.DOC_VALIDATION.status; | |
throw error; | |
} else if (specialKey && !dataWords[key]) { | |
result.metadata[key.slice(1)] = doc[key]; | |
} else { | |
result.data[key] = doc[key]; | |
} | |
} | |
} | |
return result; | |
}; | |
exports.isCordova = function () { | |
return (typeof cordova !== "undefined" || | |
typeof PhoneGap !== "undefined" || | |
typeof phonegap !== "undefined"); | |
}; | |
exports.hasLocalStorage = function () { | |
if (isChromeApp()) { | |
return false; | |
} | |
try { | |
return global.localStorage; | |
} catch (e) { | |
return false; | |
} | |
}; | |
exports.Changes = Changes;
exports.inherits(Changes, EventEmitter);
// Cross-window change notifier: chrome.storage events in Chrome apps,
// localStorage "storage" events in browsers, plus plain in-process
// EventEmitter events for the local window.
function Changes() {
  if (!(this instanceof Changes)) {
    return new Changes();
  }
  var self = this;
  EventEmitter.call(this);
  this.isChrome = isChromeApp();
  this.listeners = {};
  this.hasLocal = false;
  if (!this.isChrome) {
    this.hasLocal = exports.hasLocalStorage();
  }
  if (this.isChrome) {
    chrome.storage.onChanged.addListener(function (e) {
      // make sure it's an event addressed to us. BUG FIX: the guard
      // used to read `e.db_name`, which is never set —
      // notifyLocalWindows() writes the `dbName` key — so chrome-app
      // notifications were silently dropped. Check the key actually
      // written; its change record only has oldValue/newValue members.
      if (e.dbName != null) {
        self.emit(e.dbName.newValue);
      }
    });
  } else if (this.hasLocal) {
    if (global.addEventListener) {
      global.addEventListener("storage", function (e) {
        self.emit(e.key);
      });
    } else {
      // legacy IE event registration
      global.attachEvent("storage", function (e) {
        self.emit(e.key);
      });
    }
  }
}
// Register a change feed for `dbName` under `id`: each time this
// emitter fires for dbName, poll db.changes() once and forward any
// changes newer than opts.since to opts.onChange. Duplicate ids no-op.
Changes.prototype.addListener = function (dbName, id, db, opts) {
  if (this.listeners[id]) {
    return;
  }
  function eventFunction() {
    db.changes({
      include_docs: opts.include_docs,
      conflicts: opts.conflicts,
      continuous: false,
      descending: false,
      filter: opts.filter,
      view: opts.view,
      since: opts.since,
      query_params: opts.query_params,
      onChange: function (c) {
        // Only forward changes we have not already seen, and only
        // while the feed has not been cancelled.
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          exports.call(opts.onChange, c);
        }
      }
    });
  }
  this.listeners[id] = eventFunction;
  this.on(dbName, eventFunction);
};
// Detach the listener registered under `id` for `dbName` (no-op for
// unknown ids). Calls the raw EventEmitter method directly since our
// own addListener override has a different signature.
Changes.prototype.removeListener = function (dbName, id) {
  if (!(id in this.listeners)) {
    return;
  }
  EventEmitter.prototype.removeListener.call(this, dbName,
    this.listeners[id]);
};
Changes.prototype.notifyLocalWindows = function (dbName) {
  //do a useless change on a storage thing
  //in order to get other windows's listeners to activate
  if (this.isChrome) {
    chrome.storage.local.set({dbName: dbName});
  } else if (this.hasLocal) {
    // Flip between "a"/"b" so the write always differs and a
    // cross-window "storage" event actually fires.
    localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
  }
};
// Fire listeners in this window and nudge sibling windows/tabs.
Changes.prototype.notify = function (dbName) {
  this.emit(dbName);
  this.notifyLocalWindows(dbName);
};
// Base64 helpers: prefer the platform's native atob/btoa in browsers,
// fall back to Buffer in Node (or environments lacking them).
if (!process.browser || !('atob' in global)) {
  exports.atob = function (str) {
    var base64 = new buffer(str, 'base64');
    // Node.js will just skip the characters it can't decode instead of
    // throwing an exception, so round-trip to detect malformed input.
    if (base64.toString('base64') !== str) {
      // NOTE(review): throws a bare string rather than an Error —
      // left as-is since callers may match on the string value.
      throw ("Cannot base64 encode full string");
    }
    return base64.toString('binary');
  };
} else {
  exports.atob = function (str) {
    return atob(str);
  };
}
if (!process.browser || !('btoa' in global)) {
  exports.btoa = function (str) {
    return new buffer(str, 'binary').toString('base64');
  };
} else {
  exports.btoa = function (str) {
    return btoa(str);
  };
}
// From http://stackoverflow.com/questions/14967647/ (continues on next line) | |
// encode-decode-image-with-base64-breaks-image (2013-04-21) | |
exports.fixBinary = function (bin) { | |
if (!process.browser) { | |
// don't need to do this in Node | |
return bin; | |
} | |
var length = bin.length; | |
var buf = new ArrayBuffer(length); | |
var arr = new Uint8Array(buf); | |
for (var i = 0; i < length; i++) { | |
arr[i] = bin.charCodeAt(i); | |
} | |
return buf; | |
}; | |
exports.once = function (fun) { | |
var called = false; | |
return exports.getArguments(function (args) { | |
if (called) { | |
if (typeof console.trace === 'function') { | |
console.trace(); | |
} | |
throw new Error('once called more than once'); | |
} else { | |
called = true; | |
fun.apply(this, args); | |
} | |
}); | |
}; | |
// Wrap a callback-style function so it returns a Promise, while still
// honoring a trailing callback argument when the caller passes one.
exports.toPromise = function (func) {
  //create the function we will be returning
  return exports.getArguments(function (args) {
    var self = this;
    var tempCB =
      (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    // if the last argument is a function, assume its a callback
    var usedCB;
    if (tempCB) {
      // if it was a callback, create a new callback which calls it,
      // but do so async so we don't trap any errors
      usedCB = function (err, resp) {
        process.nextTick(function () {
          tempCB(err, resp);
        });
      };
    }
    var promise = new Promise(function (fulfill, reject) {
      var resp;
      try {
        // exports.once guards against the wrapped function invoking
        // its callback more than once
        var callback = exports.once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // create a callback for this invocation
        // apply the function in the orig context
        args.push(callback);
        resp = func.apply(self, args);
        // if the wrapped function already returned a thenable, settle
        // with it directly (Promise resolution unwraps it)
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    // replication-style consumers expect a cancel() handle; here it is
    // a no-op that returns the promise for chaining
    promise.cancel = function () {
      return this;
    };
    return promise;
  });
};
// Wrap an adapter method so that calls made before the task queue is ready
// are deferred (and replayed via self[name]), and calls on a closed database
// are rejected.
exports.adapterFun = function (name, callback) {
  return exports.toPromise(exports.getArguments(function (args) {
    if (this._closed) {
      // NOTE(review): uses the global Promise here but exports.Promise below;
      // presumably both refer to the same implementation — confirm.
      return Promise.reject(new Error('database is closed'));
    }
    var self = this;
    if (!this.taskqueue.isReady) {
      return new exports.Promise(function (fulfill, reject) {
        self.taskqueue.addTask(function (failed) {
          if (failed) {
            reject(failed);
          } else {
            // re-dispatch through self[name] now that the adapter is ready
            fulfill(self[name].apply(self, args));
          }
        });
      });
    }
    return callback.apply(this, args);
  }));
};
//Can't find original post, but this is close
//http://stackoverflow.com/questions/6965107/
//converting-between-strings-and-arraybuffers
// Decode an ArrayBuffer into a binary string, one char per byte.
exports.arrayBufferToBinaryString = function (buffer) {
  var binary = "";
  var bytes = new Uint8Array(buffer);
  var length = bytes.byteLength;
  for (var i = 0; i < length; i++) {
    binary += String.fromCharCode(bytes[i]);
  }
  return binary;
};
exports.cancellableFun = function (fun, self, opts) { | |
opts = opts ? exports.clone(true, {}, opts) : {}; | |
var emitter = new EventEmitter(); | |
var oldComplete = opts.complete || function () { }; | |
var complete = opts.complete = exports.once(function (err, resp) { | |
if (err) { | |
oldComplete(err); | |
} else { | |
emitter.emit('end', resp); | |
oldComplete(null, resp); | |
} | |
emitter.removeAllListeners(); | |
}); | |
var oldOnChange = opts.onChange || function () {}; | |
var lastChange = 0; | |
self.on('destroyed', function () { | |
emitter.removeAllListeners(); | |
}); | |
opts.onChange = function (change) { | |
oldOnChange(change); | |
if (change.seq <= lastChange) { | |
return; | |
} | |
lastChange = change.seq; | |
emitter.emit('change', change); | |
if (change.deleted) { | |
emitter.emit('delete', change); | |
} else if (change.changes.length === 1 && | |
change.changes[0].rev.slice(0, 1) === '1-') { | |
emitter.emit('create', change); | |
} else { | |
emitter.emit('update', change); | |
} | |
}; | |
var promise = new Promise(function (fulfill, reject) { | |
opts.complete = function (err, res) { | |
if (err) { | |
reject(err); | |
} else { | |
fulfill(res); | |
} | |
}; | |
}); | |
promise.then(function (result) { | |
complete(null, result); | |
}, complete); | |
// this needs to be overwridden by caller, dont fire complete until | |
// the task is ready | |
promise.cancel = function () { | |
promise.isCancelled = true; | |
if (self.taskqueue.isReady) { | |
opts.complete(null, {status: 'cancelled'}); | |
} | |
}; | |
if (!self.taskqueue.isReady) { | |
self.taskqueue.addTask(function () { | |
if (promise.isCancelled) { | |
opts.complete(null, {status: 'cancelled'}); | |
} else { | |
fun(self, opts, promise); | |
} | |
}); | |
} else { | |
fun(self, opts, promise); | |
} | |
promise.on = emitter.on.bind(emitter); | |
promise.once = emitter.once.bind(emitter); | |
promise.addListener = emitter.addListener.bind(emitter); | |
promise.removeListener = emitter.removeListener.bind(emitter); | |
promise.removeAllListeners = emitter.removeAllListeners.bind(emitter); | |
promise.setMaxListeners = emitter.setMaxListeners.bind(emitter); | |
promise.listeners = emitter.listeners.bind(emitter); | |
promise.emit = emitter.emit.bind(emitter); | |
return promise; | |
}; | |
// Promise-returning MD5 digest helper (see ./deps/md5).
exports.MD5 = exports.toPromise(require('./deps/md5'));
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./deps/ajax":2,"./deps/blob":3,"./deps/buffer":17,"./deps/collections":4,"./deps/errors":5,"./deps/md5":6,"./deps/uuid":7,"./merge":8,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"argsarray":16,"bluebird":75,"events":18,"inherits":33,"pouchdb-extend":96}],16:[function(require,module,exports){ | |
'use strict'; | |
module.exports = argsArray; | |
// Wrap `fun` so that it receives every call-site argument as a single real
// Array (instead of the `arguments` object), preserving the caller's `this`.
function argsArray(fun) {
  return function () {
    var collected = [];
    for (var idx = 0; idx < arguments.length; idx++) {
      collected[idx] = arguments[idx];
    }
    return fun.call(this, collected);
  };
}
},{}],17:[function(require,module,exports){ | |
},{}],18:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// Minimal EventEmitter (browserified copy of node's 'events' module).
function EventEmitter() {
  this._events = this._events || {};
  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
  if (!isNumber(n) || n < 0 || isNaN(n))
    throw TypeError('n must be a positive number');
  this._maxListeners = n;
  return this;
};
// Synchronously invoke every listener for `type` with the remaining
// arguments. Returns true if there was at least one listener, false
// otherwise. An unhandled 'error' event throws.
EventEmitter.prototype.emit = function(type) {
  var er, handler, len, args, i, listeners;
  if (!this._events)
    this._events = {};
  // If there is no 'error' event listener then throw.
  if (type === 'error') {
    if (!this._events.error ||
        (isObject(this._events.error) && !this._events.error.length)) {
      er = arguments[1];
      if (er instanceof Error) {
        throw er; // Unhandled 'error' event
      }
      throw TypeError('Uncaught, unspecified "error" event.');
    }
  }
  handler = this._events[type];
  if (isUndefined(handler))
    return false;
  if (isFunction(handler)) {
    // single-listener storage form
    switch (arguments.length) {
      // fast cases: avoid building an args array for 0-2 event arguments
      case 1:
        handler.call(this);
        break;
      case 2:
        handler.call(this, arguments[1]);
        break;
      case 3:
        handler.call(this, arguments[1], arguments[2]);
        break;
      // slower
      default:
        len = arguments.length;
        args = new Array(len - 1);
        for (i = 1; i < len; i++)
          args[i - 1] = arguments[i];
        handler.apply(this, args);
    }
  } else if (isObject(handler)) {
    // array storage form: copy first so listeners added/removed during
    // emit don't affect this dispatch
    len = arguments.length;
    args = new Array(len - 1);
    for (i = 1; i < len; i++)
      args[i - 1] = arguments[i];
    listeners = handler.slice();
    len = listeners.length;
    for (i = 0; i < len; i++)
      listeners[i].apply(this, args);
  }
  return true;
};
// Register `listener` for `type`. Emits 'newListener' first, stores a bare
// function for the single-listener case (switching to an array at two or
// more), and warns once per event type when the listener count exceeds the
// max-listeners limit.
// FIX: the original declared `var m;` twice (once unused at the top, once in
// the leak-check branch); the dead outer declaration has been removed.
EventEmitter.prototype.addListener = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  if (!this._events)
    this._events = {};
  // To avoid recursion in the case that type === "newListener"! Before
  // adding it to the listeners, first emit "newListener".
  if (this._events.newListener)
    this.emit('newListener', type,
              isFunction(listener.listener) ?
              listener.listener : listener);
  if (!this._events[type])
    // Optimize the case of one listener. Don't need the extra array object.
    this._events[type] = listener;
  else if (isObject(this._events[type]))
    // If we've already got an array, just append.
    this._events[type].push(listener);
  else
    // Adding the second element, need to change to array.
    this._events[type] = [this._events[type], listener];
  // Check for listener leak
  if (isObject(this._events[type]) && !this._events[type].warned) {
    var m;
    if (!isUndefined(this._maxListeners)) {
      m = this._maxListeners;
    } else {
      m = EventEmitter.defaultMaxListeners;
    }
    if (m && m > 0 && this._events[type].length > m) {
      this._events[type].warned = true;
      console.error('(node) warning: possible EventEmitter memory ' +
                    'leak detected. %d listeners added. ' +
                    'Use emitter.setMaxListeners() to increase limit.',
                    this._events[type].length);
      if (typeof console.trace === 'function') {
        // not supported in IE 10
        console.trace();
      }
    }
  }
  return this;
};
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
// Register a listener that is removed after its first invocation. The
// `fired` flag guards against re-entrant emits during removal.
EventEmitter.prototype.once = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  var fired = false;
  function g() {
    this.removeListener(type, g);
    if (!fired) {
      fired = true;
      listener.apply(this, arguments);
    }
  }
  // keep a reference so removeListener(type, original) can find the wrapper
  g.listener = listener;
  this.on(type, g);
  return this;
};
// Remove `listener` for `type`; emits a 'removeListener' event only if the
// listener was actually removed. Handles both storage forms.
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  if (!this._events || !this._events[type])
    return this;
  list = this._events[type];
  length = list.length;
  position = -1;
  // Single-listener form: `list` is the function itself (or a once-wrapper
  // whose .listener is the original).
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  } else if (isObject(list)) {
    // Array form: scan backwards for the listener (or its once-wrapper).
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }
    if (position < 0)
      return this;
    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }
  return this;
};
// Remove all listeners for `type`, or for every event when called with no
// arguments. Emits 'removeListener' per removed listener when observed.
EventEmitter.prototype.removeAllListeners = function(type) {
  var key, listeners;
  if (!this._events)
    return this;
  // not listening for removeListener, no need to emit
  if (!this._events.removeListener) {
    if (arguments.length === 0)
      this._events = {};
    else if (this._events[type])
      delete this._events[type];
    return this;
  }
  // emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    for (key in this._events) {
      if (key === 'removeListener') continue;
      this.removeAllListeners(key);
    }
    // remove 'removeListener' listeners last so the others are observed
    this.removeAllListeners('removeListener');
    this._events = {};
    return this;
  }
  listeners = this._events[type];
  if (isFunction(listeners)) {
    this.removeListener(type, listeners);
  } else {
    // LIFO order
    while (listeners.length)
      this.removeListener(type, listeners[listeners.length - 1]);
  }
  delete this._events[type];
  return this;
};
// Return a copy of the listener array for `type` (empty if none).
EventEmitter.prototype.listeners = function(type) {
  var ret;
  if (!this._events || !this._events[type])
    ret = [];
  else if (isFunction(this._events[type]))
    ret = [this._events[type]];
  else
    ret = this._events[type].slice();
  return ret;
};
// Static helper: number of listeners registered on `emitter` for `type`.
EventEmitter.listenerCount = function(emitter, type) {
  var ret;
  if (!emitter._events || !emitter._events[type])
    ret = 0;
  else if (isFunction(emitter._events[type]))
    ret = 1;
  else
    ret = emitter._events[type].length;
  return ret;
};
// Minimal type predicates used throughout this module.
function isFunction(arg) {
  return 'function' === typeof arg;
}
function isNumber(arg) {
  return 'number' === typeof arg;
}
function isObject(arg) {
  return null !== arg && 'object' === typeof arg;
}
function isUndefined(arg) {
  return void 0 === arg;
}
},{}],19:[function(require,module,exports){ | |
// shim for using process in browser
var process = module.exports = {};
// Pick the best available async scheduler: setImmediate, then postMessage
// (a macrotask without setTimeout's clamping), then a setTimeout fallback.
process.nextTick = (function () {
  var canSetImmediate = typeof window !== 'undefined' && window.setImmediate;
  var canPost = typeof window !== 'undefined'
    && window.postMessage && window.addEventListener;
  if (canSetImmediate) {
    return function (f) { return window.setImmediate(f) };
  }
  if (canPost) {
    var queue = [];
    // Each 'process-tick' message drains exactly one queued callback.
    window.addEventListener('message', function (ev) {
      var source = ev.source;
      if ((source === window || source === null) && ev.data === 'process-tick') {
        ev.stopPropagation();
        if (queue.length > 0) {
          var fn = queue.shift();
          fn();
        }
      }
    }, true);
    return function nextTick(fn) {
      queue.push(fn);
      window.postMessage('process-tick', '*');
    };
  }
  return function nextTick(fn) {
    setTimeout(fn, 0);
  };
})();
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
// Node-only APIs are stubbed to throw when called in the browser.
process.binding = function (name) {
  throw new Error('process.binding is not supported');
}
// TODO(shtylman)
process.cwd = function () { return '/' };
process.chdir = function (dir) {
  throw new Error('process.chdir is not supported');
};
},{}],20:[function(require,module,exports){ | |
var base64 = require('base64-js') | |
var ieee754 = require('ieee754') | |
exports.Buffer = Buffer | |
exports.SlowBuffer = Buffer | |
exports.INSPECT_MAX_BYTES = 50 | |
Buffer.poolSize = 8192 | |
/**
 * If `Buffer._useTypedArrays`:
 *   === true    Use Uint8Array implementation (fastest)
 *   === false   Use Object implementation (compatible down to IE6)
 */
Buffer._useTypedArrays = (function () {
  // Detect if browser supports Typed Arrays. Supported browsers are IE 10+,
  // Firefox 4+, Chrome 7+, Safari 5.1+, Opera 11.6+, iOS 4.2+.
  if (typeof Uint8Array === 'undefined' || typeof ArrayBuffer === 'undefined')
    return false
  // Does the browser support adding properties to `Uint8Array` instances? If
  // not, then that's the same as no `Uint8Array` support. We need to be able to
  // add all the node Buffer API methods.
  // Relevant Firefox bug: https://bugzilla.mozilla.org/show_bug.cgi?id=695438
  try {
    var arr = new Uint8Array(0)
    arr.foo = function () { return 42 }
    return 42 === arr.foo() &&
        typeof arr.subarray === 'function' // Chrome 9-10 lack `subarray`
  } catch (e) {
    return false
  }
})()
/**
 * Class: Buffer
 * =============
 *
 * The Buffer constructor returns instances of `Uint8Array` that are augmented
 * with function properties for all the node `Buffer` API functions. We use
 * `Uint8Array` so that square bracket notation works as expected -- it returns
 * a single octet.
 *
 * By augmenting the instances, we can avoid modifying the `Uint8Array`
 * prototype.
 */
function Buffer (subject, encoding, noZero) {
  // Allow construction without `new`.
  if (!(this instanceof Buffer))
    return new Buffer(subject, encoding, noZero)
  var type = typeof subject
  // Workaround: node's base64 implementation allows for non-padded strings
  // while base64-js does not.
  if (encoding === 'base64' && type === 'string') {
    subject = stringtrim(subject)
    while (subject.length % 4 !== 0) {
      subject = subject + '='
    }
  }
  // Find the length
  var length
  if (type === 'number')
    length = coerce(subject)
  else if (type === 'string')
    length = Buffer.byteLength(subject, encoding)
  else if (type === 'object')
    length = coerce(subject.length) // Assume object is an array
  else
    throw new Error('First argument needs to be a number, array or string.')
  var buf
  if (Buffer._useTypedArrays) {
    // Preferred: Return an augmented `Uint8Array` instance for best performance
    buf = augment(new Uint8Array(length))
  } else {
    // Fallback: Return THIS instance of Buffer (created by `new`)
    buf = this
    buf.length = length
    buf._isBuffer = true
  }
  var i
  if (Buffer._useTypedArrays && typeof Uint8Array === 'function' &&
      subject instanceof Uint8Array) {
    // Speed optimization -- use set if we're copying from a Uint8Array
    buf._set(subject)
  } else if (isArrayish(subject)) {
    // Treat array-ish objects as a byte array
    for (i = 0; i < length; i++) {
      if (Buffer.isBuffer(subject))
        buf[i] = subject.readUInt8(i)
      else
        buf[i] = subject[i]
    }
  } else if (type === 'string') {
    buf.write(subject, 0, encoding)
  } else if (type === 'number' && !Buffer._useTypedArrays && !noZero) {
    // Zero-fill the object fallback unless the caller opted out via noZero.
    for (i = 0; i < length; i++) {
      buf[i] = 0
    }
  }
  return buf
}
// STATIC METHODS | |
// ============== | |
// True when `encoding` names one of the string encodings this Buffer
// implementation understands (case-insensitive).
Buffer.isEncoding = function (encoding) {
  var normalized = String(encoding).toLowerCase()
  if (normalized === 'hex' || normalized === 'utf8' ||
      normalized === 'utf-8' || normalized === 'ascii' ||
      normalized === 'binary' || normalized === 'base64' ||
      normalized === 'raw' || normalized === 'ucs2' ||
      normalized === 'ucs-2' || normalized === 'utf16le' ||
      normalized === 'utf-16le') {
    return true
  }
  return false
}
// A value counts as a Buffer when it is non-null/undefined and carries the
// truthy `_isBuffer` brand set by this implementation.
Buffer.isBuffer = function (b) {
  if (b === null || b === undefined) {
    return false
  }
  return Boolean(b._isBuffer)
}
// Number of bytes `str` occupies when encoded as `encoding` (default 'utf8').
Buffer.byteLength = function (str, encoding) {
  var ret
  str = str + ''
  switch (encoding || 'utf8') {
    case 'hex':
      // two hex digits per byte
      ret = str.length / 2
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8ToBytes(str).length
      break
    case 'ascii':
    case 'binary':
    case 'raw':
      // one byte per character
      ret = str.length
      break
    case 'base64':
      ret = base64ToBytes(str).length
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      // two bytes per UTF-16 code unit
      ret = str.length * 2
      break
    default:
      throw new Error('Unknown encoding')
  }
  return ret
}
// Concatenate a list of Buffers into one. With a single-element list the
// element itself is returned (no copy), matching node's historical behavior.
Buffer.concat = function (list, totalLength) {
  assert(isArray(list), 'Usage: Buffer.concat(list, [totalLength])\n' +
      'list should be an Array.')
  if (list.length === 0) {
    return new Buffer(0)
  } else if (list.length === 1) {
    return list[0]
  }
  var i
  if (typeof totalLength !== 'number') {
    // caller did not supply the total; sum the element lengths
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }
  var buf = new Buffer(totalLength)
  var pos = 0
  for (i = 0; i < list.length; i++) {
    var item = list[i]
    item.copy(buf, pos)
    pos += item.length
  }
  return buf
}
// BUFFER INSTANCE METHODS | |
// ======================= | |
// Write hex digit pairs from `string` into `buf` starting at `offset`,
// writing at most `length` bytes (clamped to the remaining room and to the
// digits available). Returns the number of bytes written and records the
// number of hex chars consumed in Buffer._charsWritten.
function _hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0
  var room = buf.length - offset
  length = length ? Number(length) : room
  if (length > room) {
    length = room
  }
  // must be an even number of hex digits
  var digits = string.length
  assert(digits % 2 === 0, 'Invalid hex string')
  var maxBytes = digits / 2
  if (length > maxBytes) {
    length = maxBytes
  }
  var i = 0
  while (i < length) {
    var parsed = parseInt(string.substr(i * 2, 2), 16)
    assert(!isNaN(parsed), 'Invalid hex string')
    buf[offset + i] = parsed
    i++
  }
  Buffer._charsWritten = i * 2
  return i
}
// The _xxxWrite helpers encode `string` and blit the resulting bytes into
// `buf`, recording the count consumed in Buffer._charsWritten.
function _utf8Write (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
    blitBuffer(utf8ToBytes(string), buf, offset, length)
  return charsWritten
}
function _asciiWrite (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
    blitBuffer(asciiToBytes(string), buf, offset, length)
  return charsWritten
}
// 'binary' is an alias for ascii here (one byte per char).
function _binaryWrite (buf, string, offset, length) {
  return _asciiWrite(buf, string, offset, length)
}
function _base64Write (buf, string, offset, length) {
  var charsWritten = Buffer._charsWritten =
    blitBuffer(base64ToBytes(string), buf, offset, length)
  return charsWritten
}
// Write `string` into the buffer at `offset` using `encoding`; returns the
// number of bytes written.
Buffer.prototype.write = function (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      encoding = length
      length = undefined
    }
  } else {  // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }
  offset = Number(offset) || 0
  // clamp length to the space remaining after offset
  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()
  switch (encoding) {
    case 'hex':
      return _hexWrite(this, string, offset, length)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Write(this, string, offset, length)
    case 'ascii':
      return _asciiWrite(this, string, offset, length)
    case 'binary':
      return _binaryWrite(this, string, offset, length)
    case 'base64':
      return _base64Write(this, string, offset, length)
    default:
      throw new Error('Unknown encoding')
  }
}
// Decode the slice [start, end) of this buffer to a string in the given
// encoding (default 'utf8').
// FIX: the original defaulted `end` with a nested assignment inside a
// ternary (`... : end = self.length`), which assigns `end` twice and reads
// confusingly; behavior is unchanged by this cleanup.
Buffer.prototype.toString = function (encoding, start, end) {
  var self = this
  encoding = String(encoding || 'utf8').toLowerCase()
  start = Number(start) || 0
  end = (end === undefined) ? self.length : Number(end)
  // Fastpath empty strings
  if (end === start)
    return ''
  switch (encoding) {
    case 'hex':
      return _hexSlice(self, start, end)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Slice(self, start, end)
    case 'ascii':
      return _asciiSlice(self, start, end)
    case 'binary':
      return _binarySlice(self, start, end)
    case 'base64':
      return _base64Slice(self, start, end)
    default:
      throw new Error('Unknown encoding')
  }
}
// JSON representation compatible with node's Buffer#toJSON:
// { type: 'Buffer', data: [bytes...] }.
Buffer.prototype.toJSON = function () {
  var bytes = Array.prototype.slice.call(this._arr || this, 0)
  return { type: 'Buffer', data: bytes }
}
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
// Copy bytes [start, end) of this buffer into `target` at `target_start`,
// truncating the range to whatever fits.
Buffer.prototype.copy = function (target, target_start, start, end) {
  var source = this
  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (!target_start) target_start = 0
  // Copy 0 bytes; we're done
  if (end === start) return
  if (target.length === 0 || source.length === 0) return
  // Fatal error conditions
  assert(end >= start, 'sourceEnd < sourceStart')
  assert(target_start >= 0 && target_start < target.length,
      'targetStart out of bounds')
  assert(start >= 0 && start < source.length, 'sourceStart out of bounds')
  assert(end >= 0 && end <= source.length, 'sourceEnd out of bounds')
  // Are we oob?
  if (end > this.length)
    end = this.length
  if (target.length - target_start < end - start)
    end = target.length - target_start + start
  // copy!
  for (var i = 0; i < end - start; i++)
    target[i + target_start] = this[i + start]
}
// Encode bytes [start, end) as base64 (whole-buffer fast path avoids a copy).
function _base64Slice (buf, start, end) {
  if (start === 0 && end === buf.length) {
    return base64.fromByteArray(buf)
  } else {
    return base64.fromByteArray(buf.slice(start, end))
  }
}
// Decode bytes [start, end) as UTF-8. Multi-byte sequences are accumulated
// as percent-escapes and decoded via decodeUtf8Char.
function _utf8Slice (buf, start, end) {
  var res = ''
  var tmp = ''
  end = Math.min(buf.length, end)
  for (var i = start; i < end; i++) {
    if (buf[i] <= 0x7F) {
      // ASCII byte: flush any pending multi-byte sequence first
      res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
      tmp = ''
    } else {
      tmp += '%' + buf[i].toString(16)
    }
  }
  return res + decodeUtf8Char(tmp)
}
// Decode bytes in [start, end) as one character per byte, clamping `end` to
// the buffer length.
function _asciiSlice (buf, start, end) {
  var stop = Math.min(buf.length, end)
  var chars = []
  for (var pos = start; pos < stop; pos++) {
    chars.push(String.fromCharCode(buf[pos]))
  }
  return chars.join('')
}
// 'binary' decoding is identical to ascii here (one char per byte).
function _binarySlice (buf, start, end) {
  return _asciiSlice(buf, start, end)
}
// Hex-encode bytes [start, end), clamping the range to the buffer.
function _hexSlice (buf, start, end) {
  var len = buf.length
  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len
  var out = ''
  for (var i = start; i < end; i++) {
    out += toHex(buf[i])
  }
  return out
}
// http://nodejs.org/api/buffer.html#buffer_buf_slice_start_end
// Return [start, end): a shared-memory subarray when typed arrays are
// available, otherwise a copied object-based Buffer.
Buffer.prototype.slice = function (start, end) {
  var len = this.length
  start = clamp(start, len, 0)
  end = clamp(end, len, len)
  if (Buffer._useTypedArrays) {
    return augment(this.subarray(start, end))
  } else {
    var sliceLen = end - start
    var newBuf = new Buffer(sliceLen, undefined, true)
    for (var i = 0; i < sliceLen; i++) {
      newBuf[i] = this[i + start]
    }
    return newBuf
  }
}
// `get` will be removed in Node 0.13+
Buffer.prototype.get = function (offset) {
  console.log('.get() is deprecated. Access using array indexes instead.')
  return this.readUInt8(offset)
}
// `set` will be removed in Node 0.13+
Buffer.prototype.set = function (v, offset) {
  console.log('.set() is deprecated. Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}
// Read one unsigned byte at `offset`. Out-of-range reads return undefined
// (the permissive behavior of this shim); validation runs unless noAssert.
Buffer.prototype.readUInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }
  if (offset < this.length) {
    return this[offset]
  }
  return undefined
}
// Read an unsigned 16-bit integer at `offset` in the given endianness.
// Reads that straddle the end of the buffer yield the partial value
// (permissive when noAssert); reads fully past the end yield undefined.
function _readUInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }
  var size = buf.length
  if (offset >= size)
    return
  var hasSecond = offset + 1 < size
  if (littleEndian) {
    return hasSecond ? (buf[offset] | (buf[offset + 1] << 8)) : buf[offset]
  }
  var result = buf[offset] << 8
  return hasSecond ? (result | buf[offset + 1]) : result
}
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
  return _readUInt16(this, offset, true, noAssert)
}
Buffer.prototype.readUInt16BE = function (offset, noAssert) {
  return _readUInt16(this, offset, false, noAssert)
}
// Read an unsigned 32-bit integer; reads that straddle the end of the buffer
// yield a partial value (permissive when noAssert).
function _readUInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }
  var len = buf.length
  if (offset >= len)
    return
  var val
  if (littleEndian) {
    if (offset + 2 < len)
      val = buf[offset + 2] << 16
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
    val |= buf[offset]
    if (offset + 3 < len)
      // high byte added arithmetically (not OR'd) so the result stays unsigned
      val = val + (buf[offset + 3] << 24 >>> 0)
  } else {
    if (offset + 1 < len)
      val = buf[offset + 1] << 16
    if (offset + 2 < len)
      val |= buf[offset + 2] << 8
    if (offset + 3 < len)
      val |= buf[offset + 3]
    // high byte added arithmetically (not OR'd) so the result stays unsigned
    val = val + (buf[offset] << 24 >>> 0)
  }
  return val
}
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
  return _readUInt32(this, offset, true, noAssert)
}
Buffer.prototype.readUInt32BE = function (offset, noAssert) {
  return _readUInt32(this, offset, false, noAssert)
}
// Read one signed byte at `offset`, interpreting the value as two's
// complement. Out-of-range reads return undefined.
Buffer.prototype.readInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null,
        'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }
  if (offset >= this.length)
    return
  var byte = this[offset]
  // High bit set => negative in two's complement.
  return (byte & 0x80) ? byte - 0x100 : byte
}
function _readInt16 (buf, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset + 1 < buf.length, 'Trying to read beyond buffer length') | |
} | |
var len = buf.length | |
if (offset >= len) | |
return | |
var val = _readUInt16(buf, offset, littleEndian, true) | |
var neg = val & 0x8000 | |
if (neg) | |
return (0xffff - val + 1) * -1 | |
else | |
return val | |
} | |
Buffer.prototype.readInt16LE = function (offset, noAssert) { | |
return _readInt16(this, offset, true, noAssert) | |
} | |
Buffer.prototype.readInt16BE = function (offset, noAssert) { | |
return _readInt16(this, offset, false, noAssert) | |
} | |
function _readInt32 (buf, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset !== undefined && offset !== null, 'missing offset') | |
assert(offset + 3 < buf.length, 'Trying to read beyond buffer length') | |
} | |
var len = buf.length | |
if (offset >= len) | |
return | |
var val = _readUInt32(buf, offset, littleEndian, true) | |
var neg = val & 0x80000000 | |
if (neg) | |
return (0xffffffff - val + 1) * -1 | |
else | |
return val | |
} | |
Buffer.prototype.readInt32LE = function (offset, noAssert) { | |
return _readInt32(this, offset, true, noAssert) | |
} | |
Buffer.prototype.readInt32BE = function (offset, noAssert) { | |
return _readInt32(this, offset, false, noAssert) | |
} | |
function _readFloat (buf, offset, littleEndian, noAssert) { | |
if (!noAssert) { | |
assert(typeof littleEndian === 'boolean', 'missing or invalid endian') | |
assert(offset + 3 < buf.length, 'Trying to read beyond buffer length') | |
} | |
return ieee754.read(buf, offset, littleEndian, 23, 4) | |
} | |
Buffer.prototype.readFloatLE = function (offset, noAssert) { | |
return _readFloat(this, offset, true, noAssert) | |
} | |
Buffer.prototype.readFloatBE = function (offset, noAssert) { | |
return _readFloat(this, offset, false, noAssert) | |
} | |
// Read an IEEE-754 double-precision float (8 bytes, 52 mantissa bits).
// Fix (consistency): assert a usable `offset` like every other read
// helper does; previously a missing offset fell through to the bounds
// assert and threw with a misleading message.
function _readDouble (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 7 < buf.length, 'Trying to read beyond buffer length')
  }
  return ieee754.read(buf, offset, littleEndian, 52, 8)
}
// Read a 64-bit double at `offset`, little-endian byte order.
Buffer.prototype.readDoubleLE = function (offset, noAssert) {
  return _readDouble(this, offset, true, noAssert)
}
// Read a 64-bit double at `offset`, big-endian byte order.
Buffer.prototype.readDoubleBE = function (offset, noAssert) {
  return _readDouble(this, offset, false, noAssert)
}
// Write one unsigned byte at `offset`. Out-of-range offsets are
// silently ignored, matching the other write helpers.
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'trying to write beyond buffer length')
    verifuint(value, 0xff)
  }
  if (offset >= this.length)
    return
  this[offset] = value
}
// Write an unsigned 16-bit integer at `offset` in the requested byte
// order. Writes that would run past the end of the buffer are
// truncated: only the bytes that fit are stored.
function _writeUInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffff)
  }
  var len = buf.length
  if (offset >= len)
    return
  // Byte i takes bits (8 * pos) of `value`, where pos is i for
  // little-endian and (1 - i) for big-endian: mask the byte out of
  // `value`, then shift it down into the low 8 bits.
  for (var i = 0, j = Math.min(len - offset, 2); i < j; i++) {
    buf[offset + i] =
        (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
          (littleEndian ? i : 1 - i) * 8
  }
}
// Write an unsigned 16-bit integer at `offset`, little-endian.
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) {
  _writeUInt16(this, value, offset, true, noAssert)
}
// Write an unsigned 16-bit integer at `offset`, big-endian.
Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) {
  _writeUInt16(this, value, offset, false, noAssert)
}
// Write an unsigned 32-bit integer at `offset` in the requested byte
// order; the write is truncated at the end of the buffer.
function _writeUInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffffffff)
  }
  if (offset >= buf.length)
    return
  // Emit up to 4 bytes; byte i carries bits (8 * pos) of `value`,
  // where pos counts up for LE and down from 3 for BE.
  var count = Math.min(buf.length - offset, 4)
  for (var i = 0; i < count; i++) {
    var shift = (littleEndian ? i : 3 - i) * 8
    buf[offset + i] = (value >>> shift) & 0xff
  }
}
// Write an unsigned 32-bit integer at `offset`, little-endian.
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) {
  _writeUInt32(this, value, offset, true, noAssert)
}
// Write an unsigned 32-bit integer at `offset`, big-endian.
Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) {
  _writeUInt32(this, value, offset, false, noAssert)
}
// Write a signed byte at `offset`. Negative values are stored as
// their two's-complement unsigned byte via writeUInt8.
Buffer.prototype.writeInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7f, -0x80)
  }
  if (offset >= this.length)
    return
  var byte = value >= 0 ? value : 0xff + value + 1
  this.writeUInt8(byte, offset, noAssert)
}
// Write a signed 16-bit integer at `offset`: convert negatives to
// their two's-complement unsigned form, then delegate to _writeUInt16.
function _writeInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fff, -0x8000)
  }
  if (offset >= buf.length)
    return
  var unsigned = value >= 0 ? value : 0xffff + value + 1
  _writeUInt16(buf, unsigned, offset, littleEndian, noAssert)
}
// Write a signed 16-bit integer at `offset`, little-endian.
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) {
  _writeInt16(this, value, offset, true, noAssert)
}
// Write a signed 16-bit integer at `offset`, big-endian.
Buffer.prototype.writeInt16BE = function (value, offset, noAssert) {
  _writeInt16(this, value, offset, false, noAssert)
}
// Write a signed 32-bit integer at `offset`: convert negatives to
// their two's-complement unsigned form, then delegate to _writeUInt32.
function _writeInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fffffff, -0x80000000)
  }
  if (offset >= buf.length)
    return
  var unsigned = value >= 0 ? value : 0xffffffff + value + 1
  _writeUInt32(buf, unsigned, offset, littleEndian, noAssert)
}
// Write a signed 32-bit integer at `offset`, little-endian.
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, true, noAssert)
}
// Write a signed 32-bit integer at `offset`, big-endian.
Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, false, noAssert)
}
// Write an IEEE-754 single-precision float (4 bytes, 23 mantissa bits).
function _writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifIEEE754(value, 3.4028234663852886e+38, -3.4028234663852886e+38)
  }
  if (offset >= buf.length)
    return
  ieee754.write(buf, value, offset, littleEndian, 23, 4)
}
// Write a 32-bit float at `offset`, little-endian.
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, true, noAssert)
}
// Write a 32-bit float at `offset`, big-endian.
Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, false, noAssert)
}
// Write an IEEE-754 double-precision float (8 bytes, 52 mantissa bits).
function _writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 7 < buf.length, 'Trying to write beyond buffer length')
    verifIEEE754(value, 1.7976931348623157E+308, -1.7976931348623157E+308)
  }
  if (offset >= buf.length)
    return
  ieee754.write(buf, value, offset, littleEndian, 52, 8)
}
// Write a 64-bit double at `offset`, little-endian.
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, true, noAssert)
}
// Write a 64-bit double at `offset`, big-endian.
Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, false, noAssert)
}
// fill(value, start=0, end=buffer.length)
// Fill the range [start, end) with `value` (a byte, or the first
// character code of a string).
Buffer.prototype.fill = function (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  // Fix: `end === 0` is a legitimate argument meaning "fill nothing".
  // The old truthiness test (`if (!end)`) replaced 0 with this.length
  // and filled the entire buffer instead of zero bytes.
  if (end === undefined || end === null) end = this.length
  if (typeof value === 'string') {
    value = value.charCodeAt(0)
  }
  assert(typeof value === 'number' && !isNaN(value), 'value is not a number')
  assert(end >= start, 'end < start')
  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return
  assert(start >= 0 && start < this.length, 'start out of bounds')
  assert(end >= 0 && end <= this.length, 'end out of bounds')
  for (var i = start; i < end; i++) {
    this[i] = value
  }
}
// Debug representation: '<Buffer de ad be ef>', truncated with '...'
// once exports.INSPECT_MAX_BYTES bytes have been emitted.
Buffer.prototype.inspect = function () {
  var parts = []
  for (var i = 0; i < this.length; i++) {
    parts[i] = toHex(this[i])
    if (i === exports.INSPECT_MAX_BYTES) {
      parts[i + 1] = '...'
      break
    }
  }
  return '<Buffer ' + parts.join(' ') + '>'
}
/**
 * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
 * Added in Node 0.12. Only available in browsers that support ArrayBuffer.
 */
Buffer.prototype.toArrayBuffer = function () {
  if (typeof Uint8Array !== 'function') {
    throw new Error('Buffer.toArrayBuffer not supported in this browser')
  }
  if (Buffer._useTypedArrays) {
    // Already typed-array backed: round-tripping through the Buffer
    // constructor copies into a fresh underlying ArrayBuffer.
    return (new Buffer(this)).buffer
  }
  // Old-style buffer: copy byte by byte into a new Uint8Array.
  var copy = new Uint8Array(this.length)
  for (var i = 0; i < copy.length; i += 1) {
    copy[i] = this[i]
  }
  return copy.buffer
}
// HELPER FUNCTIONS | |
// ================ | |
// Trim leading/trailing whitespace, with a regex fallback for
// environments that lack String.prototype.trim.
function stringtrim (str) {
  if (!str.trim) {
    return str.replace(/^\s+|\s+$/g, '')
  }
  return str.trim()
}
// Shorthand for the prototype all augmented instances borrow from.
var BP = Buffer.prototype
/**
 * Augment the Uint8Array *instance* (not the class!) with Buffer methods
 */
function augment (arr) {
  // Marker checked elsewhere to recognize augmented instances.
  arr._isBuffer = true
  // save reference to original Uint8Array get/set methods before overwriting
  arr._get = arr.get
  arr._set = arr.set
  // deprecated, will be removed in node 0.13+
  arr.get = BP.get
  arr.set = BP.set
  arr.write = BP.write
  arr.toString = BP.toString
  // NOTE: deliberately aliased to toString, not BP.toLocaleString.
  arr.toLocaleString = BP.toString
  arr.toJSON = BP.toJSON
  arr.copy = BP.copy
  arr.slice = BP.slice
  arr.readUInt8 = BP.readUInt8
  arr.readUInt16LE = BP.readUInt16LE
  arr.readUInt16BE = BP.readUInt16BE
  arr.readUInt32LE = BP.readUInt32LE
  arr.readUInt32BE = BP.readUInt32BE
  arr.readInt8 = BP.readInt8
  arr.readInt16LE = BP.readInt16LE
  arr.readInt16BE = BP.readInt16BE
  arr.readInt32LE = BP.readInt32LE
  arr.readInt32BE = BP.readInt32BE
  arr.readFloatLE = BP.readFloatLE
  arr.readFloatBE = BP.readFloatBE
  arr.readDoubleLE = BP.readDoubleLE
  arr.readDoubleBE = BP.readDoubleBE
  arr.writeUInt8 = BP.writeUInt8
  arr.writeUInt16LE = BP.writeUInt16LE
  arr.writeUInt16BE = BP.writeUInt16BE
  arr.writeUInt32LE = BP.writeUInt32LE
  arr.writeUInt32BE = BP.writeUInt32BE
  arr.writeInt8 = BP.writeInt8
  arr.writeInt16LE = BP.writeInt16LE
  arr.writeInt16BE = BP.writeInt16BE
  arr.writeInt32LE = BP.writeInt32LE
  arr.writeInt32BE = BP.writeInt32BE
  arr.writeFloatLE = BP.writeFloatLE
  arr.writeFloatBE = BP.writeFloatBE
  arr.writeDoubleLE = BP.writeDoubleLE
  arr.writeDoubleBE = BP.writeDoubleBE
  arr.fill = BP.fill
  arr.inspect = BP.inspect
  arr.toArrayBuffer = BP.toArrayBuffer
  return arr
}
// slice(start, end)
// Normalize a slice index: non-numbers take `defaultValue`, negatives
// count back from the end, and the result is clamped into [0, len].
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index // Coerce to integer.
  if (index >= len) return len
  if (index < 0) {
    index += len
    return index < 0 ? 0 : index
  }
  return index
}
// Coerce an arbitrary value to a usable length: round fractional
// values up, turn NaN into 0 via the double-negate trick, and never
// return a negative count.
function coerce (length) {
  var n = ~~Math.ceil(+length)
  return n < 0 ? 0 : n
}
// Array test that also works on pre-ES5 engines lacking Array.isArray.
function isArray (subject) {
  if (Array.isArray) {
    return Array.isArray(subject)
  }
  return Object.prototype.toString.call(subject) === '[object Array]'
}
// Anything indexable with a numeric length: real arrays, Buffers, and
// array-like objects. Preserves the original's raw truthy/falsy
// return value in the fall-through case.
function isArrayish (subject) {
  if (isArray(subject)) return true
  if (Buffer.isBuffer(subject)) return true
  return subject && typeof subject === 'object' &&
      typeof subject.length === 'number'
}
// Format a byte value as two lowercase hex digits.
function toHex (n) {
  var hex = n.toString(16)
  return n < 16 ? '0' + hex : hex
}
// Encode a JS string as an array of UTF-8 byte values. Non-ASCII
// characters (including surrogate pairs) are routed through
// encodeURIComponent, whose %XX escapes are exactly the UTF-8 bytes.
// Fix: a lone (unpaired) surrogate made encodeURIComponent throw a
// URIError, and a lead surrogate swallowed the following character
// even when it was not a trail surrogate. Invalid surrogates now emit
// the UTF-8 bytes of U+FFFD (the replacement character), the same
// policy decodeUtf8Char applies in the decode direction. Output for
// valid input is unchanged.
function utf8ToBytes (str) {
  var byteArray = []
  for (var i = 0; i < str.length; i++) {
    var b = str.charCodeAt(i)
    if (b <= 0x7F) {
      byteArray.push(b)
    } else {
      var start = i
      // Only consume the next code unit when it is a valid trail
      // surrogate following a lead surrogate.
      if (b >= 0xD800 && b <= 0xDBFF) {
        var next = str.charCodeAt(i + 1)
        if (next >= 0xDC00 && next <= 0xDFFF) i++
      }
      var h
      try {
        h = encodeURIComponent(str.slice(start, i + 1)).substr(1).split('%')
      } catch (err) {
        // Unpaired surrogate: emit U+FFFD as UTF-8.
        byteArray.push(0xEF, 0xBF, 0xBD)
        continue
      }
      for (var j = 0; j < h.length; j++)
        byteArray.push(parseInt(h[j], 16))
    }
  }
  return byteArray
}
// Map each UTF-16 code unit of `str` to its low byte.
// Node's code seems to be doing this and not & 0x7F..
function asciiToBytes (str) {
  var bytes = []
  var len = str.length
  for (var i = 0; i < len; i++) {
    bytes.push(str.charCodeAt(i) & 0xFF)
  }
  return bytes
}
// Decode a base64 string into a byte array (delegates to base64-js).
function base64ToBytes (str) {
  return base64.toByteArray(str)
}
// Copy up to `length` bytes from `src` into `dst` starting at
// dst[offset], stopping early at the end of either buffer. Returns
// the number of bytes copied. (Deliberately reads loop variable `i`
// after the loop — legal thanks to `var` function scoping.)
// Fix: dropped the declared-but-never-used local `pos`.
function blitBuffer (src, dst, offset, length) {
  for (var i = 0; i < length; i++) {
    if ((i + offset >= dst.length) || (i >= src.length))
      break
    dst[i + offset] = src[i]
  }
  return i
}
// Percent-decode one UTF-8 escape sequence; invalid sequences decode
// to the Unicode replacement character U+FFFD.
function decodeUtf8Char (str) {
  var decoded
  try {
    decoded = decodeURIComponent(str)
  } catch (err) {
    decoded = String.fromCharCode(0xFFFD) // UTF 8 invalid char
  }
  return decoded
}
/*
 * We have to make sure that the value is a valid integer. This means that it
 * is non-negative. It has no fractional component and that it does not
 * exceed the maximum allowed value.
 */
// NOTE: the assertion order is part of the contract — it determines
// which error message a multiply-invalid value throws first.
function verifuint (value, max) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value >= 0,
      'specified a negative value for writing an unsigned value')
  assert(value <= max, 'value is larger than maximum value for type')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate that `value` is a whole number within the signed range
// [min, max]; assertion order determines which message throws first.
function verifsint(value, max, min) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate that `value` is a number within the representable range of
// the target IEEE-754 type (fractional values are fine here).
function verifIEEE754(value, max, min) {
  assert(typeof value == 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
}
// Minimal assertion helper: throws Error(message) when `test` is falsy.
function assert (test, message) {
  if (test) return
  throw new Error(message || 'Failed assertion')
}
},{"base64-js":21,"ieee754":22}],21:[function(require,module,exports){ | |
// base64-js: alphabet used to map 6-bit groups to base64 characters.
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
;(function (exports) {
  'use strict';
  // Prefer Uint8Array for decode output when the engine supports it.
  var Arr = (typeof Uint8Array !== 'undefined')
    ? Uint8Array
    : Array
  var PLUS = '+'.charCodeAt(0)
  var SLASH = '/'.charCodeAt(0)
  var NUMBER = '0'.charCodeAt(0)
  var LOWER = 'a'.charCodeAt(0)
  var UPPER = 'A'.charCodeAt(0)
  // Map one base64 character to its 6-bit value.
  function decode (elt) {
    var code = elt.charCodeAt(0)
    if (code === PLUS)
      return 62 // '+'
    if (code === SLASH)
      return 63 // '/'
    if (code < NUMBER)
      return -1 //no match
    if (code < NUMBER + 10)
      return code - NUMBER + 26 + 26
    if (code < UPPER + 26)
      return code - UPPER
    if (code < LOWER + 26)
      return code - LOWER + 26
  }
  // Decode a padded base64 string into an Arr of byte values.
  function b64ToByteArray (b64) {
    var i, j, l, tmp, placeHolders, arr
    if (b64.length % 4 > 0) {
      throw new Error('Invalid string. Length must be a multiple of 4')
    }
    // the number of equal signs (place holders)
    // if there are two placeholders, then the two characters before it
    // represent one byte
    // if there is only one, then the three characters before it represent 2 bytes
    // this is just a cheap hack to not do indexOf twice
    var len = b64.length
    placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0
    // base64 is 4/3 + up to two characters of the original data
    arr = new Arr(b64.length * 3 / 4 - placeHolders)
    // if there are placeholders, only get up to the last complete 4 chars
    l = placeHolders > 0 ? b64.length - 4 : b64.length
    var L = 0
    function push (v) {
      arr[L++] = v
    }
    // Every complete group of 4 characters decodes to 3 bytes.
    for (i = 0, j = 0; i < l; i += 4, j += 3) {
      tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3))
      push((tmp & 0xFF0000) >> 16)
      push((tmp & 0xFF00) >> 8)
      push(tmp & 0xFF)
    }
    // Handle the final, '='-padded group.
    if (placeHolders === 2) {
      tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
      push(tmp & 0xFF)
    } else if (placeHolders === 1) {
      tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
      push((tmp >> 8) & 0xFF)
      push(tmp & 0xFF)
    }
    return arr
  }
  // Encode an array of byte values as a padded base64 string.
  function uint8ToBase64 (uint8) {
    var i,
      extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes
      output = "",
      temp, length
    function encode (num) {
      return lookup.charAt(num)
    }
    // Encode 24 bits (3 bytes) as 4 base64 characters.
    function tripletToBase64 (num) {
      return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F)
    }
    // go through the array every three bytes, we'll deal with trailing stuff later
    for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
      temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
      output += tripletToBase64(temp)
    }
    // pad the end with zeros, but make sure to not forget the extra bytes
    switch (extraBytes) {
      case 1:
        temp = uint8[uint8.length - 1]
        output += encode(temp >> 2)
        output += encode((temp << 4) & 0x3F)
        output += '=='
        break
      case 2:
        temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
        output += encode(temp >> 10)
        output += encode((temp >> 4) & 0x3F)
        output += encode((temp << 2) & 0x3F)
        output += '='
        break
    }
    return output
  }
  exports.toByteArray = b64ToByteArray
  exports.fromByteArray = uint8ToBase64
}(typeof exports === 'undefined' ? (this.base64js = {}) : exports))
},{}],22:[function(require,module,exports){ | |
// Decode an IEEE-754 floating point number from `buffer` at `offset`.
// mLen is the mantissa bit count and nBytes the total byte width
// (23/4 for single precision, 52/8 for double — see the callers above).
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
  var e, m,
    eLen = nBytes * 8 - mLen - 1, // number of exponent bits
    eMax = (1 << eLen) - 1,
    eBias = eMax >> 1,
    nBits = -7,
    i = isLE ? (nBytes - 1) : 0, // start at the sign/exponent byte
    d = isLE ? -1 : 1, // direction of travel through the bytes
    s = buffer[offset + i];
  i += d;
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  // Accumulate the remaining exponent bits byte by byte.
  for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  // Accumulate the mantissa bits.
  for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);
  if (e === 0) {
    // Subnormal number: implicit exponent, no hidden leading bit.
    e = 1 - eBias;
  } else if (e === eMax) {
    // All-ones exponent encodes NaN (mantissa != 0) or +/-Infinity.
    return m ? NaN : ((s ? -1 : 1) * Infinity);
  } else {
    // Normal number: restore the implicit leading 1 bit, unbias exponent.
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
// Encode `value` as an IEEE-754 float into `buffer` at `offset`.
// Inverse of exports.read; mLen/nBytes select single or double precision.
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
  var e, m, c,
    eLen = nBytes * 8 - mLen - 1, // number of exponent bits
    eMax = (1 << eLen) - 1,
    eBias = eMax >> 1,
    // Rounding compensation; only non-zero for single precision.
    rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
    i = isLE ? 0 : (nBytes - 1),
    d = isLE ? 1 : -1,
    // Sign bit; the (value === 0 && 1 / value < 0) test catches -0.
    s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
  value = Math.abs(value);
  if (isNaN(value) || value === Infinity) {
    // NaN encodes with a non-zero mantissa, Infinity with zero.
    m = isNaN(value) ? 1 : 0;
    e = eMax;
  } else {
    e = Math.floor(Math.log(value) / Math.LN2);
    if (value * (c = Math.pow(2, -e)) < 1) {
      e--;
      c *= 2;
    }
    if (e + eBias >= 1) {
      value += rt / c;
    } else {
      value += rt * Math.pow(2, 1 - eBias);
    }
    if (value * c >= 2) {
      e++;
      c /= 2;
    }
    if (e + eBias >= eMax) {
      // Overflow: rounds to Infinity.
      m = 0;
      e = eMax;
    } else if (e + eBias >= 1) {
      // Normal number: drop the implicit leading 1, bias the exponent.
      m = (value * c - 1) * Math.pow(2, mLen);
      e = e + eBias;
    } else {
      // Subnormal number.
      m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
      e = 0;
    }
  }
  // Write the mantissa bytes, then the packed exponent/mantissa bits.
  for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);
  e = (e << mLen) | m;
  eLen += mLen;
  for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);
  // Finally OR the sign bit into the most significant byte.
  buffer[offset + i - d] |= s * 128;
};
},{}],23:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
module.exports = Duplex;
var inherits = require('inherits');
var setImmediate = require('process/browser.js').nextTick;
var Readable = require('./readable.js');
var Writable = require('./writable.js');
inherits(Duplex, Readable);
// Borrow the writable side's methods directly (the "parasitic" half).
Duplex.prototype.write = Writable.prototype.write;
Duplex.prototype.end = Writable.prototype.end;
Duplex.prototype._write = Writable.prototype._write;
// Duplex constructor: initializes both the readable and writable
// halves, honoring the readable/writable/allowHalfOpen options.
function Duplex(options) {
  // Support calling without `new`.
  if (!(this instanceof Duplex)) return new Duplex(options);
  Readable.call(this, options);
  Writable.call(this, options);
  var opts = options || {};
  if (opts.readable === false) this.readable = false;
  if (opts.writable === false) this.writable = false;
  // Half-open (readable side may end while the writable side stays
  // open) is the default; opt out with allowHalfOpen: false.
  this.allowHalfOpen = opts.allowHalfOpen !== false;
  this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
  // Nothing to do if half-open is allowed, or the writable side has
  // already ended on its own.
  if (this.allowHalfOpen || this._writableState.ended) return;
  // Otherwise end the writable side too — but defer it so writes
  // queued in the current tick still land.
  var self = this;
  setImmediate(function () {
    self.end();
  });
}
},{"./readable.js":27,"./writable.js":29,"inherits":33,"process/browser.js":25}],24:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Stream;
var EE = require('events').EventEmitter;
var inherits = require('inherits');
inherits(Stream, EE);
// Expose the stream subclasses on the main export, node-style.
Stream.Readable = require('./readable.js');
Stream.Writable = require('./writable.js');
Stream.Duplex = require('./duplex.js');
Stream.Transform = require('./transform.js');
Stream.PassThrough = require('./passthrough.js');
// Backwards-compat with node 0.4.x
Stream.Stream = Stream;
// old-style streams. Note that the pipe method (the only relevant
// part of this class) is overridden in the Readable class.
function Stream() {
  EE.call(this);
}
// Classic (streams1) pipe: forward 'data' events from `source` to
// `dest`, pausing/resuming on backpressure, ending `dest` when the
// source ends (unless options.end === false), and tearing down all
// listeners on error/close. The listener add/remove order below is
// load-bearing — do not reorder.
Stream.prototype.pipe = function(dest, options) {
  var source = this;
  function ondata(chunk) {
    if (dest.writable) {
      // dest.write returning false signals backpressure: pause the
      // source if it supports pausing.
      if (false === dest.write(chunk) && source.pause) {
        source.pause();
      }
    }
  }
  source.on('data', ondata);
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume();
    }
  }
  dest.on('drain', ondrain);
  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events. Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend);
    source.on('close', onclose);
  }
  var didOnEnd = false;
  function onend() {
    if (didOnEnd) return;
    didOnEnd = true;
    dest.end();
  }
  function onclose() {
    if (didOnEnd) return;
    didOnEnd = true;
    if (typeof dest.destroy === 'function') dest.destroy();
  }
  // don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup();
    // Re-throw only when nobody else is listening for 'error'.
    if (EE.listenerCount(this, 'error') === 0) {
      throw er; // Unhandled stream error in pipe.
    }
  }
  source.on('error', onerror);
  dest.on('error', onerror);
  // remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata);
    dest.removeListener('drain', ondrain);
    source.removeListener('end', onend);
    source.removeListener('close', onclose);
    source.removeListener('error', onerror);
    dest.removeListener('error', onerror);
    source.removeListener('end', cleanup);
    source.removeListener('close', cleanup);
    dest.removeListener('close', cleanup);
  }
  source.on('end', cleanup);
  source.on('close', cleanup);
  dest.on('close', cleanup);
  dest.emit('pipe', source);
  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest;
};
},{"./duplex.js":23,"./passthrough.js":26,"./readable.js":27,"./transform.js":28,"./writable.js":29,"events":18,"inherits":33}],25:[function(require,module,exports){ | |
module.exports=require(19) | |
},{}],26:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a passthrough stream. | |
// basically just the most minimal sort of Transform stream. | |
// Every written chunk gets output as-is. | |
module.exports = PassThrough; | |
var Transform = require('./transform.js'); | |
var inherits = require('inherits'); | |
inherits(PassThrough, Transform); | |
// PassThrough constructor: a Transform with the identity _transform.
function PassThrough(options) {
  // Allow construction without `new`.
  if (!(this instanceof PassThrough)) {
    return new PassThrough(options);
  }
  Transform.call(this, options);
}
// Identity transform: forward every chunk downstream unchanged.
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
},{"./transform.js":28,"inherits":33}],27:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Readable; | |
Readable.ReadableState = ReadableState; | |
var EE = require('events').EventEmitter; | |
var Stream = require('./index.js'); | |
var Buffer = require('buffer').Buffer; | |
var setImmediate = require('process/browser.js').nextTick; | |
var StringDecoder; | |
var inherits = require('inherits'); | |
inherits(Readable, Stream); | |
// Per-stream state bag shared by all Readable machinery: buffering,
// flow control flags, encoding, and pipe bookkeeping.
function ReadableState(options, stream) {
  options = options || {};
  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  // List of buffered chunks and total buffered byte (or object) count.
  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;
  // In streams that never have any data, and do push(null) right away,
  // the consumer can miss the 'end' event if they do some I/O before
  // consuming the stream. So, we don't emit('end') until some reading
  // happens.
  this.calledRead = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;
  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;
  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;
  // Lazily-created StringDecoder when an output encoding is requested.
  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable stream constructor. Works with or without `new`.
function Readable(options) {
  if (!(this instanceof Readable)) {
    return new Readable(options);
  }

  // All streams2 bookkeeping lives on this state object.
  this._readableState = new ReadableState(options, this);

  // legacy flag checked by old-style stream consumers
  this.readable = true;

  Stream.call(this);
}
// Manually shove something into the read() buffer.
// Returns true while the highWaterMark has not been hit, mirroring
// Writable.write()'s "keep writing" signal.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  // Strings are converted to Buffers up front, unless the stream is in
  // objectMode or the chunk is already in the stream's own encoding
  // (in which case the decoder handles it later in readableAddChunk).
  if (!state.objectMode && typeof chunk === 'string') {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};
// Put a chunk back at the front of the buffer. The argument should
// *always* be something that came directly out of read().
Readable.prototype.unshift = function(chunk) {
  return readableAddChunk(this, this._readableState, chunk, '', true);
};
// Core of push()/unshift(): validate the chunk, update the buffer and
// state flags, and fire 'readable' when a consumer is waiting.
// Returns needMoreData(state) — whether the caller should keep pushing.
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // push(null) signals EOF.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // unshift()ed chunks skip the decoder: they came *out* of read()
      // and are therefore already decoded.
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);
      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }
      if (state.needReadable)
        emitReadable(stream);
      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // Empty chunk from a push(): the _read() round-trip is complete.
    state.reading = false;
  }
  return needMoreData(state);
}
// Decide whether push() should ask the caller for more data.
// True when the stream hasn't ended and any of these hold:
//   - a 'readable' event is pending (the consumer asked for data),
//   - the buffer is below the highWaterMark,
//   - the buffer is empty (covers the hwm === 0 case, e.g. the repl).
function needMoreData(state) {
  if (state.ended) {
    return false;
  }
  return state.needReadable ||
         state.length === 0 ||
         state.length < state.highWaterMark;
}
// backwards compatibility: set the encoding for strings returned by
// read(). All buffered/incoming chunks will be run through a
// StringDecoder from here on.
Readable.prototype.setEncoding = function(enc) {
  // Lazy-load StringDecoder; it is only needed once an encoding is set.
  if (!StringDecoder)
    StringDecoder = require('string_decoder').StringDecoder;
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
};
// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;

// Round n up to the next power of two (classic bit-smearing trick),
// clamped at MAX_HWM. Values that are already powers of two come back
// unchanged.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) {
    return MAX_HWM;
  }
  // Smear the highest set bit of (n - 1) into every lower position,
  // then add one to land on the next power of two.
  n -= 1;
  n |= n >> 1;
  n |= n >> 2;
  n |= n >> 4;
  n |= n >> 8;
  n |= n >> 16;
  return n + 1;
}
// Work out how many bytes (or objects) read(n) should actually hand
// out, given the requested n and the current buffer state. May raise
// the highWaterMark or set state.needReadable as a side effect.
function howMuchToRead(n, state) {
  // EOF seen and nothing buffered: nothing to give.
  if (state.length === 0 && state.ended)
    return 0;

  // objectMode ignores byte counts entirely: one object at a time.
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  // Note: the coercing global isNaN is deliberate here — it treats an
  // omitted/undefined n the same as NaN, meaning "give me everything".
  if (n === null || isNaN(n)) {
    // only flow one buffered chunk at a time
    return (state.flowing && state.buffer.length)
      ? state.buffer[0].length
      : state.length;
  }

  if (n <= 0)
    return 0;

  // Asking for more than the target buffer level raises the water mark,
  // bumped to the next power of 2 so tiny requests don't creep it up.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  if (n > state.length) {
    // Can't satisfy the request yet. Remember that a 'readable' event
    // is wanted — unless we've ended, in which case hand over the rest.
    if (state.ended)
      return state.length;
    state.needReadable = true;
    return 0;
  }

  return n;
}
// you can override either this method, or the async _read(n) below.
//
// Synchronously returns up to n bytes (or one object in objectMode), or
// null when the requested amount is not available yet. May call
// this._read() as a side effect to ask the source for more data.
Readable.prototype.read = function(n) {
  var state = this._readableState;
  // 'end' is withheld until the consumer has actually called read().
  state.calledRead = true;
  var nOrig = n;

  // Any real read request re-arms the 'readable' event.
  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0)
      endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;

  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;

  if (doRead) {
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }

  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);

  var ret;
  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }

  state.length -= n;

  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;

  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable(this);

  return ret;
};
// Validate a chunk passed to push()/unshift(). Returns a TypeError for
// anything that is not a Buffer, a string, or null/undefined — unless
// the stream is in objectMode, where any value is allowed. Returns null
// when the chunk is acceptable.
// (The original also tested `!er` in its condition, which was always
// true because `er` had just been initialized to null — dead check
// removed; behavior is unchanged.)
function chunkInvalid(state, chunk) {
  if (state.objectMode)
    return null;
  if (Buffer.isBuffer(chunk) ||
      'string' === typeof chunk ||
      chunk === null ||
      chunk === undefined)
    return null;
  return new TypeError('Invalid non-string/buffer chunk');
}
// Handle push(null): flush anything held by the string decoder, mark
// the stream as ended, and either emit 'readable' (data remains) or
// finish the stream entirely.
function onEofChunk(stream, state) {
  if (state.decoder && !state.ended) {
    // The decoder may be holding a partial multi-byte character; flush it.
    var chunk = state.decoder.end();
    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }
  state.ended = true;
  // if we've ended and we have some data left, then emit
  // 'readable' now to make sure it gets picked up.
  if (state.length > 0)
    emitReadable(stream);
  else
    endReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  // Only one 'readable' per drain cycle; read() resets emittedReadable.
  if (state.emittedReadable)
    return;
  state.emittedReadable = true;
  // NOTE(review): `setImmediate` here is the bundle's alias for
  // process.nextTick (see the requires at the top of this module).
  if (state.sync)
    setImmediate(function() {
      emitReadable_(stream);
    });
  else
    emitReadable_(stream);
}

// Actually emit the event (split out so emitReadable can defer it).
function emitReadable_(stream) {
  stream.emit('readable');
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
  // Schedule at most one pre-read pass per tick.
  if (!state.readingMore) {
    state.readingMore = true;
    setImmediate(function() {
      maybeReadMore_(stream, state);
    });
  }
}

// Keep issuing read(0) (which may trigger _read) until the buffer
// reaches the highWaterMark or a pass produces no new data.
function maybeReadMore_(stream, state) {
  var len = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (len === state.length)
      // didn't get any data, stop spinning.
      break;
    else
      len = state.length;
  }
  state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
  // The base class has no data source; subclasses must override this.
  this.emit('error', new Error('not implemented'));
};
// Connect this readable to a writable destination. Supports multiple
// destinations, honors backpressure via the dest's 'drain' events, and
// (unless {end:false} is passed or the dest is stdout/stderr) ends the
// destination when the source ends. Returns dest so pipes can chain.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // pipes holds the single dest until there are two, then an array.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;

  // Never auto-end stdout/stderr; respect an explicit {end:false}.
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    setImmediate(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    // Only tear down when it is *this* source being unpiped.
    if (readable !== src) return;
    cleanup();
  }

  function onend() {
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  // check for listeners before emit removes one-time listeners.
  var errListeners = EE.listenerCount(dest, 'error');
  function onerror(er) {
    unpipe();
    // Re-emit (and therefore throw) only if no one else is listening.
    if (errListeners === 0 && EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  dest.once('error', onerror);

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable);

    state.flowing = true;
    setImmediate(function() {
      flow(src);
    });
  }

  return dest;
};
// Build the 'drain' handler that .pipe() installs on a destination.
// Each drain decrements the source's awaitDrain counter; once every
// blocked destination has drained, flow() restarts the data pump.
// (Removed the original's unused local `var dest = this`.)
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain--;
    if (state.awaitDrain === 0)
      flow(src);
  };
}
// The pump for piped streams: repeatedly read() and write each chunk
// to every destination until data runs out or some destination needs
// a drain. Also emits legacy 'data' events for each chunk.
function flow(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;

  // Write the current chunk to one destination; count any pushback.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }

  while (state.pipesCount && null !== (chunk = src.read())) {

    if (state.pipesCount === 1)
      write(state.pipes, 0, null);
    else
      forEach(state.pipes, write);

    src.emit('data', chunk);

    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }

  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;

    // if there were data event listeners added, then switch to old mode.
    if (EE.listenerCount(src, 'data') > 0)
      emitDataEvents(src);
    return;
  }

  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
// 'readable' handler installed by pipe(): if the last flow() pass ran
// the buffer dry (ranOut), a new readable event means data is available
// again, so restart the pump.
function pipeOnReadable() {
  var state = this._readableState;
  if (!state.ranOut)
    return;
  state.ranOut = false;
  flow(this);
}
// Detach one destination (or all of them when called with no argument).
// Emits 'unpipe' on each detached dest and stops the flow when nothing
// remains piped. Always returns `this` for chaining.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;

  // just one destination. most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;

    if (!dest)
      dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // Collapse back to the single-destination representation.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  // Adding a 'data' listener to a non-flowing stream switches it into
  // the legacy flowing (streams1) mode.
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        // Prime the pump so the new listener gets an event.
        this.read(0);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
  emitDataEvents(this);
  // Kick off a read so data starts moving immediately.
  this.read(0);
  this.emit('resume');
};
Readable.prototype.pause = function() {
  // true => start out in the paused state.
  emitDataEvents(this, true);
  this.emit('pause');
};
// Convert this stream to the legacy (streams1) interface: emit 'data'
// events directly and honor pause()/resume(). This is one-way — once a
// stream is flowing in new mode the switch is disallowed.
function emitDataEvents(stream, startPaused) {
  var state = stream._readableState;

  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }

  var paused = startPaused || false;
  var readable = false;

  // convert to an old-style stream.
  stream.readable = true;
  stream.pipe = Stream.prototype.pipe;
  stream.on = stream.addListener = Stream.prototype.on;

  stream.on('readable', function() {
    readable = true;

    // Drain the buffer into 'data' events until paused or empty.
    var c;
    while (!paused && (null !== (c = stream.read())))
      stream.emit('data', c);

    if (c === null) {
      readable = false;
      stream._readableState.needReadable = true;
    }
  });

  stream.pause = function() {
    paused = true;
    this.emit('pause');
  };

  stream.resume = function() {
    paused = false;
    if (readable)
      setImmediate(function() {
        stream.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };

  // now make it start, just in case it hadn't already.
  stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Returns `this`, fed by `stream`'s 'data'/'end' events, with the old
// stream's methods and important events proxied through.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;

  // EOF: flush any bytes held by the decoder, then signal end-of-stream.
  stream.on('end', function() {
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });

  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    // Skip empty chunks (unless in objectMode, where anything goes).
    if (!chunk || !state.objectMode && !chunk.length)
      return;
    var ret = self.push(chunk);
    if (!ret) {
      // Backpressure: stop the source until _read() is called again.
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, function() {
      // BUG FIX: the original did `self.emit.apply(self, ev, x)`, but
      // Function#apply takes an *array* of arguments as its second
      // parameter; passing the event name string there throws a
      // TypeError the first time any proxied event fires. Forward the
      // event name plus all original event arguments instead.
      var args = [ev];
      args.push.apply(args, arguments);
      return self.emit.apply(self, args);
    });
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
// exposed for testing purposes only (used by the core stream tests).
Readable._fromList = fromList;
// Pluck off n bytes (or one object) from the buffered chunk list.
// state.length is the combined length of everything in state.buffer;
// the caller is responsible for decrementing it afterwards.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;   // decoded streams buffer strings
  var objectMode = !!state.objectMode;

  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0)
    return null;

  // objectMode hands out exactly one queued object.
  if (objectMode)
    return list.shift();

  // Read everything: join/concat the whole list and empty it.
  if (!n || n >= length) {
    var all = stringMode ? list.join('') : Buffer.concat(list, length);
    list.length = 0;
    return all;
  }

  // Partial read from within the first chunk only.
  // slice works identically for buffers and strings.
  if (n < list[0].length) {
    var first = list[0];
    list[0] = first.slice(n);
    return first.slice(0, n);
  }

  // First chunk is an exact match.
  if (n === list[0].length)
    return list.shift();

  // Spans multiple chunks: assemble the result piece by piece.
  var ret = stringMode ? '' : new Buffer(n);
  var copied = 0;
  for (var i = 0, l = list.length; i < l && copied < n; i++) {
    var chunk = list[0];
    var take = Math.min(n - copied, chunk.length);

    if (stringMode)
      ret += chunk.slice(0, take);
    else
      chunk.copy(ret, copied, 0, take);

    if (take < chunk.length)
      list[0] = chunk.slice(take);
    else
      list.shift();

    copied += take;
  }
  return ret;
}
// Emit 'end' (once, on a later tick) after the buffer fully drains.
// Deferred until the consumer has called read() at least once, so a
// stream that ends immediately cannot fire 'end' before anyone listens.
function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node. Should never happen.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (!state.endEmitted && state.calledRead) {
    state.ended = true;
    setImmediate(function() {
      // Check that we didn't get one last unshift.
      if (!state.endEmitted && state.length === 0) {
        state.endEmitted = true;
        stream.readable = false;
        stream.emit('end');
      }
    });
  }
}
// Minimal Array#forEach stand-in; the callback gets (value, index).
function forEach (xs, f) {
  var len = xs.length;
  for (var i = 0; i < len; i++) {
    f(xs[i], i);
  }
}
// Minimal Array#indexOf stand-in using strict equality.
function indexOf (xs, x) {
  for (var i = 0; i < xs.length; i++) {
    if (xs[i] === x)
      return i;
  }
  return -1;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./index.js":24,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"events":18,"inherits":33,"process/browser.js":25,"string_decoder":30}],28:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a transform stream is a readable/writable stream where you do | |
// something with the data. Sometimes it's called a "filter", | |
// but that's not a great name for it, since that implies a thing where | |
// some bits pass through, and others are simply ignored. (That would | |
// be a valid example of a transform, of course.) | |
// | |
// While the output is causally related to the input, it's not a | |
// necessarily symmetric or synchronous transformation. For example, | |
// a zlib stream might take multiple plain-text writes(), and then | |
// emit a single compressed chunk some time in the future. | |
// | |
// Here's how this works: | |
// | |
// The Transform stream has all the aspects of the readable and writable | |
// stream classes. When you write(chunk), that calls _write(chunk,cb) | |
// internally, and returns false if there's a lot of pending writes | |
// buffered up. When you call read(), that calls _read(n) until | |
// there's enough pending readable data buffered up. | |
// | |
// In a transform stream, the written data is placed in a buffer. When | |
// _read(n) is called, it transforms the queued up data, calling the | |
// buffered _write cb's as it consumes chunks. If consuming a single | |
// written chunk would result in multiple output chunks, then the first | |
// outputted bit calls the readcb, and subsequent chunks just go into | |
// the read buffer, and will cause it to emit 'readable' if necessary. | |
// | |
// This way, back-pressure is actually determined by the reading side, | |
// since _read has to be called to start processing a new chunk. However, | |
// a pathological inflate type of transform can cause excessive buffering | |
// here. For example, imagine a stream where every byte of input is | |
// interpreted as an integer from 0-255, and then results in that many | |
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in | |
// 1kb of data being output. In this case, you could write a very small | |
// amount of input, and end up with a very large amount of output. In | |
// such a pathological inflating mechanism, there'd be no way to tell | |
// the system to stop doing the transform. A single 4MB write could | |
// cause the system to run out of memory. | |
// | |
// However, even in such a pathological case, only a single written chunk | |
// would be consumed, and then the rest would wait (un-transformed) until | |
// the results of the previous transformed chunk were consumed. | |
// Module wiring: Transform is a Duplex stream whose output is produced
// by a user-supplied _transform(chunk, encoding, cb) implementation.
module.exports = Transform;
var Duplex = require('./duplex.js');
var inherits = require('inherits');
inherits(Transform, Duplex);
// Per-instance bookkeeping for a Transform stream: the pending write
// callback/chunk and whether a _transform() call is in flight.
function TransformState(options, stream) {
  // Pre-bound completion callback handed to the user's _transform().
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };

  this.needTransform = false;   // readable side asked for data first
  this.transforming = false;    // a _transform() call is in progress
  this.writecb = null;          // callback of the parked write
  this.writechunk = null;       // chunk waiting to be transformed
}
// Completion callback for the user's _transform(): clear the in-flight
// state, forward any produced data, invoke the parked write callback,
// and pull the next chunk if the readable side still wants data.
function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;
  var cb = ts.writecb;
  // No parked callback means _transform's cb fired twice (or with no
  // pending write) — that's a programming error in the subclass.
  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));
  ts.writechunk = null;
  ts.writecb = null;
  if (data !== null && data !== undefined)
    stream.push(data);
  if (cb)
    cb(er);
  var rs = stream._readableState;
  rs.reading = false;
  // If the consumer is waiting or the buffer is low, transform the next
  // parked write immediately.
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}
// Transform constructor: a Duplex whose writable side feeds the user's
// _transform() and whose readable side serves the transformed output.
// Usable with or without `new`.
// (Removed the original's unused local `ts` that aliased
// this._transformState.)
function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);

  Duplex.call(this, options);

  this._transformState = new TransformState(options, this);

  // when the writable side finishes, then flush out anything remaining.
  var stream = this;

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  this.once('finish', function() {
    if ('function' === typeof this._flush)
      // Give the subclass a chance to emit trailing data before EOF.
      this._flush(function(er) {
        done(stream, er);
      });
    else
      done(stream);
  });
}
// Producing output satisfies any pending transform request, so clear
// needTransform before delegating to the standard Duplex push.
Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
  // Abstract: subclasses must supply the actual transformation.
  throw new Error('not implemented');
};
// Writable-side hook: park the incoming chunk and its callback; the
// chunk is consumed when _read() triggers a transform.
Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;

  if (ts.transforming)
    return;

  // If the readable side is hungry (or a transform was already
  // requested), kick _read() to process this chunk right away.
  var rs = this._readableState;
  if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark)
    this._read(rs.highWaterMark);
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
  var ts = this._transformState;

  if (!ts.writechunk || !ts.writecb || ts.transforming) {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
    return;
  }

  ts.transforming = true;
  this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
};
// Finalize a Transform once the writable side has finished and any
// _flush() has completed: verify nothing is still buffered or being
// transformed, then end the readable side with push(null).
// (Removed the original's unused local `rs`.)
function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}
},{"./duplex.js":23,"inherits":33}],29:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// A bit simpler than readable streams. | |
// Implement an async ._write(chunk, cb), and it'll handle all | |
// the drain event emission and buffering. | |
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable;
Writable.WritableState = WritableState;
// Feature-detect typed arrays. When Uint8Array/ArrayBuffer are unavailable
// (very old browsers) or values come from another realm, fall back to
// constructor-name sniffing.
var isUint8Array = typeof Uint8Array !== 'undefined'
  ? function (x) { return x instanceof Uint8Array }
  : function (x) {
      return x && x.constructor && x.constructor.name === 'Uint8Array'
    }
;
var isArrayBuffer = typeof ArrayBuffer !== 'undefined'
  ? function (x) { return x instanceof ArrayBuffer }
  : function (x) {
      return x && x.constructor && x.constructor.name === 'ArrayBuffer'
    }
;
var inherits = require('inherits');
var Stream = require('./index.js');
// Browser shim: this "nextTick" behaves like a setImmediate-style deferral.
var setImmediate = require('process/browser.js').nextTick;
var Buffer = require('buffer').Buffer;
inherits(Writable, Stream);
// One queued write request: the chunk itself, the encoding it should be
// decoded with, and the user callback to fire once it has been flushed.
function WriteReq(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
}
// Per-stream bookkeeping for a Writable: buffering, high-water-mark
// accounting, and the flags that drive 'drain'/'finish' emission.
function WritableState(options, stream) {
  options = options || {};
  // the point at which write() starts returning false
  // Note: 0 is a valid value, means that we always return false if
  // the entire buffer is not flushed immediately on write()
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
  // object stream flag to indicate whether or not this stream
  // contains buffers or objects.
  this.objectMode = !!options.objectMode;
  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;
  // set once write() reports back-pressure; cleared when 'drain' fires.
  this.needDrain = false;
  // at the start of calling end()
  this.ending = false;
  // when end() has been called, and returned
  this.ended = false;
  // when 'finish' is emitted
  this.finished = false;
  // should we decode strings into buffers before passing to _write?
  // this is here so that some node-core streams can optimize string
  // handling at a lower level.
  var noDecode = options.decodeStrings === false;
  this.decodeStrings = !noDecode;
  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';
  // not an actual buffer we keep track of, but a measurement
  // of how much we're waiting to get pushed to some underlying
  // socket or file.
  this.length = 0;
  // a flag to see when we're in the middle of a write.
  this.writing = false;
  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;
  // a flag to know if we're processing previously buffered items, which
  // may call the _write() callback in the same tick, so that we don't
  // end up in an overlapped onwrite situation.
  this.bufferProcessing = false;
  // the callback that's passed to _write(chunk,cb)
  this.onwrite = function(er) {
    onwrite(stream, er);
  };
  // the callback that the user supplies to write(chunk,encoding,cb)
  this.writecb = null;
  // the amount that is being written when _write is called.
  this.writelen = 0;
  // FIFO of WriteReq objects queued while a write is in flight.
  this.buffer = [];
}
// Writable stream constructor. Callable without `new`; Duplex applies this
// constructor to itself as well.
function Writable(options) {
  // Writable ctor is applied to Duplexes, though they're not
  // instanceof Writable, they're instanceof Readable.
  if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
    return new Writable(options);
  this._writableState = new WritableState(options, this);
  // legacy.
  this.writable = true;
  Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function() {
  // A plain Writable has no readable side, so piping from it is an error.
  this.emit('error', new Error('Cannot pipe. Not readable.'));
};
// Reject a write() issued after end(): emit 'error' on this tick and fail
// the user callback on a later one.
function writeAfterEnd(stream, state, cb) {
  var er = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', er);
  setImmediate(function() { cb(er); });
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
  var invalid = !Buffer.isBuffer(chunk) &&
      typeof chunk !== 'string' &&
      chunk !== null &&
      chunk !== undefined &&
      !state.objectMode;
  if (invalid) {
    var er = new TypeError('Invalid non-string/buffer chunk');
    // Error now, fail the callback asynchronously.
    stream.emit('error', er);
    setImmediate(function() { cb(er); });
  }
  return !invalid;
}
// Public write(). Normalizes the (chunk, encoding, cb) argument forms,
// coerces typed arrays into Buffers, validates the chunk, then buffers it or
// hands it to _write. Returns false when the internal buffer has reached the
// high-water mark (caller should wait for 'drain').
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;
  var ret = false;
  // write(chunk, cb) form: encoding was omitted.
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }
  // Accept Uint8Array / ArrayBuffer by copying into a Buffer first.
  if (!Buffer.isBuffer(chunk) && isUint8Array(chunk))
    chunk = new Buffer(chunk);
  if (isArrayBuffer(chunk) && typeof Uint8Array !== 'undefined')
    chunk = new Buffer(new Uint8Array(chunk));
  if (Buffer.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;
  if (typeof cb !== 'function')
    cb = function() {};
  if (state.ended)
    writeAfterEnd(this, state, cb);
  else if (validChunk(this, state, chunk, cb))
    ret = writeOrBuffer(this, state, chunk, encoding, cb);
  return ret;
};
// Convert a string chunk into a Buffer when the stream decodes strings;
// Buffers and objectMode chunks pass through untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
      state.decodeStrings !== false &&
      typeof chunk === 'string';
  return shouldDecode ? new Buffer(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn.  Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  // In objectMode every chunk counts as 1 regardless of byte size.
  var len = state.objectMode ? 1 : chunk.length;
  state.length += len;
  var hasRoom = state.length < state.highWaterMark;
  // Remember that a 'drain' is owed whenever back-pressure is reported.
  state.needDrain = !hasRoom;
  if (state.writing) {
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  } else {
    doWrite(stream, state, len, chunk, encoding, cb);
  }
  return hasRoom;
}
// Hand exactly one chunk to the underlying _write(), recording how much is
// in flight. state.sync stays true only while _write runs on this tick, so
// onwrite can tell a synchronous completion from an asynchronous one.
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Report a _write failure: the callback is deferred when the failure was
// synchronous (so write() callers never re-enter), then 'error' is emitted.
function onwriteError(stream, state, sync, er, cb) {
  if (sync) {
    setImmediate(function() { cb(er); });
  } else {
    cb(er);
  }
  stream.emit('error', er);
}
// Clear the per-write bookkeeping once the underlying _write reported back,
// and deduct the flushed bytes from the pending length.
function onwriteStateUpdate(state) {
  state.length -= state.writelen;
  state.writing = false;
  state.writecb = null;
  state.writelen = 0;
}
// Completion callback for _write(). Updates accounting, routes errors,
// drains any queued chunks, and runs the user callback — deferred to a later
// tick when _write completed synchronously, so callers never re-enter.
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;
  var cb = state.writecb;
  onwriteStateUpdate(state);
  if (er)
    onwriteError(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(stream, state);
    // Flush queued writes unless a clearBuffer call is already on the stack.
    if (!finished && !state.bufferProcessing && state.buffer.length)
      clearBuffer(stream, state);
    if (sync) {
      setImmediate(function() {
        afterWrite(stream, state, finished, cb);
      });
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}
// Run the user's write callback, emitting 'drain' beforehand when more data
// may still come, or 'finish' afterwards when this was the last write.
function afterWrite(stream, state, finished, cb) {
  if (!finished) {
    onwriteDrain(stream, state);
  }
  cb();
  if (finished) {
    finishMaybe(stream, state);
  }
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
  // Only fire once the buffer is empty AND a write() previously reported
  // back-pressure.
  if (state.length !== 0 || !state.needDrain) {
    return;
  }
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
// NOTE: `c` is deliberately `var`-scoped — it is read again after the loop
// to work out how much of the queue was consumed.
function clearBuffer(stream, state) {
  state.bufferProcessing = true;
  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    var len = state.objectMode ? 1 : chunk.length;
    doWrite(stream, state, len, chunk, encoding, cb);
    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }
  state.bufferProcessing = false;
  // Keep whatever part of the queue was not consumed this pass.
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
// Default _write: subclasses must override this, otherwise every write
// fails with 'not implemented'.
Writable.prototype._write = function(chunk, encoding, cb) {
  cb(new Error('not implemented'));
};
// Public end(). Optionally writes a final chunk, then marks the stream as
// ending so 'finish' can fire once everything has flushed. `cb` (if given)
// is attached to the 'finish' event via endWritable.
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;
  // end(cb) form.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    // end(chunk, cb) form.
    cb = encoding;
    encoding = null;
  }
  if (typeof chunk !== 'undefined' && chunk !== null)
    this.write(chunk, encoding);
  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished)
    endWritable(this, state, cb);
};
// A writable is ready to emit 'finish' once end() was called, every byte
// has been flushed, nothing is in flight, and 'finish' hasn't fired yet.
function needFinish(stream, state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  if (state.finished) return false;
  return !state.writing;
}
// Emit 'finish' exactly once, as soon as the stream is fully flushed and
// ended. Returns whether the stream is (now) finished.
function finishMaybe(stream, state) {
  var ready = needFinish(stream, state);
  if (ready) {
    state.finished = true;
    stream.emit('finish');
  }
  return ready;
}
// Transition the stream into its ended state and arrange for cb to run
// after 'finish' — immediately on a later tick if already finished.
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished) {
      setImmediate(cb);
    } else {
      stream.once('finish', cb);
    }
  }
  state.ended = true;
}
},{"./index.js":24,"buffer":20,"inherits":33,"process/browser.js":25}],30:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var Buffer = require('buffer').Buffer; | |
// Throw early for encodings the Buffer implementation does not understand.
// A falsy encoding is allowed through: callers default it to 'utf8'.
function assertEncoding(encoding) {
  if (!encoding) return;
  if (!Buffer.isEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
// Incrementally decodes Buffers into strings without splitting multi-byte
// characters across write() calls. Encodings with no multi-byte bookkeeping
// fall back to a stateless pass-through write.
var StringDecoder = exports.StringDecoder = function(encoding) {
  // Normalize e.g. 'UTF-8' / 'utf_8' to 'utf8'.
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // Single-byte encodings need no buffering at all.
      this.write = passThroughWrite;
      return;
  }
  // Storage for a partial character: 6 bytes covers the utf8 worst case.
  this.charBuffer = new Buffer(6);
  // Bytes of the partial character received so far.
  this.charReceived = 0;
  // Total byte length of the partial character being awaited (0 = none).
  this.charLength = 0;
};
// Decode `buffer`, returning as much complete string data as possible and
// buffering any trailing bytes of an incomplete character (or lead
// surrogate) until the next call.
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  var offset = 0;
  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var i = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;
    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, offset, i);
    this.charReceived += (i - offset);
    offset = i;
    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }
    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
    // lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // Keep waiting: the trail surrogate is needed before emitting.
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;
    // if there are no more bytes in this buffer, just emit our char
    if (i == buffer.length) return charStr;
    // otherwise cut off the characters end from the beginning of this buffer
    buffer = buffer.slice(i, buffer.length);
    break;
  }
  var lenIncomplete = this.detectIncompleteChar(buffer);
  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
    this.charReceived = lenIncomplete;
    end -= lenIncomplete;
  }
  charStr += buffer.toString(this.encoding, 0, end);
  // NOTE(review): `end` is re-declared here and reused for a different
  // purpose — last index of charStr, no longer a byte offset.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    // Shift buffered bytes over and stash the lead surrogate so the next
    // write() can complete the pair.
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
    return charStr.substring(0, end);
  }
  // or just emit the charStr
  return charStr;
};
// UTF-8 variant: scan the last (up to) 3 bytes for a multi-byte lead byte
// whose character would extend past the end of this buffer. Sets
// this.charLength when one is found; returns how many of its bytes are
// already present.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;
  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];
    // See http://en.wikipedia.org/wiki/UTF-8#Description
    // 110XXXXX — lead byte of a 2-byte char, only incomplete if it is last.
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }
    // 1110XXXX — lead byte of a 3-byte char.
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }
    // 11110XXX — lead byte of a 4-byte char.
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  return i;
};
// Flush the decoder: write any final buffer, then decode whatever partial
// character is still pending as-is (it may render as a replacement char).
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length) {
    res = this.write(buffer);
  }
  if (this.charReceived) {
    res += this.charBuffer.slice(0, this.charReceived).toString(this.encoding);
  }
  return res;
};
// Fallback write() used for encodings with no multi-byte bookkeeping.
function passThroughWrite(buffer) {
  return buffer.toString(this.encoding);
}
// UTF-16LE code units are 2 bytes wide: a single trailing byte is an
// incomplete character.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover ? 2 : 0;
  return leftover;
}
// Base64 encodes in 3-byte groups: 1 or 2 trailing bytes are incomplete.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover ? 3 : 0;
  return leftover;
}
},{"buffer":20}],31:[function(require,module,exports){ | |
module.exports = function isBuffer(arg) { | |
return arg && typeof arg === 'object' | |
&& typeof arg.copy === 'function' | |
&& typeof arg.fill === 'function' | |
&& typeof arg.readUInt8 === 'function'; | |
} | |
},{}],32:[function(require,module,exports){ | |
(function (process,global){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var formatRegExp = /%[sdj%]/g;
// printf-style formatter supporting %s, %d, %j and literal %%. Extra
// arguments are inspected and appended; a non-string first argument makes
// every argument go through inspect().
exports.format = function(f) {
  if (!isString(f)) {
    var objects = [];
    for (var i = 0; i < arguments.length; i++) {
      objects.push(inspect(arguments[i]));
    }
    return objects.join(' ');
  }
  var i = 1;
  var args = arguments;
  var len = args.length;
  var str = String(f).replace(formatRegExp, function(x) {
    if (x === '%%') return '%';
    // More placeholders than arguments: leave the placeholder as-is.
    if (i >= len) return x;
    switch (x) {
      case '%s': return String(args[i++]);
      case '%d': return Number(args[i++]);
      case '%j':
        try {
          return JSON.stringify(args[i++]);
        } catch (_) {
          // Circular structures cannot be stringified.
          return '[Circular]';
        }
      default:
        return x;
    }
  });
  // Append any arguments left over after placeholder substitution.
  for (var x = args[i]; i < len; x = args[++i]) {
    if (isNull(x) || !isObject(x)) {
      str += ' ' + x;
    } else {
      str += ' ' + inspect(x);
    }
  }
  return str;
};
// Mark that a method should not be used.
// Returns a modified function which warns once by default.
// If --no-deprecation is set, then it is a no-op.
exports.deprecate = function(fn, msg) {
  // Allow for deprecating things in the process of starting up.
  if (isUndefined(global.process)) {
    return function() {
      return exports.deprecate(fn, msg).apply(this, arguments);
    };
  }
  if (process.noDeprecation === true) {
    return fn;
  }
  var warned = false;
  // Wrapper: warns (or throws/traces, per process flags) on the first call
  // only, then behaves exactly like fn.
  function deprecated() {
    if (!warned) {
      if (process.throwDeprecation) {
        throw new Error(msg);
      } else if (process.traceDeprecation) {
        console.trace(msg);
      } else {
        console.error(msg);
      }
      warned = true;
    }
    return fn.apply(this, arguments);
  }
  return deprecated;
};
// Cache of per-section loggers, plus the NODE_DEBUG value read once lazily.
var debugs = {};
var debugEnviron;
// Returns a logger for `set` that prints to stderr only when NODE_DEBUG
// mentions that section; otherwise a no-op. Loggers are created once and
// memoized.
exports.debuglog = function(set) {
  if (isUndefined(debugEnviron))
    debugEnviron = process.env.NODE_DEBUG || '';
  set = set.toUpperCase();
  if (!debugs[set]) {
    if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
      var pid = process.pid;
      debugs[set] = function() {
        var msg = exports.format.apply(exports, arguments);
        console.error('%s %d: %s', set, pid, msg);
      };
    } else {
      debugs[set] = function() {};
    }
  }
  return debugs[set];
};
/**
 * Echos the value of a value. Trys to print the value out
 * in the best way possible given the different types.
 *
 * Options recognized on `opts`: showHidden (include non-enumerable keys),
 * depth (recursion limit, null = unlimited), colors (ANSI output),
 * customInspect (honor a value's own .inspect()).
 *
 * @param {Object} obj The object to print out.
 * @param {Object} opts Optional options object that alters the output.
 */
/* legacy: obj, showHidden, depth, colors*/
function inspect(obj, opts) {
  // default options
  var ctx = {
    seen: [],
    stylize: stylizeNoColor
  };
  // legacy... inspect(obj, showHidden, depth, colors) positional form.
  if (arguments.length >= 3) ctx.depth = arguments[2];
  if (arguments.length >= 4) ctx.colors = arguments[3];
  if (isBoolean(opts)) {
    // legacy...
    ctx.showHidden = opts;
  } else if (opts) {
    // got an "options" object
    exports._extend(ctx, opts);
  }
  // set default options
  if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
  if (isUndefined(ctx.depth)) ctx.depth = 2;
  if (isUndefined(ctx.colors)) ctx.colors = false;
  if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
  if (ctx.colors) ctx.stylize = stylizeWithColor;
  return formatValue(ctx, obj, ctx.depth);
}
exports.inspect = inspect;
// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
// Each entry is [openCode, closeCode] for an ANSI SGR escape sequence.
inspect.colors = {
  'bold' : [1, 22],
  'italic' : [3, 23],
  'underline' : [4, 24],
  'inverse' : [7, 27],
  'white' : [37, 39],
  'grey' : [90, 39],
  'black' : [30, 39],
  'blue' : [34, 39],
  'cyan' : [36, 39],
  'green' : [32, 39],
  'magenta' : [35, 39],
  'red' : [31, 39],
  'yellow' : [33, 39]
};
// Don't use 'blue' not visible on cmd.exe
// Maps an output category to a color name from inspect.colors above.
inspect.styles = {
  'special': 'cyan',
  'number': 'yellow',
  'boolean': 'yellow',
  'undefined': 'grey',
  'null': 'bold',
  'string': 'green',
  'date': 'magenta',
  // "name": intentionally not styling
  'regexp': 'red'
};
// Wrap str in the ANSI escape codes configured for styleType, if any.
function stylizeWithColor(str, styleType) {
  var style = inspect.styles[styleType];
  if (!style) {
    return str;
  }
  var codes = inspect.colors[style];
  return '\u001b[' + codes[0] + 'm' + str + '\u001b[' + codes[1] + 'm';
}
// Identity stylizer used when colors are disabled.
function stylizeNoColor(str, styleType) {
  return str;
}
// Build a lookup object whose keys are the array's values, all mapped to
// true (used as a fast membership set for visible keys).
function arrayToHash(array) {
  return array.reduce(function(hash, val) {
    hash[val] = true;
    return hash;
  }, {});
}
// Core of inspect(): dispatches on the value's type, honors custom
// .inspect() hooks, detects cycles via ctx.seen, and recurses at most
// recurseTimes levels deep.
function formatValue(ctx, value, recurseTimes) {
  // Provide a hook for user-specified inspect functions.
  // Check that value is an object with an inspect function on it
  if (ctx.customInspect &&
      value &&
      isFunction(value.inspect) &&
      // Filter out the util module, it's inspect function is  special
      value.inspect !== exports.inspect &&
      // Also filter out any prototype objects using the circular check.
      !(value.constructor && value.constructor.prototype === value)) {
    var ret = value.inspect(recurseTimes, ctx);
    if (!isString(ret)) {
      ret = formatValue(ctx, ret, recurseTimes);
    }
    return ret;
  }
  // Primitive types cannot have properties
  var primitive = formatPrimitive(ctx, value);
  if (primitive) {
    return primitive;
  }
  // Look up the keys of the object.
  var keys = Object.keys(value);
  var visibleKeys = arrayToHash(keys);
  if (ctx.showHidden) {
    keys = Object.getOwnPropertyNames(value);
  }
  // IE doesn't make error fields non-enumerable
  // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
  if (isError(value)
      && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
    return formatError(value);
  }
  // Some type of object without properties can be shortcutted.
  if (keys.length === 0) {
    if (isFunction(value)) {
      var name = value.name ? ': ' + value.name : '';
      return ctx.stylize('[Function' + name + ']', 'special');
    }
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    }
    if (isDate(value)) {
      return ctx.stylize(Date.prototype.toString.call(value), 'date');
    }
    if (isError(value)) {
      return formatError(value);
    }
  }
  var base = '', array = false, braces = ['{', '}'];
  // Make Array say that they are Array
  if (isArray(value)) {
    array = true;
    braces = ['[', ']'];
  }
  // Make functions say that they are functions
  if (isFunction(value)) {
    var n = value.name ? ': ' + value.name : '';
    base = ' [Function' + n + ']';
  }
  // Make RegExps say that they are RegExps
  if (isRegExp(value)) {
    base = ' ' + RegExp.prototype.toString.call(value);
  }
  // Make dates with properties first say the date
  if (isDate(value)) {
    base = ' ' + Date.prototype.toUTCString.call(value);
  }
  // Make error with message first say the error
  if (isError(value)) {
    base = ' ' + formatError(value);
  }
  if (keys.length === 0 && (!array || value.length == 0)) {
    return braces[0] + base + braces[1];
  }
  // Depth budget exhausted: summarize instead of recursing further.
  if (recurseTimes < 0) {
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    } else {
      return ctx.stylize('[Object]', 'special');
    }
  }
  // Track this object so cyclic references render as [Circular].
  ctx.seen.push(value);
  var output;
  if (array) {
    output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
  } else {
    output = keys.map(function(key) {
      return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
    });
  }
  ctx.seen.pop();
  return reduceToSingleString(output, base, braces);
}
// Render a non-object value, or return undefined to signal "not a
// primitive" so formatValue falls through to object handling.
function formatPrimitive(ctx, value) {
  if (isUndefined(value))
    return ctx.stylize('undefined', 'undefined');
  if (isString(value)) {
    // JSON-escape, then swap the double quotes for singles, REPL-style.
    var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
                                             .replace(/'/g, "\\'")
                                             .replace(/\\"/g, '"') + '\'';
    return ctx.stylize(simple, 'string');
  }
  if (isNumber(value))
    return ctx.stylize(String(value), 'number');
  if (isBoolean(value))
    return ctx.stylize(String(value), 'boolean');
  // For some reason typeof null is "object", so special case here.
  if (isNull(value))
    return ctx.stylize('null', 'null');
}
// Render an error the way node's REPL does: "[Error: message]".
function formatError(value) {
  return '[' + Error.prototype.toString.call(value) + ']';
}
// Format the indexed slots of an array (holes render as empty strings),
// then any extra non-index keys that were attached to it.
function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
  var output = [];
  for (var i = 0; i < value.length; ++i) {
    if (hasOwnProperty(value, String(i))) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          String(i), true));
    } else {
      output.push('');
    }
  }
  keys.forEach(function(key) {
    if (!/^\d+$/.test(key)) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          key, true));
    }
  });
  return output;
}
// Render a single "key: value" pair. Getters/setters are shown as tags
// rather than invoked; non-enumerable keys are bracketed; values already on
// ctx.seen render as [Circular].
function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
  var name, str, desc;
  desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
  if (desc.get) {
    if (desc.set) {
      str = ctx.stylize('[Getter/Setter]', 'special');
    } else {
      str = ctx.stylize('[Getter]', 'special');
    }
  } else {
    if (desc.set) {
      str = ctx.stylize('[Setter]', 'special');
    }
  }
  // Hidden (non-enumerable) keys are shown in brackets.
  if (!hasOwnProperty(visibleKeys, key)) {
    name = '[' + key + ']';
  }
  if (!str) {
    if (ctx.seen.indexOf(desc.value) < 0) {
      if (isNull(recurseTimes)) {
        str = formatValue(ctx, desc.value, null);
      } else {
        str = formatValue(ctx, desc.value, recurseTimes - 1);
      }
      // Indent multi-line values so they line up under their key.
      if (str.indexOf('\n') > -1) {
        if (array) {
          str = str.split('\n').map(function(line) {
            return ' ' + line;
          }).join('\n').substr(2);
        } else {
          str = '\n' + str.split('\n').map(function(line) {
            return ' ' + line;
          }).join('\n');
        }
      }
    } else {
      str = ctx.stylize('[Circular]', 'special');
    }
  }
  if (isUndefined(name)) {
    // Numeric indices inside arrays print without a key.
    if (array && key.match(/^\d+$/)) {
      return str;
    }
    name = JSON.stringify('' + key);
    // Identifier-like keys print bare; everything else is single-quoted.
    if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
      name = name.substr(1, name.length - 2);
      name = ctx.stylize(name, 'name');
    } else {
      name = name.replace(/'/g, "\\'")
                 .replace(/\\"/g, '"')
                 .replace(/(^"|"$)/g, "'");
      name = ctx.stylize(name, 'string');
    }
  }
  return name + ': ' + str;
}
// Join the rendered properties on one line when the estimated visible width
// fits (~60 chars), otherwise spread them across indented lines inside the
// braces.
function reduceToSingleString(output, base, braces) {
  var numLinesEst = 0;
  var length = 0;
  output.forEach(function(cur) {
    numLinesEst++;
    if (cur.indexOf('\n') >= 0) numLinesEst++;
    // Count visible width only: strip ANSI color escapes before measuring.
    length += cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
  });
  if (length > 60) {
    return braces[0] +
        (base === '' ? '' : base + '\n ') +
        ' ' +
        output.join(',\n ') +
        ' ' +
        braces[1];
  }
  return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
}
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(ar) {
  return Array.isArray(ar);
}
function isBoolean(arg) {
  return typeof arg === 'boolean';
}
function isNull(arg) {
  return arg === null;
}
function isNullOrUndefined(arg) {
  // Loose equality deliberately matches both null and undefined.
  return arg == null;
}
function isNumber(arg) {
  return typeof arg === 'number';
}
function isString(arg) {
  return typeof arg === 'string';
}
function isSymbol(arg) {
  return typeof arg === 'symbol';
}
function isUndefined(arg) {
  return arg === void 0;
}
function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}
function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}
function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}
function isFunction(arg) {
  return typeof arg === 'function';
}
function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
// Export the whole family in one place.
exports.isArray = isArray;
exports.isBoolean = isBoolean;
exports.isNull = isNull;
exports.isNullOrUndefined = isNullOrUndefined;
exports.isNumber = isNumber;
exports.isString = isString;
exports.isSymbol = isSymbol;
exports.isUndefined = isUndefined;
exports.isRegExp = isRegExp;
exports.isObject = isObject;
exports.isDate = isDate;
exports.isError = isError;
exports.isFunction = isFunction;
exports.isPrimitive = isPrimitive;
exports.isBuffer = require('./support/isBuffer'); | |
// Reliable [[Class]] tag, e.g. '[object RegExp]'.
function objectToString(o) {
  return Object.prototype.toString.call(o);
}
// Zero-pad a number to at least two digits (used for clock fields).
function pad(n) {
  var s = n.toString(10);
  return n < 10 ? '0' + s : s;
}
var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
              'Oct', 'Nov', 'Dec'];
// Current local time in the form "26 Feb 16:19:34", used by exports.log.
function timestamp() {
  var d = new Date();
  var time = [d.getHours(), d.getMinutes(), d.getSeconds()].map(pad).join(':');
  return [d.getDate(), months[d.getMonth()], time].join(' ');
}
// log is just a thin wrapper to console.log that prepends a timestamp | |
exports.log = function() { | |
console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); | |
}; | |
/** | |
* Inherit the prototype methods from one constructor into another. | |
* | |
* The Function.prototype.inherits from lang.js rewritten as a standalone | |
* function (not on Function.prototype). NOTE: If this file is to be loaded | |
* during bootstrapping this function needs to be rewritten using some native | |
* functions as prototype setup using normal JavaScript does not work as | |
* expected during bootstrapping (see mirror.js in r114903). | |
* | |
* @param {function} ctor Constructor function which needs to inherit the | |
* prototype. | |
* @param {function} superCtor Constructor function to inherit prototype from. | |
*/ | |
exports.inherits = require('inherits'); | |
exports._extend = function(origin, add) { | |
// Don't do anything if add isn't an object | |
if (!add || !isObject(add)) return origin; | |
var keys = Object.keys(add); | |
var i = keys.length; | |
while (i--) { | |
origin[keys[i]] = add[keys[i]]; | |
} | |
return origin; | |
}; | |
// Own-property check that is safe even when obj shadows hasOwnProperty.
function hasOwnProperty(obj, prop) {
  var owns = Object.prototype.hasOwnProperty;
  return owns.call(obj, prop);
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./support/isBuffer":31,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"inherits":33}],33:[function(require,module,exports){ | |
if (typeof Object.create === 'function') { | |
// implementation from standard node.js 'util' module | |
module.exports = function inherits(ctor, superCtor) { | |
ctor.super_ = superCtor | |
ctor.prototype = Object.create(superCtor.prototype, { | |
constructor: { | |
value: ctor, | |
enumerable: false, | |
writable: true, | |
configurable: true | |
} | |
}); | |
}; | |
} else { | |
// old school shim for old browsers | |
module.exports = function inherits(ctor, superCtor) { | |
ctor.super_ = superCtor | |
var TempCtor = function () {} | |
TempCtor.prototype = superCtor.prototype | |
ctor.prototype = new TempCtor() | |
ctor.prototype.constructor = ctor | |
} | |
} | |
},{}],34:[function(require,module,exports){ | |
// Queue one batch operation ({type, key, value, options}) on
// this._operations and return `this` for chaining. A `prefix` option is
// hoisted onto the operation itself (and removed from options) so
// sublevel batches can route the key.
function addOperation (type, key, value, options) {
  var op = { type: type, key: key, value: value, options: options }
  var hasPrefix = !!(options && options.prefix)
  if (hasPrefix) {
    op.prefix = options.prefix
    delete options.prefix
  }
  this._operations.push(op)
  return this
}
// A batch of queued operations against a sublevel database.
function Batch(sdb) {
  this._operations = []
  this._sdb = sdb
  // put/del share addOperation with the type pre-bound.
  this.put = addOperation.bind(this, 'put')
  this.del = addOperation.bind(this, 'del')
}
var proto = Batch.prototype
// Drop any queued operations.
proto.clear = function () {
  this._operations = []
}
// Flush the queued operations through the sublevel's batch().
proto.write = function (cb) {
  this._sdb.batch(this._operations, cb)
}
module.exports = Batch
},{}],35:[function(require,module,exports){ | |
(function (process){ | |
var EventEmitter = require('events').EventEmitter | |
var next = process.nextTick | |
var SubDb = require('./sub') | |
var Batch = require('./batch') | |
var fixRange = require('level-fix-range') | |
var Hooks = require('level-hooks') | |
// Wrap a levelup instance with sublevel support: namespaced key ranges,
// pre/post hooks and prefixed batches. Idempotent — a db that already
// has .sublevel is returned as-is.
module.exports = function (_db, options) {
// Put the wrapper in front of _db on the prototype chain so the
// original db object is never mutated directly.
function DB () {}
DB.prototype = _db
var db = new DB()
if (db.sublevel) return db
options = options || {}
//use \xff (255) as the seperator,
//so that sections of the database will sort after the regular keys
var sep = options.sep = options.sep || '\xff'
db._options = options
// install the pre/post hook registry (no-op if already hooked)
Hooks(db)
db.sublevels = {}
// Create (or return the cached) named sub-section of the database.
db.sublevel = function (prefix, options) {
if(db.sublevels[prefix])
return db.sublevels[prefix]
return new SubDb(db, prefix, options || this._options)
}
db.methods = {}
// Top-level db has no prefix; coerce any key to string.
db.prefix = function (key) {
return '' + (key || '')
}
// pre(hook) or pre(range, hook): run hook before writes in range.
db.pre = function (range, hook) {
if(!hook)
hook = range, range = {
max : sep
}
return db.hooks.pre(range, hook)
}
// post(hook) or post(range, hook): run hook after writes in range.
db.post = function (range, hook) {
if(!hook)
hook = range, range = {
max : sep
}
return db.hooks.post(range, hook)
}
// Clamp read-stream ranges so they stop before the \xff sublevel
// sections (or start there when reading in reverse).
function safeRange(fun) {
return function (opts) {
opts = opts || {}
opts = fixRange(opts)
if(opts.reverse) opts.start = opts.start || sep
else opts.end = opts.end || sep
return fun.call(db, opts)
}
}
db.readStream =
db.createReadStream = safeRange(db.createReadStream)
db.keyStream =
db.createKeyStream = safeRange(db.createKeyStream)
db.valuesStream =
db.createValueStream = safeRange(db.createValueStream)
var batch = db.batch
// batch() with no array returns a chainable Batch builder; with an
// array, each change's key is rewritten through its prefix first.
db.batch = function (changes, opts, cb) {
if(!Array.isArray(changes))
return new Batch(db)
changes.forEach(function (e) {
if(e.prefix) {
if('function' === typeof e.prefix.prefix)
e.key = e.prefix.prefix(e.key)
else if('string' === typeof e.prefix)
e.key = e.prefix + e.key
}
})
batch.call(db, changes, opts, cb)
}
return db
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./batch":34,"./sub":46,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"events":18,"level-fix-range":36,"level-hooks":38}],36:[function(require,module,exports){ | |
var clone = require('clone') | |
// Normalise {min,max,start,end,reverse} range options into {start,end}:
// min/max win over start/end, the pair is sorted ascending, then
// flipped when reverse is set. The input object is cloned, not mutated.
module.exports =
function fixRange(opts) {
  opts = clone(opts)
  var flip = opts.reverse
  var hi = opts.max || opts.end
  var lo = opts.min || opts.start
  var bounds = [lo, hi]
  if (lo != null && hi != null) bounds.sort()
  if (flip) bounds = bounds.reverse()
  opts.start = bounds[0]
  opts.end = bounds[1]
  delete opts.min
  delete opts.max
  return opts
}
},{"clone":37}],37:[function(require,module,exports){ | |
(function (Buffer){ | |
'use strict'; | |
// [[Class]] tag of a value, e.g. '[object Date]'.
function objectToString(o) {
  var tag = Object.prototype.toString;
  return tag.call(o);
}
// shim for Node's 'util' package
// DO NOT REMOVE THIS! It is required for compatibility with EnderJS (http://enderjs.com/).
var util = {
  // Array test that also accepts [object Array] values from other realms.
  isArray: function (ar) {
    if (Array.isArray(ar)) return true;
    return typeof ar === 'object' && objectToString(ar) === '[object Array]';
  },
  isDate: function (d) {
    return typeof d === 'object' && objectToString(d) === '[object Date]';
  },
  isRegExp: function (re) {
    return typeof re === 'object' && objectToString(re) === '[object RegExp]';
  },
  // Rebuild the flag string ("g", "i", "m") for a RegExp.
  getRegExpFlags: function (re) {
    var flags = [];
    if (re.global) flags.push('g');
    if (re.ignoreCase) flags.push('i');
    if (re.multiline) flags.push('m');
    return flags.join('');
  }
};
if (typeof module === 'object') | |
module.exports = clone; | |
/** | |
* Clones (copies) an Object using deep copying. | |
* | |
* This function supports circular references by default, but if you are certain | |
* there are no circular references in your object, you can save some CPU time | |
* by calling clone(obj, false). | |
* | |
* Caution: if `circular` is false and `parent` contains circular references, | |
* your program may enter an infinite loop and crash. | |
* | |
* @param `parent` - the object to be cloned | |
* @param `circular` - set to true if the object to be cloned may contain | |
* circular references. (optional - true by default) | |
* @param `depth` - set to a number if the object is only to be cloned to | |
* a particular depth. (optional - defaults to Infinity) | |
* @param `prototype` - sets the prototype to be used when cloning an object. | |
* (optional - defaults to parent prototype). | |
*/ | |
// Deep-copy `parent`. Handles arrays, RegExps, Dates and Buffers
// specially; everything else is copied via for-in over enumerable keys
// (inherited enumerables included). With circular=true (the default),
// already-visited parents are mapped back to their clones.
function clone(parent, circular, depth, prototype) {
// maintain two arrays for circular references, where corresponding parents
// and children have the same index
var allParents = [];
var allChildren = [];
var useBuffer = typeof Buffer != 'undefined';
if (typeof circular == 'undefined')
circular = true;
if (typeof depth == 'undefined')
depth = Infinity;
// recurse this function so we don't reset allParents and allChildren
function _clone(parent, depth) {
// cloning null always returns null
if (parent === null)
return null;
// depth exhausted: return the original reference, not a copy
if (depth == 0)
return parent;
var child;
// primitives (and functions) are returned as-is
if (typeof parent != 'object') {
return parent;
}
if (util.isArray(parent)) {
child = [];
} else if (util.isRegExp(parent)) {
child = new RegExp(parent.source, util.getRegExpFlags(parent));
if (parent.lastIndex) child.lastIndex = parent.lastIndex;
} else if (util.isDate(parent)) {
child = new Date(parent.getTime());
} else if (useBuffer && Buffer.isBuffer(parent)) {
// NOTE(review): `new Buffer(n)` is the legacy, now-deprecated API
// (modern code would use Buffer.alloc) — kept as-is in this bundle.
child = new Buffer(parent.length);
parent.copy(child);
return child;
} else {
// plain object: preserve the original prototype unless overridden
if (typeof prototype == 'undefined') child = Object.create(Object.getPrototypeOf(parent));
else child = Object.create(prototype);
}
if (circular) {
// if we've cloned this parent before, reuse that clone
var index = allParents.indexOf(parent);
if (index != -1) {
return allChildren[index];
}
allParents.push(parent);
allChildren.push(child);
}
// for-in: copies inherited enumerable properties too
for (var i in parent) {
child[i] = _clone(parent[i], depth - 1);
}
return child;
}
return _clone(parent, depth);
}
/**
 * Simple flat clone: returns a new empty object whose prototype IS
 * `parent`, so property writes shadow rather than mutate the original.
 * Only sensible for FLAT configuration objects (nested props stay
 * shared with the parent).
 *
 * USE WITH CAUTION! This may not behave as you wish if you do not know
 * how this works.
 */
clone.clonePrototype = function (parent) {
  if (parent === null) return null;
  function Surrogate() {}
  Surrogate.prototype = parent;
  return new Surrogate();
};
}).call(this,require("buffer").Buffer) | |
},{"buffer":20}],38:[function(require,module,exports){ | |
var ranges = require('string-range') | |
module.exports = function (db) { | |
if(db.hooks) { | |
return | |
} | |
var posthooks = [] | |
var prehooks = [] | |
// Resolve a prefix argument: a plain string, an object carrying a
// string .prefix, or an object whose .prefix() method returns one.
// Falsy input is passed straight through; anything else yields ''.
function getPrefix (p) {
  if (!p) return p
  if ('string' === typeof p) return p
  if ('string' === typeof p.prefix) return p.prefix
  if ('function' === typeof p.prefix) return p.prefix()
  return ''
}
// Ask the db for its key encoding via the private hook, when exposed;
// otherwise yields undefined.
function getKeyEncoding (db) {
  if (!db) return
  if (db._getKeyEncoding) return db._getKeyEncoding(db)
}
// Ask the db for its value encoding via the private hook, when exposed;
// otherwise yields undefined.
function getValueEncoding (db) {
  if (!db) return
  if (db._getValueEncoding) return db._getValueEncoding(db)
}
// Build an undo function that removes `item` from `array` once;
// later calls return false (the item is already gone).
function remover (array, item) {
  return function () {
    var idx = array.indexOf(item)
    if (idx === -1) return false
    array.splice(idx, 1)
    return true
  }
}
// Public hook registry. pre/post register a hook for a key range
// (string prefix, RegExp, {min,max} object, or predicate — compiled by
// string-range's checker) and return a function that unregisters it.
db.hooks = {
post: function (prefix, hook) {
// called as post(hook): hook the whole keyspace (empty prefix)
if(!hook) hook = prefix, prefix = ''
var h = {test: ranges.checker(prefix), hook: hook}
posthooks.push(h)
return remover(posthooks, h)
},
pre: function (prefix, hook) {
// called as pre(hook): hook the whole keyspace (empty prefix)
if(!hook) hook = prefix, prefix = ''
var h = {
test: ranges.checker(prefix),
hook: hook,
// safe (default true) forbids a pre-hook from inserting keys
// back into its own range — enforced in callHooks below
safe: false !== prefix.safe
}
prehooks.push(h)
return remover(prehooks, h)
},
posthooks: posthooks,
prehooks: prehooks
}
//POST HOOKS
// Fan one change event out to every post-hook whose range matches.
function each (e) {
  if (!e || !e.type) return
  posthooks.forEach(function (h) {
    if (h.test(e.key)) h.hook(e)
  })
}
db.on('put', function (key, val) { | |
each({type: 'put', key: key, value: val}) | |
}) | |
db.on('del', function (key, val) { | |
each({type: 'del', key: key, value: val}) | |
}) | |
db.on('batch', function onBatch (ary) { | |
ary.forEach(each) | |
}) | |
//PRE HOOKS | |
var put = db.put | |
var del = db.del | |
var batch = db.batch | |
// Run every matching pre-hook over the pending change list `b`, then
// forward the (possibly rewritten) changes to the underlying put/del/
// batch. Hooks may veto a change (delete b[i]) or append new changes,
// which are themselves re-hooked; a hook error aborts via the callback.
function callHooks (isBatch, b, opts, cb) {
try {
b.forEach(function hook(e, i) {
prehooks.forEach(function (h) {
if(h.test(String(e.key))) {
//optimize this?
//maybe faster to not create a new object each time?
//have one object and expose scope to it?
var context = {
// add(ch): append another change; add(false)/veto(): drop this one
add: function (ch, db) {
if(typeof ch === 'undefined') {
return this
}
if(ch === false)
return delete b[i]
var prefix = (
getPrefix(ch.prefix) ||
getPrefix(db) ||
h.prefix || ''
)
//don't leave a circular json object there incase using multilevel.
if(prefix) ch.prefix = prefix
ch.key = prefix + ch.key
// a safe hook must not write back into its own range
if(h.safe && h.test(String(ch.key))) {
//this usually means a stack overflow.
throw new Error('prehook cannot insert into own range')
}
var ke = ch.keyEncoding || getKeyEncoding(ch.prefix)
var ve = ch.valueEncoding || getValueEncoding(ch.prefix)
if(ke) ch.keyEncoding = ke
if(ve) ch.valueEncoding = ve
b.push(ch)
// recurse so the appended change is hooked too
hook(ch, b.length - 1)
return this
},
put: function (ch, db) {
if('object' === typeof ch) ch.type = 'put'
return this.add(ch, db)
},
del: function (ch, db) {
if('object' === typeof ch) ch.type = 'del'
return this.add(ch, db)
},
veto: function () {
return this.add(false)
}
}
h.hook.call(context, e, context.add, b)
}
})
})
} catch (err) {
// opts may actually be the callback in the 2-arg call forms
return (cb || opts)(err)
}
b = b.filter(function (e) {
return e && e.type //filter out empty items
})
// a single non-batch change goes through the scalar put/del fast path
if(b.length == 1 && !isBatch) {
var change = b[0]
return change.type == 'put'
? put.call(db, change.key, change.value, opts, cb)
: del.call(db, change.key, opts, cb)
}
return batch.call(db, b, opts, cb)
}
// Route single puts/dels through callHooks as one-element change lists
// so pre-hooks always see a uniform batch shape.
db.put = function (key, value, opts, cb) {
  var ops = [{ key: key, value: value, type: 'put' }]
  return callHooks(false, ops, opts, cb)
}
db.del = function (key, opts, cb) {
  var ops = [{ key: key, type: 'del' }]
  return callHooks(false, ops, opts, cb)
}
db.batch = function (batch, opts, cb) {
  return callHooks(true, batch, opts, cb)
}
} | |
},{"string-range":39}],39:[function(require,module,exports){ | |
//force to a valid range | |
//force to a valid range
// Coerce `obj` into a range object: null/undefined -> {} (everything),
// a string -> the one-prefix range {min: s, max: s + '\xff'}, anything
// else is returned unchanged.
// BUG FIX: the original tested `typeof range` — the enclosing function
// itself, which is never 'string' — instead of `typeof obj`, and built
// the result from `range` rather than `obj`, so the documented string
// shorthand branch was unreachable.
var range = exports.range = function (obj) {
  if (null == obj) return {}
  if ('string' === typeof obj) {
    return { min: obj, max: obj + '\xff' }
  }
  return obj
}
//turn into a sub range. | |
//turn into a sub range.
// Narrow `range` so it lives inside the `within` prefix. RegExp and
// function ranges span the whole prefix and get an inner() predicate
// applied to keys with the prefix stripped; object ranges have their
// min/max rebased onto the prefix (terminated by `term`, default \xff).
var prefix = exports.prefix = function (range, within, term) {
  range = exports.range(range)
  term = term || '\xff'
  var sub = {}
  var dynamic = range instanceof RegExp || 'function' == typeof range
  if (dynamic) {
    sub.min = within
    sub.max = within + term
    sub.inner = function (k) {
      var tail = k.substring(within.length)
      return range.test ? range.test(tail) : range(tail)
    }
  } else if ('object' === typeof range) {
    sub.min = within + (range.min || range.start || '')
    sub.max = within + (range.max || range.end || (term || '~'))
    sub.reverse = !!range.reverse
  }
  return sub
}
//return a function that checks a range | |
//return a function that checks a range
// Compile `range` — a string prefix, RegExp, predicate function, or a
// {min,max[,inner]} object — into a key-testing function. Unsupported
// range types yield undefined.
var checker = exports.checker = function (range) {
  if (!range) range = {}
  if ('string' === typeof range) {
    return function (key) {
      return key.indexOf(range) == 0
    }
  }
  if (range instanceof RegExp) {
    return function (key) {
      return range.test(key)
    }
  }
  if ('object' === typeof range) {
    return function (key) {
      var lo = range.min || range.start
      var hi = range.max || range.end
      // fixes keys passed as ints from sublevels
      key = String(key)
      if (lo && key < lo) return false
      if (hi && key > hi) return false
      if (!range.inner) return true
      return range.inner.test ? range.inner.test(key) : range.inner(key)
    }
  }
  if ('function' === typeof range) return range
}
//check if a key is within a range.
// (local alias renamed from the original's "satifies" typo)
var satisfies = exports.satisfies = function (key, range) {
  return checker(range)(key)
}
},{}],40:[function(require,module,exports){ | |
module.exports = hasKeys | |
// A value can carry own keys iff it is a non-null object or a function.
function hasKeys(source) {
  if (source === null) return false
  var t = typeof source
  return t === "object" || t === "function"
}
},{}],41:[function(require,module,exports){ | |
var Keys = require("object-keys") | |
var hasKeys = require("./has-keys") | |
module.exports = extend | |
// Merge the own keys of every argument (left to right, later wins)
// into a brand-new object; non-object arguments are skipped.
function extend() {
  var target = {}
  for (var a = 0; a < arguments.length; a++) {
    var src = arguments[a]
    if (!hasKeys(src)) continue
    var names = Keys(src)
    for (var k = 0; k < names.length; k++) {
      target[names[k]] = src[names[k]]
    }
  }
  return target
}
},{"./has-keys":40,"object-keys":42}],42:[function(require,module,exports){ | |
module.exports = Object.keys || require('./shim'); | |
},{"./shim":45}],43:[function(require,module,exports){ | |
var hasOwn = Object.prototype.hasOwnProperty; | |
var toString = Object.prototype.toString; | |
module.exports = function forEach (obj, fn, ctx) { | |
if (toString.call(fn) !== '[object Function]') { | |
throw new TypeError('iterator must be a function'); | |
} | |
var l = obj.length; | |
if (l === +l) { | |
for (var i = 0; i < l; i++) { | |
fn.call(ctx, obj[i], i, obj); | |
} | |
} else { | |
for (var k in obj) { | |
if (hasOwn.call(obj, k)) { | |
fn.call(ctx, obj[k], k, obj); | |
} | |
} | |
} | |
}; | |
},{}],44:[function(require,module,exports){ | |
/**! | |
* is | |
* the definitive JavaScript type testing library | |
* | |
* @copyright 2013 Enrico Marino | |
* @license MIT | |
*/ | |
var objProto = Object.prototype;
var owns = objProto.hasOwnProperty;
var toString = objProto.toString;
// NaN is the only value not equal to itself.
var isActualNaN = function (value) {
  return value !== value;
};
// typeof results that can never host properties.
var NON_HOST_TYPES = {
  "boolean": 1,
  "number": 1,
  "string": 1,
  "undefined": 1
};
/**
 * Expose `is`
 */
var is = module.exports = {};
/**
 * is.a / is.type: true when typeof value matches `type`.
 */
is.a =
is.type = function (value, type) {
  return type === typeof value;
};
/**
 * is.defined: anything but undefined.
 */
is.defined = function (value) {
  return undefined !== value;
};
/**
 * is.empty: zero-length array/arguments, own-key-less plain object, or
 * the empty string; every other value is non-empty.
 */
is.empty = function (value) {
  var tag = toString.call(value);
  if ('[object Array]' === tag || '[object Arguments]' === tag) {
    return 0 === value.length;
  }
  if ('[object Object]' === tag) {
    for (var key in value) {
      if (owns.call(value, key)) return false;
    }
    return true;
  }
  if ('[object String]' === tag) {
    return '' === value;
  }
  return false;
};
/**
 * is.equal
 * Test if `value` is equal to `other`: deep (recursive) for plain
 * objects and arrays, epoch comparison for dates, prototype comparison
 * for functions, strict equality otherwise. Values of different
 * [[Class]] tags are never equal.
 *
 * BUG FIX: the array branch used `while (--key)`, which stopped before
 * reaching index 0, so arrays differing only in their first element
 * compared as equal. `while (key--)` visits every index including 0.
 *
 * @param {Mixed} value value to test
 * @param {Mixed} other value to compare with
 * @return {Boolean} true if `value` is equal to `other`, false otherwise
 */
is.equal = function (value, other) {
  var type = toString.call(value);
  var key;
  if (type !== toString.call(other)) {
    return false;
  }
  if ('[object Object]' === type) {
    // NOTE(review): only keys of `value` are checked — extra keys on
    // `other` are ignored, matching the original one-sided behavior.
    for (key in value) {
      if (!is.equal(value[key], other[key])) {
        return false;
      }
    }
    return true;
  }
  if ('[object Array]' === type) {
    key = value.length;
    if (key !== other.length) {
      return false;
    }
    while (key--) {
      if (!is.equal(value[key], other[key])) {
        return false;
      }
    }
    return true;
  }
  if ('[object Function]' === type) {
    return value.prototype === other.prototype;
  }
  if ('[object Date]' === type) {
    return value.getTime() === other.getTime();
  }
  return value === other;
};
/**
 * is.hosted: true when host[value] can host properties — an
 * object-typed truthy value, or any typeof outside NON_HOST_TYPES.
 */
is.hosted = function (value, host) {
  var t = typeof host[value];
  if ('object' === t) return !!host[value];
  return !NON_HOST_TYPES[t];
};
/**
 * is.instance / is.instanceof: instanceof wrapper.
 */
is.instance = is['instanceof'] = function (value, constructor) {
  return value instanceof constructor;
};
/**
 * is.null
 */
is['null'] = function (value) {
  return null === value;
};
/**
 * is.undefined
 */
is.undefined = function (value) {
  return undefined === value;
};
/**
 * is.arguments: real arguments objects, plus pre-ES5 fakes — array-like
 * non-array objects carrying a callee function.
 */
is.arguments = function (value) {
  if ('[object Arguments]' === toString.call(value)) return true;
  return !is.array(value) && is.arraylike(value) && is.object(value) && is.fn(value.callee);
};
/**
 * is.array
 */
is.array = function (value) {
  return '[object Array]' === toString.call(value);
};
/**
 * is.arguments.empty
 */
is.arguments.empty = function (value) {
  return is.arguments(value) && 0 === value.length;
};
/**
 * is.array.empty
 */
is.array.empty = function (value) {
  return is.array(value) && 0 === value.length;
};
/**
 * is.arraylike: truthy non-boolean with a finite, non-negative numeric
 * own `length` property.
 */
is.arraylike = function (value) {
  if (!value || is.boolean(value)) return false;
  if (!owns.call(value, 'length')) return false;
  return isFinite(value.length) && is.number(value.length) && value.length >= 0;
};
/**
 * is.boolean: boolean primitive or Boolean object.
 */
is.boolean = function (value) {
  return '[object Boolean]' === toString.call(value);
};
/**
 * is.false: a boolean whose value is false (Boolean objects included).
 */
is['false'] = function (value) {
  if (!is.boolean(value)) return false;
  return false === value || false === value.valueOf();
};
/**
 * is.true: a boolean whose value is true (Boolean objects included).
 */
is['true'] = function (value) {
  if (!is.boolean(value)) return false;
  return true === value || true === value.valueOf();
};
/**
 * is.date
 */
is.date = function (value) {
  return '[object Date]' === toString.call(value);
};
/**
 * is.element: an HTMLElement node (browser environments only).
 */
is.element = function (value) {
  if (value === undefined) return false;
  if ('undefined' === typeof HTMLElement) return false;
  return value instanceof HTMLElement && 1 === value.nodeType;
};
/**
 * is.error
 */
is.error = function (value) {
  return '[object Error]' === toString.call(value);
};
/**
 * is.fn / is.function (deprecated name): functions, special-casing
 * window.alert, whose [[Class]] is not Function in some old IEs.
 */
is.fn = is['function'] = function (value) {
  var isAlert = 'undefined' !== typeof window && value === window.alert;
  return isAlert || '[object Function]' === toString.call(value);
};
/**
 * is.number: number primitive or Number object.
 */
is.number = function (value) {
  return '[object Number]' === toString.call(value);
};
/**
 * is.infinite: exactly +Infinity or -Infinity.
 */
is.infinite = function (value) {
  return Infinity === value || -Infinity === value;
};
/**
 * is.decimal: a finite number with a fractional part.
 */
is.decimal = function (value) {
  return is.number(value) && !isActualNaN(value) && !is.infinite(value) && 0 !== value % 1;
};
/**
 * is.divisibleBy: value % n === 0. Any infinite operand counts as
 * divisible; NaN operands and a zero divisor do not.
 */
is.divisibleBy = function (value, n) {
  if (is.infinite(value) || is.infinite(n)) return true;
  return is.number(value) && !isActualNaN(value) &&
    is.number(n) && !isActualNaN(n) && 0 !== n &&
    0 === value % n;
};
/**
 * is.int: a whole number (Infinity excluded since Infinity % 1 is NaN).
 */
is.int = function (value) {
  return is.number(value) && !isActualNaN(value) && 0 === value % 1;
};
/**
 * is.maximum: value >= every entry of the array-like `others`.
 * @throws {TypeError} on NaN value or non-array-like `others`.
 */
is.maximum = function (value, others) {
  if (isActualNaN(value)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.arraylike(others)) {
    throw new TypeError('second argument must be array-like');
  }
  for (var i = others.length - 1; i >= 0; i--) {
    if (value < others[i]) return false;
  }
  return true;
};
/**
 * is.minimum: value <= every entry of the array-like `others`.
 * @throws {TypeError} on NaN value or non-array-like `others`.
 */
is.minimum = function (value, others) {
  if (isActualNaN(value)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.arraylike(others)) {
    throw new TypeError('second argument must be array-like');
  }
  for (var i = others.length - 1; i >= 0; i--) {
    if (value > others[i]) return false;
  }
  return true;
};
/**
 * is.nan: anything that is not a proper number (NaN itself included).
 */
is.nan = function (value) {
  return !is.number(value) || value !== value;
};
/**
 * is.even: an even number, or +/-Infinity.
 */
is.even = function (value) {
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && 0 === value % 2;
};
/**
 * is.odd: a number with non-zero remainder mod 2, or +/-Infinity.
 */
is.odd = function (value) {
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && 0 !== value % 2;
};
/**
 * is.ge: value >= other. NaN throws; any infinite operand yields false.
 */
is.ge = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  return !is.infinite(value) && !is.infinite(other) && value >= other;
};
/**
 * is.gt: value > other. NaN throws; any infinite operand yields false.
 */
is.gt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  return !is.infinite(value) && !is.infinite(other) && value > other;
};
/**
 * is.le: value <= other. NaN throws; any infinite operand yields false.
 */
is.le = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  return !is.infinite(value) && !is.infinite(other) && value <= other;
};
/**
 * is.lt: value < other. NaN throws; any infinite operand yields false.
 */
is.lt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  return !is.infinite(value) && !is.infinite(other) && value < other;
};
/**
 * is.within: start <= value <= finish. Any infinite argument yields
 * true; NaN or non-number arguments throw TypeError.
 */
is.within = function (value, start, finish) {
  if (isActualNaN(value) || isActualNaN(start) || isActualNaN(finish)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.number(value) || !is.number(start) || !is.number(finish)) {
    throw new TypeError('all arguments must be numbers');
  }
  if (is.infinite(value) || is.infinite(start) || is.infinite(finish)) return true;
  return value >= start && value <= finish;
};
/** | |
* Test object. | |
*/ | |
/**
 * is.object
 * Test if `value` is a plain object (toString tag check).
 * NOTE: falsy inputs short-circuit and are returned as-is rather than
 * being coerced to false — callers rely on truthiness only.
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is an object, falsy otherwise
 * @api public
 */
is.object = function (value) {
  return value && toString.call(value) === '[object Object]';
};
/**
 * is.hash
 * Test if `value` is a hash: a plain object literal, excluding DOM
 * nodes (`nodeType`) and window-like objects (`setInterval`).
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a hash, false otherwise
 * @api public
 */
is.hash = function (value) {
  return is.object(value)
    && value.constructor === Object
    && !value.nodeType
    && !value.setInterval;
};
/** | |
* Test regexp. | |
*/ | |
/**
 * is.regexp
 * Test if `value` is a regular expression (toString tag check).
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a regexp, false otherwise
 * @api public
 */
is.regexp = function (value) {
  return toString.call(value) === '[object RegExp]';
};
/** | |
* Test string. | |
*/ | |
/**
 * is.string
 * Test if `value` is a string (toString tag check, so boxed String
 * objects also pass).
 *
 * @param {Mixed} value value to test
 * @return {Boolean} true if `value` is a string, false otherwise
 * @api public
 */
is.string = function (value) {
  return toString.call(value) === '[object String]';
};
},{}],45:[function(require,module,exports){ | |
(function () {
  "use strict";
  // Object.keys shim, adapted from https://github.com/kriskowal/es5-shim
  var has = Object.prototype.hasOwnProperty;
  var is = require('is');
  var forEach = require('foreach');
  // Old JScript engines fail to enumerate shadowed built-ins such as
  // `toString`; detect that so we can compensate below.
  var hasDontEnumBug = !({'toString': null}).propertyIsEnumerable('toString');
  var dontEnums = [
    "toString",
    "toLocaleString",
    "valueOf",
    "hasOwnProperty",
    "isPrototypeOf",
    "propertyIsEnumerable",
    "constructor"
  ];
  // Collect own enumerable property names of `object`.
  var keysShim = function keys(object) {
    if (!is.object(object) && !is.array(object)) {
      throw new TypeError("Object.keys called on a non-object");
    }
    var theKeys = [];
    for (var name in object) {
      if (has.call(object, name)) {
        theKeys.push(name);
      }
    }
    if (hasDontEnumBug) {
      // Manually probe the properties the buggy engine skipped.
      forEach(dontEnums, function (dontEnum) {
        if (has.call(object, dontEnum)) {
          theKeys.push(dontEnum);
        }
      });
    }
    return theKeys;
  };
  module.exports = keysShim;
}());
},{"foreach":43,"is":44}],46:[function(require,module,exports){ | |
var EventEmitter = require('events').EventEmitter | |
var inherits = require('util').inherits | |
var ranges = require('string-range') | |
var fixRange = require('level-fix-range') | |
var xtend = require('xtend') | |
var Batch = require('./batch') | |
inherits(SubDB, EventEmitter) | |
// Constructor for a sub-database that namespaces keys of `db` under
// `sep + prefix + sep`.  Usable with or without `new`.
function SubDB (db, prefix, options) {
  // Legacy calling convention: the third argument used to be the
  // separator string itself.
  if('string' === typeof options) {
    console.error('db.sublevel(name, seperator<string>) is depreciated')
    console.error('use db.sublevel(name, {sep: separator})) if you must')
    options = {sep: options}
  }
  if(!(this instanceof SubDB)) return new SubDB(db, prefix, options)
  if(!db) throw new Error('must provide db')
  if(!prefix) throw new Error('must provide prefix')
  options = options || {}
  options.sep = options.sep || '\xff'  // default separator byte
  this._parent = db
  this._options = options
  this.options = options
  this._prefix = prefix
  this._root = root(this)  // cache the top-most ancestor database
  db.sublevels[prefix] = this  // register this sublevel on its parent
  this.sublevels = {}
  this.methods = {}
  var self = this
  // Detachable pre/post hook installers bound to this instance.
  this.hooks = {
    pre: function () {
      return self.pre.apply(self, arguments)
    },
    post: function () {
      return self.post.apply(self, arguments)
    }
  }
}
var SDB = SubDB.prototype | |
// Build the namespaced form of `key`: sep + prefix + sep + key.
SDB._key = function (key) {
  var sep = this._options.sep
  return sep + this._prefix + sep + key
}
// Normalize the trailing (opts, cb) pair: opts may be omitted, in
// which case the single function argument is the callback.
SDB._getOptsAndCb = function (opts, cb) {
  if (typeof opts === 'function') {
    cb = opts
    opts = {}
  }
  return { opts: xtend(opts, this._options), cb: cb }
}
// Return the cached nested sublevel for `prefix`, creating it on
// first use.
SDB.sublevel = function (prefix, options) {
  var existing = this.sublevels[prefix]
  if (existing) return existing
  return new SubDB(this, prefix, options || this._options)
}
// put/get/del prefix the key into this sublevel's namespace and
// delegate straight to the root database.
SDB.put = function (key, value, opts, cb) {
  var args = this._getOptsAndCb(opts, cb)
  this._root.put(this.prefix(key), value, args.opts, args.cb)
}
SDB.get = function (key, opts, cb) {
  var args = this._getOptsAndCb(opts, cb)
  this._root.get(this.prefix(key), args.opts, args.cb)
}
SDB.del = function (key, opts, cb) {
  var args = this._getOptsAndCb(opts, cb)
  this._root.del(this.prefix(key), args.opts, args.cb)
}
// Apply an array of change ops, rewriting each op's key into this
// sublevel's namespace.  Called with no array, returns a chained
// Batch instead.  NOTE: mutates the caller's change objects in place.
SDB.batch = function (changes, opts, cb) {
  if(!Array.isArray(changes))
    return new Batch(this)
  var self = this,
    res = this._getOptsAndCb(opts, cb)
  changes.forEach(function (ch) {
    //OH YEAH, WE NEED TO VALIDATE THAT UPDATING THIS KEY/PREFIX IS ALLOWED
    // A string prefix is prepended verbatim; otherwise the op's own
    // sublevel (or this one) computes the namespaced key.
    if('string' === typeof ch.prefix)
      ch.key = ch.prefix + ch.key
    else
      ch.key = (ch.prefix || self).prefix(ch.key)
    if(ch.prefix) ch.prefix = null
  })
  this._root.batch(changes, res.opts, res.cb)
}
// Resolve the key encoding, walking up the parent chain when this
// level has none configured.
SDB._getKeyEncoding = function () {
  var enc = this.options.keyEncoding
  if (enc) return enc
  var parent = this._parent
  if (parent && parent._getKeyEncoding) return parent._getKeyEncoding()
}
// Resolve the value encoding, walking up the parent chain when this
// level has none configured.
SDB._getValueEncoding = function () {
  var enc = this.options.valueEncoding
  if (enc) return enc
  var parent = this._parent
  if (parent && parent._getValueEncoding) return parent._getValueEncoding()
}
// Full key prefix from the root down to this sublevel, with `key`
// (or nothing) appended.
SDB.prefix = function (key) {
  var sep = this._options.sep
  var suffix = key || ''
  return this._parent.prefix() + sep + this._prefix + sep + suffix
}
// Read stream of keys only (values suppressed).
SDB.keyStream =
SDB.createKeyStream = function (opts) {
  var o = opts || {}
  o.keys = true
  o.values = false
  return this.createReadStream(o)
}
// Read stream of values only (keys suppressed).
// Fix: the original assigned `opts.keys = false` twice in a row; the
// redundant duplicate assignment has been removed (no behavior change).
SDB.valueStream =
SDB.createValueStream = function (opts) {
  opts = opts || {}
  opts.keys = false
  opts.values = true
  return this.createReadStream(opts)
}
// Copy only the whitelisted read-stream options that are present on
// `opts` onto `_opts`, leaving everything else (e.g. computed
// start/end) untouched.
function selectivelyMerge(_opts, opts) {
  var allowed = [
    'valueEncoding',
    'encoding',
    'keyEncoding',
    'reverse',
    'values',
    'keys',
    'limit',
    'fillCache'
  ]
  allowed.forEach(function (name) {
    if (opts.hasOwnProperty(name)) _opts[name] = opts[name]
  })
}
// Read stream over this sublevel only: constrains the scanned range
// to this prefix, then strips the prefix from every emitted key.
SDB.readStream =
SDB.createReadStream = function (opts) {
  opts = opts || {}
  var r = root(this)
  var p = this.prefix()
  // Constrain start/end so only keys under this prefix are scanned.
  var _opts = ranges.prefix(opts, p)
  selectivelyMerge(_opts, xtend(opts, this._options))
  var s = r.createReadStream(_opts)
  if(_opts.values === false) {
    // Key-only stream: entries are bare key strings.
    var read = s.read
    if (read) {
      // streams2: wrap read() to strip the prefix from each key.
      s.read = function (size) {
        var val = read.call(this, size)
        if (val) val = val.substring(p.length)
        return val
      }
    } else {
      // classic streams: rewrite 'data' events instead.
      var emit = s.emit
      s.emit = function (event, val) {
        if(event === 'data') {
          emit.call(this, 'data', val.substring(p.length))
        } else
          emit.call(this, event, val)
      }
    }
    return s
  } else if(_opts.keys === false)
    // Value-only stream: nothing to strip.
    return s
  else {
    // Key/value stream: entries are {key, value} records.
    var read = s.read
    if (read) {
      s.read = function (size) {
        var d = read.call(this, size)
        if (d) d.key = d.key.substring(p.length)
        return d
      }
    } else {
      s.on('data', function (d) {
        //mutate the prefix!
        //this doesn't work for createKeyStream admittedly.
        d.key = d.key.substring(p.length)
      })
    }
    return s
  }
}
// Write stream that prepends this sublevel's prefix to every record's
// key and propagates this sublevel's encodings onto each write.
SDB.writeStream =
SDB.createWriteStream = function () {
  var r = root(this)
  var p = this.prefix()
  var ws = r.createWriteStream.apply(r, arguments)
  var write = ws.write
  var encoding = this._options.encoding
  var valueEncoding = this._options.valueEncoding
  var keyEncoding = this._options.keyEncoding
  // slight optimization, if no encoding was specified at all,
  // which will be the case most times, make write not check at all
  var nocheck = !encoding && !valueEncoding && !keyEncoding
  ws.write = nocheck
    ? function (data) {
      data.key = p + data.key
      return write.call(ws, data)
    }
    : function (data) {
      data.key = p + data.key
      // not merging all options here since this happens on every write and things could get slowed down
      // at this point we only consider encoding important to propagate
      if (encoding && typeof data.encoding === 'undefined')
        data.encoding = encoding
      if (valueEncoding && typeof data.valueEncoding === 'undefined')
        data.valueEncoding = valueEncoding
      if (keyEncoding && typeof data.keyEncoding === 'undefined')
        data.keyEncoding = keyEncoding
      return write.call(ws, data)
    }
  return ws
}
// Delegate approximateSize to the root database.
// Fix: the original called `root(db)` but no `db` exists in this
// scope, so every call threw a ReferenceError; use `this` instead,
// matching every other delegating method in this file.
SDB.approximateSize = function () {
  var r = root(this)
  return r.approximateSize.apply(r, arguments)
}
// Walk the _parent chain up to the top-most (root) database.
function root(db) {
  var node = db
  while (node._parent) {
    node = node._parent
  }
  return node
}
// Install a pre-commit hook on the root db, scoped to this sublevel's
// key range.  The hook sees keys with the prefix stripped; ops it adds
// are re-prefixed unless they carry their own prefix.
SDB.pre = function (range, hook) {
  // supports (hook) as well as (range, hook)
  if(!hook) hook = range, range = null
  range = ranges.prefix(range, this.prefix(), this._options.sep)
  var r = root(this._parent)
  var p = this.prefix()
  return r.hooks.pre(fixRange(range), function (ch, add, batch) {
    hook({
      key: ch.key.substring(p.length),
      value: ch.value,
      type: ch.type
    }, function (ch, _p) {
      //maybe remove the second add arg now
      //that op can have prefix?
      add(ch, ch.prefix ? _p : (_p || p))
    }, batch)
  })
}
// Install a post-commit hook scoped to this sublevel; each change is
// delivered with the prefix stripped from its key.
SDB.post = function (range, hook) {
  // supports (hook) as well as (range, hook)
  if(!hook) hook = range, range = null
  var r = root(this._parent)
  var p = this.prefix()
  range = ranges.prefix(range, p, this._options.sep)
  return r.hooks.post(fixRange(range), function (data) {
    hook({key: data.key.substring(p.length), value: data.value, type: data.type})
  })
}
var exports = module.exports = SubDB | |
},{"./batch":34,"events":18,"level-fix-range":36,"string-range":39,"util":32,"xtend":41}],47:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var util = require('./util') | |
, WriteError = require('./errors').WriteError | |
, getOptions = util.getOptions | |
, dispatchError = util.dispatchError | |
// Chainable batch: wraps the backend's native batch object while
// mirroring queued ops in `this.ops` for the 'batch' event on write.
function Batch (levelup) {
  this._levelup = levelup
  this.batch = levelup.db.batch()
  this.ops = []
}
// Queue a put; key and value are encoded per the merged options.
// Synchronous backend failures are rethrown wrapped in WriteError.
Batch.prototype.put = function (key_, value_, options) {
  options = getOptions(this._levelup, options)
  var key = util.encodeKey(key_, options)
  var value = util.encodeValue(value_, options)
  try {
    this.batch.put(key, value)
  } catch (err) {
    throw new WriteError(err)
  }
  this.ops.push({ type : 'put', key : key, value : value })
  return this
}
// Queue a deletion of `key_` (encoded per the merged options).
Batch.prototype.del = function (key_, options) {
  var opts = getOptions(this._levelup, options)
  var key = util.encodeKey(key_, opts)
  try {
    this.batch.del(key)
  } catch (e) {
    throw new WriteError(e)
  }
  this.ops.push({ type : 'del', key : key })
  return this
}
// Discard every queued operation, resetting the ops mirror too.
Batch.prototype.clear = function () {
  try {
    this.batch.clear()
  } catch (e) {
    throw new WriteError(e)
  }
  this.ops = []
  return this
}
// Commit the batch.  On success emits 'batch' with the queued ops and
// invokes the callback; async failures go through dispatchError, while
// synchronous backend failures are rethrown wrapped in WriteError.
Batch.prototype.write = function (callback) {
  var levelup = this._levelup
  var ops = this.ops
  try {
    this.batch.write(function (err) {
      if (err) {
        return dispatchError(levelup, new WriteError(err), callback)
      }
      levelup.emit('batch', ops)
      if (callback) {
        callback()
      }
    })
  } catch (err) {
    throw new WriteError(err)
  }
}
module.exports = Batch | |
},{"./errors":48,"./util":51}],48:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
// Error hierarchy for LevelUP, built with errno's createError; every
// type below inherits from LevelUPError.
var createError = require('errno').create
  , LevelUPError = createError('LevelUPError')
  , NotFoundError = createError('NotFoundError', LevelUPError)
// Marker properties so callers can feature-test `err.notFound` /
// `err.status` instead of relying on instanceof across module copies.
NotFoundError.prototype.notFound = true
NotFoundError.prototype.status = 404
module.exports = {
  LevelUPError : LevelUPError
  , InitializationError : createError('InitializationError', LevelUPError)
  , OpenError : createError('OpenError', LevelUPError)
  , ReadError : createError('ReadError', LevelUPError)
  , WriteError : createError('WriteError', LevelUPError)
  , NotFoundError : NotFoundError
  , EncodingError : createError('EncodingError', LevelUPError)
}
},{"errno":59}],49:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var EventEmitter = require('events').EventEmitter | |
, inherits = require('util').inherits | |
, extend = require('xtend') | |
, prr = require('prr') | |
, DeferredLevelDOWN = require('deferred-leveldown') | |
, WriteError = require('./errors').WriteError | |
, ReadError = require('./errors').ReadError | |
, NotFoundError = require('./errors').NotFoundError | |
, OpenError = require('./errors').OpenError | |
, EncodingError = require('./errors').EncodingError | |
, InitializationError = require('./errors').InitializationError | |
, ReadStream = require('./read-stream') | |
, WriteStream = require('./write-stream') | |
, util = require('./util') | |
, Batch = require('./batch') | |
, getOptions = util.getOptions | |
, defaultOptions = util.defaultOptions | |
, getLevelDOWN = util.getLevelDOWN | |
, dispatchError = util.dispatchError | |
// Support both (options, callback) and (callback) call shapes: when
// the first argument is a function it IS the callback.
function getCallback (options, callback) {
  if (typeof options === 'function') {
    return options
  }
  return callback
}
// Possible LevelUP#_status values: | |
// - 'new' - newly created, not opened or closed | |
// - 'opening' - waiting for the database to be opened, post open() | |
// - 'open' - successfully opened the database, available for use | |
// - 'closing' - waiting for the database to be closed, post close() | |
// - 'closed' - database has been successfully closed, should not be | |
// used except for another open() operation | |
// LevelUP constructor.  Accepted call shapes:
//   (location, options?, callback?)          - string path
//   (leveldownFactory, options?, callback?)  - injected backend
//   ({ db: factory, ... }, callback?)        - options-only form
// Opens the database immediately; 'open'/'ready' fire when usable.
function LevelUP (location, options, callback) {
  if (!(this instanceof LevelUP))
    return new LevelUP(location, options, callback)
  var error
  EventEmitter.call(this)
  this.setMaxListeners(Infinity)
  // Argument shuffling: a function as `location` is a backend factory.
  if (typeof location == 'function') {
    options = typeof options == 'object' ? options : {}
    options.db = location
    location = null
  } else if (typeof location == 'object' && typeof location.db == 'function') {
    options = location
    location = null
  }
  if (typeof options == 'function') {
    callback = options
    options = {}
  }
  // Without an injected backend a string location is mandatory.
  if ((!options || typeof options.db != 'function') && typeof location != 'string') {
    error = new InitializationError(
      'Must provide a location for the database')
    // Report asynchronously when possible, otherwise throw.
    if (callback) {
      return process.nextTick(function () {
        callback(error)
      })
    }
    throw error
  }
  options = getOptions(this, options)
  this.options = extend(defaultOptions, options)
  this._status = 'new'
  // set this.location as enumerable but not configurable or writable
  prr(this, 'location', location, 'e')
  this.open(callback)
}
inherits(LevelUP, EventEmitter) | |
// Open (or re-open) the underlying store.  Idempotent: on an already
// open db the callback fires next tick; while opening, the callback
// is queued on the 'open' event.  Operations issued before the real
// backend is ready are buffered by DeferredLevelDOWN.
LevelUP.prototype.open = function (callback) {
  var self = this
    , dbFactory
    , db
  if (this.isOpen()) {
    if (callback)
      process.nextTick(function () { callback(null, self) })
    return this
  }
  if (this._isOpening()) {
    return callback && this.once(
      'open'
      , function () { callback(null, self) }
    )
  }
  this.emit('opening')
  this._status = 'opening'
  // Deferred shim queues operations until the real backend opens.
  this.db = new DeferredLevelDOWN(this.location)
  dbFactory = this.options.db || getLevelDOWN()
  db = dbFactory(this.location)
  db.open(this.options, function (err) {
    if (err) {
      return dispatchError(self, new OpenError(err), callback)
    } else {
      self.db.setDb(db)  // flush queued ops into the real backend
      self.db = db
      self._status = 'open'
      if (callback)
        callback(null, self)
      self.emit('open')
      self.emit('ready')
    }
  })
}
// Close the underlying store, handling each lifecycle state:
// open -> close now; closed -> callback next tick; closing -> wait
// for 'closed'; opening -> close once the pending open completes.
LevelUP.prototype.close = function (callback) {
  var self = this
  if (this.isOpen()) {
    this._status = 'closing'
    this.db.close(function () {
      self._status = 'closed'
      self.emit('closed')
      if (callback)
        callback.apply(null, arguments)
    })
    this.emit('closing')
    this.db = null  // drop the backend reference immediately
  } else if (this._status == 'closed' && callback) {
    return process.nextTick(callback)
  } else if (this._status == 'closing' && callback) {
    this.once('closed', callback)
  } else if (this._isOpening()) {
    this.once('open', function () {
      self.close(callback)
    })
  }
}
// True once open() has completed successfully.
LevelUP.prototype.isOpen = function () {
  return this._status === 'open'
}
// True while an open() is still in flight.
LevelUP.prototype._isOpening = function () {
  return this._status === 'opening'
}
// True when shutting down or shut: matches 'closing' and 'closed'.
LevelUP.prototype.isClosed = function () {
  return (/^clos/).test(this._status)
}
// Fetch the value stored under key_.  Backend errors matching
// /notfound/i are wrapped in NotFoundError (err.notFound === true);
// everything else becomes ReadError.  Values are decoded per the
// resolved encoding; decode failures surface as EncodingError.
LevelUP.prototype.get = function (key_, options, callback) {
  var self = this
    , key
  callback = getCallback(options, callback)
  if (typeof callback != 'function') {
    // no callback to report to, so the error goes to the instance
    return dispatchError(
      this
      , new ReadError('get() requires key and callback arguments')
    )
  }
  // Allowed while opening: DeferredLevelDOWN buffers the read.
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new ReadError('Database is not open')
      , callback
    )
  }
  options = util.getOptions(this, options)
  key = util.encodeKey(key_, options)
  options.asBuffer = util.isValueAsBuffer(options)
  this.db.get(key, options, function (err, value) {
    if (err) {
      if ((/notfound/i).test(err)) {
        err = new NotFoundError(
          'Key not found in database [' + key_ + ']', err)
      } else {
        err = new ReadError(err)
      }
      return dispatchError(self, err, callback)
    }
    if (callback) {
      try {
        value = util.decodeValue(value, options)
      } catch (e) {
        return callback(new EncodingError(e))
      }
      callback(null, value)
    }
  })
}
// Store value_ under key_.  null/undefined keys or values are rejected
// with WriteError.  Emits 'put' with the ORIGINAL (un-encoded) key and
// value on success.
LevelUP.prototype.put = function (key_, value_, options, callback) {
  var self = this
    , key
    , value
  callback = getCallback(options, callback)
  if (key_ === null || key_ === undefined
    || value_ === null || value_ === undefined) {
    return dispatchError(
      this
      , new WriteError('put() requires key and value arguments')
      , callback
    )
  }
  // Allowed while opening: DeferredLevelDOWN buffers the write.
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  key = util.encodeKey(key_, options)
  value = util.encodeValue(value_, options)
  this.db.put(key, value, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      self.emit('put', key_, value_)
      if (callback)
        callback()
    }
  })
}
// Delete the entry stored under key_.  null/undefined keys are
// rejected with WriteError.  Emits 'del' with the ORIGINAL key.
LevelUP.prototype.del = function (key_, options, callback) {
  var self = this
    , key
  callback = getCallback(options, callback)
  if (key_ === null || key_ === undefined) {
    return dispatchError(
      this
      , new WriteError('del() requires a key argument')
      , callback
    )
  }
  // Allowed while opening: DeferredLevelDOWN buffers the delete.
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  key = util.encodeKey(key_, options)
  this.db.del(key, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      self.emit('del', key_)
      if (callback)
        callback()
    }
  })
}
// Apply an array of {type, key, value?, *Encoding?} operations.  With
// no arguments at all, returns a chainable Batch instead.  Per-op
// encodings override the call-level and instance-level ones.
LevelUP.prototype.batch = function (arr_, options, callback) {
  var self = this
    , keyEnc
    , valueEnc
    , arr
  if (!arguments.length)
    return new Batch(this)
  callback = getCallback(options, callback)
  if (!Array.isArray(arr_)) {
    return dispatchError(
      this
      , new WriteError('batch() requires an array argument')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  keyEnc = options.keyEncoding
  valueEnc = options.valueEncoding
  arr = arr_.map(function (e) {
    // Ops missing type or key become {} and are left for the backend
    // to reject.
    if (e.type === undefined || e.key === undefined)
      return {}
    // inherit encoding
    var kEnc = e.keyEncoding || keyEnc
      , vEnc = e.valueEncoding || e.encoding || valueEnc
      , o
    // If we're not dealing with plain utf8 strings or plain
    // Buffers then we have to do some work on the array to
    // encode the keys and/or values. This includes JSON types.
    if (kEnc != 'utf8' && kEnc != 'binary'
      || vEnc != 'utf8' && vEnc != 'binary') {
      o = {
        type: e.type
        , key: util.encodeKey(e.key, options, e)
      }
      if (e.value !== undefined)
        o.value = util.encodeValue(e.value, options, e)
      return o
    } else {
      return e
    }
  })
  this.db.batch(arr, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      // emits the ORIGINAL (un-encoded) ops array
      self.emit('batch', arr_)
      if (callback)
        callback()
    }
  })
}
// DEPRECATED: prefer accessing LevelDOWN for this: db.db.approximateSize()
// Estimate the on-disk size of the key range [start_, end_].  All
// three arguments are required.  Note: backend errors here are wrapped
// in OpenError (not ReadError), matching the original implementation.
LevelUP.prototype.approximateSize = function (start_, end_, callback) {
  var self = this
    , start
    , end
  if (start_ === null || start_ === undefined
    || end_ === null || end_ === undefined
    || typeof callback != 'function') {
    return dispatchError(
      this
      , new ReadError('approximateSize() requires start, end and callback arguments')
      , callback
    )
  }
  start = util.encodeKey(start_, this.options)
  end = util.encodeKey(end_, this.options)
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  this.db.approximateSize(start, end, function (err, size) {
    if (err) {
      return dispatchError(self, new OpenError(err), callback)
    } else if (callback) {
      callback(null, size)
    }
  })
}
// Create a Readable stream over a key range; per-call options are
// merged over the instance options.  The backend iterator is created
// lazily by ReadStream once the db is open.
LevelUP.prototype.readStream =
LevelUP.prototype.createReadStream = function (options) {
  var self = this
  options = extend(this.options, options)
  return new ReadStream(
    options
    , this
    , function (options) {
      return self.db.iterator(options)
    }
  )
}
// Key-only read stream.
LevelUP.prototype.keyStream =
LevelUP.prototype.createKeyStream = function (options) {
  var opts = extend(options, { keys: true, values: false })
  return this.createReadStream(opts)
}
// Value-only read stream.
LevelUP.prototype.valueStream =
LevelUP.prototype.createValueStream = function (options) {
  var opts = extend(options, { keys: false, values: true })
  return this.createReadStream(opts)
}
// Writable stream of {key, value} records.
LevelUP.prototype.writeStream =
LevelUP.prototype.createWriteStream = function (options) {
  return new WriteStream(extend(options), this)
}
LevelUP.prototype.toString = function () {
  return 'LevelUP'
}
// Build a static helper (e.g. destroy/repair) that proxies the call
// straight through to the LevelDOWN module, defaulting the callback
// to a no-op.
function utilStatic (name) {
  return function (location, callback) {
    if (!callback) callback = function () {}
    getLevelDOWN()[name](location, callback)
  }
}
module.exports = LevelUP | |
module.exports.copy = util.copy | |
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').destroy() | |
module.exports.destroy = utilStatic('destroy') | |
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').repair() | |
module.exports.repair = utilStatic('repair') | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./batch":47,"./errors":48,"./read-stream":50,"./util":51,"./write-stream":52,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"deferred-leveldown":54,"events":18,"prr":60,"util":32,"xtend":70}],50:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
// NOTE: we are fixed to [email protected] for now | |
// for pure Streams2 across Node versions | |
var Readable = require('readable-stream').Readable
  , inherits = require('util').inherits
  , extend = require('xtend')
  , EncodingError = require('./errors').EncodingError
  , util = require('./util')
  , defaultOptions = { keys: true, values: true }
  // Entry shapers, selected once per stream from the keys/values
  // options; `this` is the ReadStream, so decoding uses its options.
  , makeKeyValueData = function (key, value) {
      return {
        key: util.decodeKey(key, this._options)
        , value: util.decodeValue(value, this._options)
      }
    }
  , makeKeyData = function (key) {
      return util.decodeKey(key, this._options)
    }
  , makeValueData = function (_, value) {
      return util.decodeValue(value, this._options)
    }
  , makeNoData = function () { return null }
// Readable (objectMode) stream over a LevelUP iterator.
// `iteratorFactory(options)` is invoked once the db is open; until
// then iterator creation is deferred on the db's 'ready' event.
function ReadStream (options, db, iteratorFactory) {
  if (!(this instanceof ReadStream))
    return new ReadStream(options, db, iteratorFactory)
  Readable.call(this, { objectMode: true, highWaterMark: options.highWaterMark })
  // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
  this._db = db
  options = this._options = extend(defaultOptions, options)
  this._keyEncoding = options.keyEncoding || options.encoding
  this._valueEncoding = options.valueEncoding || options.encoding
  // start/end are stored pre-encoded for the backend iterator
  if (typeof this._options.start != 'undefined')
    this._options.start = util.encodeKey(this._options.start, this._options)
  if (typeof this._options.end != 'undefined')
    this._options.end = util.encodeKey(this._options.end, this._options)
  if (typeof this._options.limit != 'number')
    this._options.limit = -1  // -1 signals "no limit" to the backend
  this._options.keyAsBuffer = util.isKeyAsBuffer(this._options)
  this._options.valueAsBuffer = util.isValueAsBuffer(this._options)
  // Choose how raw (key, value) pairs are shaped for consumers.
  this._makeData = this._options.keys && this._options.values
    ? makeKeyValueData : this._options.keys
      ? makeKeyData : this._options.values
        ? makeValueData : makeNoData
  var self = this
  if (!this._db.isOpen()) {
    this._db.once('ready', function () {
      // skip iterator creation if destroy() raced the open
      if (!self._destroyed) {
        self._iterator = iteratorFactory(self._options)
      }
    })
  } else
    this._iterator = iteratorFactory(this._options)
}
inherits(ReadStream, Readable) | |
// Readable._read: pull one entry from the iterator.  Re-queues itself
// until the db is open.  A result of key===undefined &&
// value===undefined signals iterator exhaustion (push(null) ends the
// stream); decode failures abort via _cleanup(EncodingError).
ReadStream.prototype._read = function read () {
  var self = this
  if (!self._db.isOpen()) {
    return self._db.once('ready', function () { read.call(self) })
  }
  if (self._destroyed)
    return
  self._iterator.next(function(err, key, value) {
    if (err || (key === undefined && value === undefined)) {
      if (!err && !self._destroyed)
        self.push(null)  // normal end-of-range
      return self._cleanup(err)
    }
    try {
      value = self._makeData(key, value)
    } catch (e) {
      return self._cleanup(new EncodingError(e))
    }
    if (!self._destroyed)
      self.push(value)
  })
}
// Tear the stream down: emit 'error' (when given one), end the
// backend iterator, then emit 'close'.  Idempotent via _destroyed.
ReadStream.prototype._cleanup = function (err) {
  if (this._destroyed)
    return
  this._destroyed = true
  var self = this
  if (err)
    self.emit('error', err)
  if (self._iterator) {
    self._iterator.end(function () {
      self._iterator = null
      self.emit('close')
    })
  } else {
    self.emit('close')
  }
}
// Abort the stream early; safe to call repeatedly (see _cleanup).
ReadStream.prototype.destroy = function () {
  this._cleanup()
}
ReadStream.prototype.toString = function () {
  return 'LevelUP.ReadStream'
}
module.exports = ReadStream | |
},{"./errors":48,"./util":51,"readable-stream":69,"util":32,"xtend":70}],51:[function(require,module,exports){ | |
(function (process,Buffer){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var extend = require('xtend')
  , LevelUPError = require('./errors').LevelUPError
  // Buffer-backed encodings understood by the backend (utf8 and json
  // get dedicated non-buffer entries below).
  , encodingNames = [
      'hex'
    , 'utf8'
    , 'utf-8'
    , 'ascii'
    , 'binary'
    , 'base64'
    , 'ucs2'
    , 'ucs-2'
    , 'utf16le'
    , 'utf-16le'
  ]
  // Baseline options merged under every LevelUP instance's options.
  , defaultOptions = {
      createIfMissing : true
    , errorIfExists : false
    , keyEncoding : 'utf8'
    , valueEncoding : 'utf8'
    , compression : true
  }
  , leveldown  // lazily-required LevelDOWN module, cached by getLevelDOWN()
  // Encoder table: each entry has encode/decode plus a `buffer` flag
  // telling the backend whether to hand back Buffers.
  , encodings = (function () {
      function isBinary (data) {
        return data === undefined || data === null || Buffer.isBuffer(data)
      }
      var encodings = {}
      encodings.utf8 = encodings['utf-8'] = {
        encode : function (data) {
          return isBinary(data) ? data : String(data)
        }
        , decode : function (data) {
          return data
        }
        , buffer : false
        , type : 'utf8'
      }
      encodings.json = {
        encode : JSON.stringify
        , decode : JSON.parse
        , buffer : false
        , type : 'json'
      }
      // Remaining names all encode via Buffer with that name as the
      // Buffer encoding.
      encodingNames.forEach(function (type) {
        if (encodings[type])
          return
        encodings[type] = {
          encode : function (data) {
            return isBinary(data) ? data : new Buffer(data, type)
          }
          , decode : function (buffer) {
            return process.browser ? buffer.toString(type) : buffer;
          }
          , buffer : true
          , type : type // useful for debugging purposes
        }
      })
      return encodings
    })()
  // Shorthand map: encoding name -> { valueEncoding: name }
  , encodingOpts = (function () {
      var eo = {}
      encodingNames.forEach(function (e) {
        eo[e] = { valueEncoding : e }
      })
      return eo
    }())
// Pipe every entry of srcdb into dstdb.  With a callback, it receives
// both completion and errors; without one, errors are rethrown.
function copy (srcdb, dstdb, callback) {
  var onClose = callback ? callback : function () {}
  var onError = callback ? callback : function (err) { throw err }
  srcdb.readStream()
    .pipe(dstdb.writeStream())
    .on('close', onClose)
    .on('error', onError)
}
// Merge per-call options over the levelup instance's options.  A bare
// string argument is shorthand for { valueEncoding: <string> }.
// NOTE: mutates the caller's options object when aliasing `encoding`
// onto `valueEncoding`.
function getOptions (levelup, options) {
  var s = typeof options == 'string' // just an encoding
  if (!s && options && options.encoding && !options.valueEncoding)
    options.valueEncoding = options.encoding
  return extend(
    (levelup && levelup.options) || {}
    , s ? encodingOpts[options] || encodingOpts[defaultOptions.valueEncoding]
      : options
  )
}
// Lazily require LevelDOWN and cache it in `leveldown`.  Verifies the
// installed version satisfies the range pinned in this package's
// devDependencies; throws LevelUPError when LevelDOWN is missing or
// the versions are incompatible.
function getLevelDOWN () {
  if (leveldown)
    return leveldown
  var requiredVersion = require('../package.json').devDependencies.leveldown
    , missingLevelDOWNError = 'Could not locate LevelDOWN, try `npm install leveldown`'
    , leveldownVersion
  try {
    leveldownVersion = require('leveldown/package').version
  } catch (e) {
    throw new LevelUPError(missingLevelDOWNError)
  }
  if (!require('semver').satisfies(leveldownVersion, requiredVersion)) {
    throw new LevelUPError(
      'Installed version of LevelDOWN ('
      + leveldownVersion
      + ') does not match required version ('
      + requiredVersion
      + ')'
    )
  }
  try {
    return leveldown = require('leveldown')
  } catch (e) {
    throw new LevelUPError(missingLevelDOWNError)
  }
}
// Route an error to the callback when one is supplied, otherwise emit
// 'error' on the levelup instance.
function dispatchError (levelup, error, callback) {
  if (typeof callback === 'function') {
    return callback(error)
  }
  return levelup.emit('error', error)
}
// Pick the key encoder: the per-op setting wins, then the call
// options, then utf8.  Unknown names fall through unchanged.
function getKeyEncoder (options, op) {
  var type = (op && op.keyEncoding) || options.keyEncoding || 'utf8'
  return encodings[type] || type
}
// Pick the value encoder: op.valueEncoding, then op.encoding, then
// the call options' valueEncoding/encoding, then utf8.
function getValueEncoder (options, op) {
  var type = (op && (op.valueEncoding || op.encoding))
    || options.valueEncoding || options.encoding || 'utf8'
  return encodings[type] || type
}
// Thin wrappers around the resolved encoders, used by levelup proper.
function encodeKey (key, options, op) {
  return getKeyEncoder(options, op).encode(key)
}
function decodeKey (key, options) {
  return getKeyEncoder(options).decode(key)
}
function encodeValue (value, options, op) {
  return getValueEncoder(options, op).encode(value)
}
function decodeValue (value, options) {
  return getValueEncoder(options).decode(value)
}
// Whether the backend should hand back Buffers rather than strings.
function isKeyAsBuffer (options, op) {
  return getKeyEncoder(options, op).buffer
}
function isValueAsBuffer (options, op) {
  return getValueEncoder(options, op).buffer
}
// Public surface shared by the levelup internals.
module.exports = {
    defaultOptions  : defaultOptions
  , copy            : copy
  , getOptions      : getOptions
  , getLevelDOWN    : getLevelDOWN
  , dispatchError   : dispatchError
  , encodeKey       : encodeKey
  , encodeValue     : encodeValue
  , isValueAsBuffer : isValueAsBuffer
  , isKeyAsBuffer   : isKeyAsBuffer
  , decodeValue     : decodeValue
  , decodeKey       : decodeKey
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"../package.json":71,"./errors":48,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"leveldown":"1XEYOF","leveldown/package":17,"semver":17,"xtend":70}],52:[function(require,module,exports){ | |
(function (process,global){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var Stream = require('stream').Stream | |
, inherits = require('util').inherits | |
, extend = require('xtend') | |
, bl = require('bl') | |
, setImmediate = global.setImmediate || process.nextTick | |
, getOptions = require('./util').getOptions | |
, defaultOptions = { type: 'put' } | |
// Legacy (streams1) writable stream that batches queued writes into
// db.batch() calls.  Extends the base Stream and implements
// write()/end() manually rather than subclassing Writable.
function WriteStream (options, db) {
  if (!(this instanceof WriteStream))
    return new WriteStream(options, db)
  Stream.call(this)
  this._options = extend(defaultOptions, getOptions(db, options))
  this._db = db
  this._buffer = []        // pending entries, flushed by _process()
  this._status = 'init'    // 'init' -> 'ready' <-> 'writing', then 'closed'
  this._end = false        // set once end() has been requested
  this.writable = true
  this.readable = false
  var self = this
    , ready = function () {
        // The stream may have been destroyed before the db became ready.
        if (!self.writable)
          return
        self._status = 'ready'
        self.emit('ready')
        self._process()
      }
  if (db.isOpen())
    setImmediate(ready)
  else
    // Defer all processing until the underlying store has opened.
    db.once('ready', ready)
}
inherits(WriteStream, Stream)
// streams1 write(): buffer the entry and report backpressure.  Returns
// false once the buffer exceeds options.maxBufferLength; _process()
// later emits 'drain' when the block is released.
WriteStream.prototype.write = function (data) {
  if (!this.writable)
    return false
  this._buffer.push(data)
  // While still 'init' the ready handler will kick off processing instead.
  if (this._status != 'init')
    this._processDelayed()
  if (this._options.maxBufferLength &&
      this._buffer.length > this._options.maxBufferLength) {
    this._writeBlock = true
    return false
  }
  return true
}
// Flush an optional final chunk, then flag completion on the next turn
// of the event loop so a just-buffered write is still picked up by
// _process() before the stream closes.
WriteStream.prototype.end = function (data) {
  if (data)
    this.write(data)
  var stream = this
  setImmediate(function () {
    stream._end = true
    stream._process()
  })
}
// Abort: mark the stream unwritable first so in-flight batch callbacks
// become no-ops, then run the close sequence.
WriteStream.prototype.destroy = function () {
  this.writable = false
  this.end()
}
// Graceful variant: flush buffered writes, then close.
WriteStream.prototype.destroySoon = function () {
  this.end()
}
// fstream integration: accept a directory/file entry.  Directory entries
// are piped into a fresh WriteStream on the same db; file entries are
// read in full then queued via _write().  Entries without props return
// undefined; everything else returns true.
WriteStream.prototype.add = function (entry) {
  if (!entry.props)
    return
  if (entry.props.Directory)
    entry.pipe(this._db.writeStream(this._options))
  else if (entry.props.File || entry.File || entry.type == 'File')
    this._write(entry)
  return true
}
// Schedule a _process() pass for the next event-loop turn.
WriteStream.prototype._processDelayed = function () {
  var stream = this
  setImmediate(function () {
    stream._process()
  })
}
// Core state machine.  Each pass flushes the buffered entries with a
// single db.batch(), re-arms itself from the batch callback, and emits
// 'close' once end() has been requested and the buffer is drained.
WriteStream.prototype._process = function () {
  var buffer
    , self = this
    // db.batch() callback: return to 'ready' and run another pass, or
    // fail the stream on error.
    , cb = function (err) {
        if (!self.writable)
          return
        if (self._status != 'closed')
          self._status = 'ready'
        if (err) {
          self.writable = false
          return self.emit('error', err)
        }
        self._process()
      }
  // Not ready (still opening, or a batch is in flight): retry later if
  // there is still work queued.
  if (self._status != 'ready' && self.writable) {
    if (self._buffer.length && self._status != 'closed')
      self._processDelayed()
    return
  }
  if (self._buffer.length && self.writable) {
    self._status = 'writing'
    buffer = self._buffer
    self._buffer = []
    // Normalise each buffered entry into a batch operation, falling back
    // to the stream-wide defaults for type and encodings.
    self._db.batch(buffer.map(function (d) {
      return {
          type          : d.type || self._options.type
        , key           : d.key
        , value         : d.value
        , keyEncoding   : d.keyEncoding || self._options.keyEncoding
        , valueEncoding : d.valueEncoding
            || d.encoding
            || self._options.valueEncoding
      }
    }), cb)
    // Backpressure released: tell producers they may write again.
    if (self._writeBlock) {
      self._writeBlock = false
      self.emit('drain')
    }
    // don't allow close until callback has returned
    return
  }
  // Buffer drained and end() requested: close exactly once.
  if (self._end && self._status != 'closed') {
    self._status = 'closed'
    self.writable = false
    self.emit('close')
  }
}
// fstream file entry: collect the whole file with bl(), strip the
// fstreamRoot prefix from its path, then queue it as a key/value write.
WriteStream.prototype._write = function (entry) {
  var key = entry.path || entry.props.path
    , self = this
  if (!key)
    return
  entry.pipe(bl(function (err, data) {
    if (err) {
      self.writable = false
      return self.emit('error', err)
    }
    if (self._options.fstreamRoot &&
        key.indexOf(self._options.fstreamRoot) > -1)
      // Make the key relative to the fstream root; '+ 1' drops the
      // path separator.
      key = key.substr(self._options.fstreamRoot.length + 1)
    self.write({ key: key, value: data.slice(0) })
  }))
}
// Identify the stream type in debug output.
WriteStream.prototype.toString = function () {
  return 'LevelUP.WriteStream'
}
module.exports = WriteStream
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./util":51,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"bl":53,"stream":24,"util":32,"xtend":70}],53:[function(require,module,exports){ | |
(function (Buffer){ | |
var DuplexStream = require('readable-stream').Duplex | |
, util = require('util') | |
// Duplex stream that collects Buffers into a list addressable as one
// contiguous byte sequence.  `callback` may instead be an initial Buffer
// or an array of Buffers; as a function it is invoked with (err, data)
// when the stream ends or a piped source errors.
function BufferList (callback) {
  if (!(this instanceof BufferList))
    return new BufferList(callback)
  this._bufs = []   // list of Buffer segments
  this.length = 0   // total byte length across all segments
  if (typeof callback == 'function') {
    this._callback = callback
    // Forward the first pipe error to the callback, exactly once.
    var piper = function (err) {
      if (this._callback) {
        this._callback(err)
        this._callback = null
      }
    }.bind(this)
    this.on('pipe', function (src) {
      src.on('error', piper)
    })
    this.on('unpipe', function (src) {
      src.removeListener('error', piper)
    })
  }
  else if (Buffer.isBuffer(callback))
    this.append(callback)
  else if (Array.isArray(callback)) {
    callback.forEach(function (b) {
      // Non-Buffer array members are silently skipped.
      Buffer.isBuffer(b) && this.append(b)
    }.bind(this))
  }
  DuplexStream.call(this)
}
util.inherits(BufferList, DuplexStream)
// Map an absolute byte offset to [segment index, offset within segment].
// Returns undefined when the offset is past the end of the list.
BufferList.prototype._offset = function (offset) {
  var consumed = 0
  for (var idx = 0; idx < this._bufs.length; idx++) {
    var boundary = consumed + this._bufs[idx].length
    if (offset < boundary)
      return [ idx, offset - consumed ]
    consumed = boundary
  }
}
// Append a Buffer (or anything Buffer's constructor accepts) and update
// the total byte length.  Coerce to a Buffer *before* reading `length`:
// the previous code added the original argument's length, so appending a
// multibyte string counted characters instead of bytes and corrupted
// `this.length` (e.g. a 2-byte UTF-8 character counted as 1).
BufferList.prototype.append = function (buf) {
  if (!Buffer.isBuffer(buf))
    buf = new Buffer(buf)
  this._bufs.push(buf)
  this.length += buf.length
  return this
}
// Writable side of the duplex: append each incoming chunk.
BufferList.prototype._write = function (buf, encoding, callback) {
  this.append(buf)
  if (callback)
    callback()
}
// Readable side of the duplex: emit up to `size` bytes from the front,
// or end the stream (push null) once empty.
BufferList.prototype._read = function (size) {
  if (!this.length)
    return this.push(null)
  size = Math.min(size, this.length)
  this.push(this.slice(0, size))
  this.consume(size)
}
// Finish the stream and, if a collector callback was supplied, hand it
// the full accumulated contents exactly once.
BufferList.prototype.end = function (chunk) {
  DuplexStream.prototype.end.call(this, chunk)
  if (this._callback) {
    this._callback(null, this.slice())
    this._callback = null
  }
}
// Random access to a single byte (via a one-byte slice).
BufferList.prototype.get = function (index) {
  return this.slice(index, index + 1)[0]
}
// Return bytes [start, end) as a Buffer; implemented as copy() with a
// null destination.
BufferList.prototype.slice = function (start, end) {
  return this.copy(null, 0, start, end)
}
// Copy bytes [srcStart, srcEnd) into `dst`, or - when dst is null (the
// slice() path) - return them as a Buffer.  Range arguments are clamped
// to [0, this.length].
// NOTE(review): in the single-segment copy path below the return value
// is Buffer#copy's byte count, not `dst` - confirm callers ignore it.
BufferList.prototype.copy = function (dst, dstStart, srcStart, srcEnd) {
  if (typeof srcStart != 'number' || srcStart < 0)
    srcStart = 0
  if (typeof srcEnd != 'number' || srcEnd > this.length)
    srcEnd = this.length
  // Empty or out-of-range request: nothing to transfer.
  if (srcStart >= this.length)
    return dst || new Buffer(0)
  if (srcEnd <= 0)
    return dst || new Buffer(0)
  var copy = !!dst
    , off = this._offset(srcStart)     // [segment index, offset within it]
    , len = srcEnd - srcStart
    , bytes = len                      // bytes still to transfer
    , bufoff = (copy && dstStart) || 0 // write position in dst
    , start = off[1]
    , l
    , i
  // copy/slice everything
  if (srcStart === 0 && srcEnd == this.length) {
    if (!copy) // slice, just return a full concat
      return Buffer.concat(this._bufs)
    // copy, need to copy individual buffers
    for (i = 0; i < this._bufs.length; i++) {
      this._bufs[i].copy(dst, bufoff)
      bufoff += this._bufs[i].length
    }
    return dst
  }
  // easy, cheap case where it's a subset of one of the buffers
  if (bytes <= this._bufs[off[0]].length - start) {
    return copy
      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
      : this._bufs[off[0]].slice(start, start + bytes)
  }
  if (!copy) // a slice, we need something to copy in to
    dst = new Buffer(len)
  // Walk the segments, copying whole (or trailing) pieces until the
  // requested byte count is exhausted.
  for (i = off[0]; i < this._bufs.length; i++) {
    l = this._bufs[i].length - start
    if (bytes > l) {
      this._bufs[i].copy(dst, bufoff, start)
    } else {
      this._bufs[i].copy(dst, bufoff, start, start + bytes)
      break
    }
    bufoff += l
    bytes -= l
    // Only the first segment is entered mid-way.
    if (start)
      start = 0
  }
  return dst
}
// Decode bytes [start, end) using the given encoding.
BufferList.prototype.toString = function (encoding, start, end) {
  var section = this.slice(start, end)
  return section.toString(encoding)
}
// Drop `bytes` from the front of the list, shifting whole segments and
// trimming the final partial one in place.
// NOTE(review): when `bytes` exactly equals the head segment's length
// the else-branch leaves a zero-length segment in _bufs - confirm
// downstream methods tolerate that.
BufferList.prototype.consume = function (bytes) {
  while (this._bufs.length) {
    if (bytes > this._bufs[0].length) {
      bytes -= this._bufs[0].length
      this.length -= this._bufs[0].length
      this._bufs.shift()
    } else {
      // Remainder fits within the head segment: trim it in place.
      this._bufs[0] = this._bufs[0].slice(bytes)
      this.length -= bytes
      break
    }
  }
  return this
}
// Shallow clone: a new BufferList sharing the same underlying Buffers.
BufferList.prototype.duplicate = function () {
  var clone = new BufferList()
  for (var idx = 0; idx < this._bufs.length; idx++)
    clone.append(this._bufs[idx])
  return clone
}
// Discard all content and end the readable side.
BufferList.prototype.destroy = function () {
  this._bufs.length = 0;
  this.length = 0;
  this.push(null);
}
// Generate fixed-width numeric readers (readUInt32BE, readInt8, ...).
// Each slices out the method's byte width at `offset` and delegates to
// the same-named Buffer method; the map value is the byte width.
;(function () {
  var methods = {
      'readDoubleBE' : 8
    , 'readDoubleLE' : 8
    , 'readFloatBE'  : 4
    , 'readFloatLE'  : 4
    , 'readInt32BE'  : 4
    , 'readInt32LE'  : 4
    , 'readUInt32BE' : 4
    , 'readUInt32LE' : 4
    , 'readInt16BE'  : 2
    , 'readInt16LE'  : 2
    , 'readUInt16BE' : 2
    , 'readUInt16LE' : 2
    , 'readInt8'     : 1
    , 'readUInt8'    : 1
  }
  for (var m in methods) {
    // IIFE captures the method name for each generated accessor.
    (function (m) {
      BufferList.prototype[m] = function (offset) {
        return this.slice(offset, offset + methods[m])[m](0)
      }
    }(m))
  }
}())
module.exports = BufferList
}).call(this,require("buffer").Buffer) | |
},{"buffer":20,"readable-stream":69,"util":32}],54:[function(require,module,exports){ | |
(function (process,Buffer){ | |
var util = require('util') | |
, AbstractLevelDOWN = require('abstract-leveldown').AbstractLevelDOWN | |
// LevelDOWN shim that queues operations until a real backend is attached
// via setDb().
function DeferredLevelDOWN (location) {
  AbstractLevelDOWN.call(this, typeof location == 'string' ? location : '') // optional location, who cares?
  this._db = undefined      // the real backend, once attached
  this._operations = []     // queued { method, args } records
}
util.inherits(DeferredLevelDOWN, AbstractLevelDOWN)
// called by LevelUP when we have a real DB to take its place
DeferredLevelDOWN.prototype.setDb = function (db) {
  this._db = db
  // Replay queued operations in their original order.
  this._operations.forEach(function (op) {
    db[op.method].apply(db, op.args)
  })
}
// Opening a deferred store always "succeeds" asynchronously.
DeferredLevelDOWN.prototype._open = function (options, callback) {
  return process.nextTick(callback)
}
// queue a new deferred operation
DeferredLevelDOWN.prototype._operation = function (method, args) {
  // Pass straight through once the real backend has been attached.
  if (this._db)
    return this._db[method].apply(this._db, args)
  this._operations.push({ method: method, args: args })
}
// deferrables: each public primitive defers via _operation
'put get del batch approximateSize'.split(' ').forEach(function (m) {
  DeferredLevelDOWN.prototype['_' + m] = function () {
    this._operation(m, arguments)
  }
})
DeferredLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}
// don't need to implement this as LevelUP's ReadStream checks for 'ready' state
DeferredLevelDOWN.prototype._iterator = function () {
  throw new TypeError('not implemented')
}
module.exports = DeferredLevelDOWN
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"abstract-leveldown":57,"buffer":20,"util":32}],55:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
// Collects put/del operations for a single atomic write() against `db`.
function AbstractChainedBatch (db) {
  this._db = db
  this._operations = []   // queued { type, key[, value] } records
  this._written = false   // set once write() has been called
}
// A batch is single-use: any mutation after write() is a programmer error.
AbstractChainedBatch.prototype._checkWritten = function () {
  if (this._written)
    throw new Error('write() already called on this batch')
}
// Queue a put.  Key and value are validated and non-Buffers stringified,
// then either handed to a backend-provided _put or recorded in
// _operations for write().  Returns `this` for chaining.
AbstractChainedBatch.prototype.put = function (key, value) {
  this._checkWritten()
  var err = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (err) throw err
  err = this._db._checkKeyValue(value, 'value', this._db._isBuffer)
  if (err) throw err
  if (!this._db._isBuffer(key)) key = String(key)
  if (!this._db._isBuffer(value)) value = String(value)
  if (typeof this._put == 'function' )
    this._put(key, value)
  else
    this._operations.push({ type: 'put', key: key, value: value })
  return this
}
// Queue a delete.  The key is validated and stringified when it is not a
// Buffer, then either handed to a backend-provided _del or recorded in
// _operations for write().  Returns `this` for chaining.
AbstractChainedBatch.prototype.del = function (key) {
  this._checkWritten()
  var keyErr = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (keyErr)
    throw keyErr
  if (!this._db._isBuffer(key))
    key = String(key)
  if (typeof this._del == 'function')
    this._del(key)
  else
    this._operations.push({ type: 'del', key: key })
  return this
}
// Drop all queued operations; the batch remains usable until write().
AbstractChainedBatch.prototype.clear = function () {
  this._checkWritten()
  this._operations = []
  if (typeof this._clear == 'function' )
    this._clear()
  return this
}
// Commit the batch.  `options` may be omitted (callback in its place); a
// callback is mandatory.  Prefers a backend _write, then the db's
// _batch, else succeeds on the next tick (no-op backend).
AbstractChainedBatch.prototype.write = function (options, callback) {
  this._checkWritten()
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('write() requires a callback argument')
  if (typeof options != 'object')
    options = {}
  this._written = true
  if (typeof this._write == 'function' )
    return this._write(callback)
  if (typeof this._db._batch == 'function')
    return this._db._batch(this._operations, options, callback)
  process.nextTick(callback)
}
module.exports = AbstractChainedBatch
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],56:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
// Base iterator: tracks the lifecycle flags shared by all backends.
function AbstractIterator (db) {
  this.db = db
  this._ended = false     // end() has been called
  this._nexting = false   // a next() call is in flight
}
// Fetch the next entry.  Enforces the iterator protocol: no next() after
// end(), no overlapping next() calls.  Delegates to _next when the
// concrete iterator provides one; otherwise signals end-of-data on the
// next tick by invoking the callback with no arguments.
AbstractIterator.prototype.next = function (callback) {
  var self = this
  if (typeof callback != 'function')
    throw new Error('next() requires a callback argument')
  if (self._ended)
    return callback(new Error('cannot call next() after end()'))
  if (self._nexting)
    return callback(new Error('cannot call next() before previous next() has completed'))
  self._nexting = true
  if (typeof self._next == 'function') {
    return self._next(function () {
      // Clear the in-flight flag before surfacing the result.
      self._nexting = false
      callback.apply(null, arguments)
    })
  }
  process.nextTick(function () {
    self._nexting = false
    callback()
  })
}
// Release the iterator.  May only be called once; delegates to _end when
// the backend provides one, else completes on the next tick.
AbstractIterator.prototype.end = function (callback) {
  if (typeof callback != 'function')
    throw new Error('end() requires a callback argument')
  if (this._ended)
    return callback(new Error('end() already called on iterator'))
  this._ended = true
  if (typeof this._end == 'function')
    return this._end(callback)
  process.nextTick(callback)
}
module.exports = AbstractIterator
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],57:[function(require,module,exports){ | |
(function (process,Buffer){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
var xtend = require('xtend') | |
, AbstractIterator = require('./abstract-iterator') | |
, AbstractChainedBatch = require('./abstract-chained-batch') | |
// Base class for LevelDOWN-compatible stores.  A string `location` is
// mandatory, even for backends that ignore it.
function AbstractLevelDOWN (location) {
  var missing = !arguments.length || location === undefined
  if (missing)
    throw new Error('constructor requires at least a location argument')
  if (typeof location != 'string')
    throw new Error('constructor requires a location string argument')
  this.location = location
}
// Open the store.  `options` is optional (callback may take its place);
// delegates to _open when the backend defines one, otherwise succeeds on
// the next tick.
AbstractLevelDOWN.prototype.open = function (options, callback) {
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('open() requires a callback argument')
  if (typeof options != 'object')
    options = {}
  if (typeof this._open == 'function')
    return this._open(options, callback)
  process.nextTick(callback)
}
// Close the store; delegates to _close when provided, else succeeds on
// the next tick.
AbstractLevelDOWN.prototype.close = function (callback) {
  if (typeof callback != 'function')
    throw new Error('close() requires a callback argument')
  if (typeof this._close == 'function')
    return this._close(callback)
  process.nextTick(callback)
}
// Read a single key.  Validation errors are collected via deliberate
// assignment-in-condition and reported through the callback.  Without a
// backend _get, reports NotFound asynchronously.
AbstractLevelDOWN.prototype.get = function (key, options, callback) {
  var err
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('get() requires a callback argument')
  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)
  if (!this._isBuffer(key))
    key = String(key)
  if (typeof options != 'object')
    options = {}
  if (typeof this._get == 'function')
    return this._get(key, options, callback)
  process.nextTick(function () { callback(new Error('NotFound')) })
}
// Store a key/value pair.  Key and value are validated; non-Buffer keys
// are always stringified, values only outside the browser (see below).
// Delegates to a backend _put, else succeeds on the next tick.
AbstractLevelDOWN.prototype.put = function (key, value, options, callback) {
  var err
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('put() requires a callback argument')
  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)
  if (err = this._checkKeyValue(value, 'value', this._isBuffer))
    return callback(err)
  if (!this._isBuffer(key))
    key = String(key)
  // coerce value to string in node, don't touch it in browser
  // (indexeddb can store any JS type)
  if (!this._isBuffer(value) && !process.browser)
    value = String(value)
  if (typeof options != 'object')
    options = {}
  if (typeof this._put == 'function')
    return this._put(key, value, options, callback)
  process.nextTick(callback)
}
// Delete a key.  Same validation/stringification rules as get();
// delegates to a backend _del, else succeeds on the next tick.
AbstractLevelDOWN.prototype.del = function (key, options, callback) {
  var err
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('del() requires a callback argument')
  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)
  if (!this._isBuffer(key))
    key = String(key)
  if (typeof options != 'object')
    options = {}
  if (typeof this._del == 'function')
    return this._del(key, options, callback)
  process.nextTick(callback)
}
// Array form of batch.  Called with no arguments it returns a chained
// batch instead.  Validates each operation (type, key, and - for puts -
// value), silently skipping non-object entries, then delegates to a
// backend _batch, else succeeds on the next tick.
AbstractLevelDOWN.prototype.batch = function (array, options, callback) {
  if (!arguments.length)
    return this._chainedBatch()
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('batch(array) requires a callback argument')
  if (!Array.isArray(array))
    return callback(new Error('batch(array) requires an array argument'))
  if (typeof options != 'object')
    options = {}
  var i = 0
    , l = array.length
    , e
    , err
  for (; i < l; i++) {
    e = array[i]
    if (typeof e != 'object')
      continue
    // Assignment-in-condition: collect the first validation error.
    if (err = this._checkKeyValue(e.type, 'type', this._isBuffer))
      return callback(err)
    if (err = this._checkKeyValue(e.key, 'key', this._isBuffer))
      return callback(err)
    if (e.type == 'put') {
      if (err = this._checkKeyValue(e.value, 'value', this._isBuffer))
        return callback(err)
    }
  }
  if (typeof this._batch == 'function')
    return this._batch(array, options, callback)
  process.nextTick(callback)
}
//TODO: remove from here, not a necessary primitive
// Estimate the on-disk size of the [start, end] key range.  Backends
// without _approximateSize report 0 asynchronously.
AbstractLevelDOWN.prototype.approximateSize = function (start, end, callback) {
  if (   start == null
      || end == null
      || typeof start == 'function'
      || typeof end == 'function') {
    throw new Error('approximateSize() requires valid `start`, `end` and `callback` arguments')
  }
  if (typeof callback != 'function')
    throw new Error('approximateSize() requires a callback argument')
  if (!this._isBuffer(start))
    start = String(start)
  if (!this._isBuffer(end))
    end = String(end)
  if (typeof this._approximateSize == 'function')
    return this._approximateSize(start, end, callback)
  process.nextTick(function () {
    callback(null, 0)
  })
}
// Normalise iterator options on a shallow copy: drop empty-Buffer range
// markers, coerce `reverse` to a boolean, and map gt/gte/lt/lte onto the
// legacy start/exclusiveStart fields expected by older backends.
AbstractLevelDOWN.prototype._setupIteratorOptions = function (options) {
  var self = this
  options = xtend(options)
  ;[ 'start', 'end', 'gt', 'gte', 'lt', 'lte' ].forEach(function (o) {
    if (options[o] && self._isBuffer(options[o]) && options[o].length === 0)
      delete options[o]
  })
  options.reverse = !!options.reverse
  // fix `start` so it takes into account gt, gte, lt, lte as appropriate
  if (options.reverse && options.lt)
    options.start = options.lt
  if (options.reverse && options.lte)
    options.start = options.lte
  if (!options.reverse && options.gt)
    options.start = options.gt
  if (!options.reverse && options.gte)
    options.start = options.gte
  if ((options.reverse && options.lt && !options.lte)
    || (!options.reverse && options.gt && !options.gte))
    options.exclusiveStart = true // start should *not* include matching key
  return options
}
// Create an iterator over the store.  Backends supply _iterator; the
// AbstractIterator fallback yields an immediate end-of-data.
AbstractLevelDOWN.prototype.iterator = function (options) {
  if (typeof options != 'object')
    options = {}
  options = this._setupIteratorOptions(options)
  if (typeof this._iterator == 'function')
    return this._iterator(options)
  return new AbstractIterator(this)
}
// Default chained-batch factory; backends may override.
AbstractLevelDOWN.prototype._chainedBatch = function () {
  return new AbstractChainedBatch(this)
}
// Buffer test hook, overridable for environments with Buffer look-alikes.
AbstractLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}
// Validate a key or value: rejects null/undefined and empty
// Buffers/strings by *returning* (not throwing) an Error; returns
// undefined when valid.  Callers pass a third isBuffer argument, but
// this implementation ignores it and uses this._isBuffer instead.
AbstractLevelDOWN.prototype._checkKeyValue = function (obj, type) {
  if (obj === null || obj === undefined)
    return new Error(type + ' cannot be `null` or `undefined`')
  if (this._isBuffer(obj)) {
    if (obj.length === 0)
      return new Error(type + ' cannot be an empty Buffer')
  } else if (String(obj) === '')
    return new Error(type + ' cannot be an empty String')
}
module.exports.AbstractLevelDOWN = AbstractLevelDOWN
module.exports.AbstractIterator = AbstractIterator
module.exports.AbstractChainedBatch = AbstractChainedBatch
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"./abstract-chained-batch":55,"./abstract-iterator":56,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"xtend":70}],58:[function(require,module,exports){ | |
var prr = require('prr') | |
// Shared initialiser for generated error types: defines type/name, cause
// and message on the error instance via prr with the 'ewr' flag string.
function init (type, message, cause) {
  prr(this, {
      type    : type
    , name    : type
    // can be passed just a 'cause'
    , cause   : typeof message != 'string' ? message : cause
    , message : !!message && typeof message != 'string' ? message.message : message
  }, 'ewr')
}
// generic prototype, not intended to be actually used - helpful for `instanceof`
function CustomError (message, cause) {
  Error.call(this)
  if (Error.captureStackTrace)
    // Use the named function reference instead of the deprecated
    // `arguments.callee`, which throws a TypeError in strict-mode code.
    Error.captureStackTrace(this, CustomError)
  init.call(this, 'CustomError', message, cause)
}
CustomError.prototype = new Error()
// Build a new error constructor named `type`, optionally inheriting from
// `proto` (defaults to CustomError).  FilesystemError instances copy
// code/path/errno from their cause and synthesise a readable message
// from the errno table.
function createError (errno, type, proto) {
  var err = function (message, cause) {
    init.call(this, type, message, cause)
    //TODO: the specificity here is stupid, errno should be available everywhere
    if (type == 'FilesystemError') {
      this.code = this.cause.code
      this.path = this.cause.path
      this.errno = this.cause.errno
      this.message =
        (errno.errno[this.cause.errno]
          ? errno.errno[this.cause.errno].description
          : this.cause.message)
        + (this.cause.path ? ' [' + this.cause.path + ']' : '')
    }
    Error.call(this)
    if (Error.captureStackTrace)
      // Named reference instead of the deprecated `arguments.callee`,
      // which throws a TypeError in strict-mode code.
      Error.captureStackTrace(this, err)
  }
  err.prototype = !!proto ? new proto() : new CustomError()
  return err
}
// Factory: bind createError to the given errno table and expose the
// stock CustomError and FilesystemError types.
module.exports = function (errno) {
  var ce = function (type, proto) {
    return createError(errno, type, proto)
  }
  return {
      CustomError     : CustomError
    , FilesystemError : ce('FilesystemError')
    , createError     : ce
  }
}
},{"prr":60}],59:[function(require,module,exports){ | |
var all = module.exports.all = [ | |
{ | |
"errno": -1, | |
"code": "UNKNOWN", | |
"description": "unknown error" | |
}, | |
{ | |
"errno": 0, | |
"code": "OK", | |
"description": "success" | |
}, | |
{ | |
"errno": 1, | |
"code": "EOF", | |
"description": "end of file" | |
}, | |
{ | |
"errno": 2, | |
"code": "EADDRINFO", | |
"description": "getaddrinfo error" | |
}, | |
{ | |
"errno": 3, | |
"code": "EACCES", | |
"description": "permission denied" | |
}, | |
{ | |
"errno": 4, | |
"code": "EAGAIN", | |
"description": "resource temporarily unavailable" | |
}, | |
{ | |
"errno": 5, | |
"code": "EADDRINUSE", | |
"description": "address already in use" | |
}, | |
{ | |
"errno": 6, | |
"code": "EADDRNOTAVAIL", | |
"description": "address not available" | |
}, | |
{ | |
"errno": 7, | |
"code": "EAFNOSUPPORT", | |
"description": "address family not supported" | |
}, | |
{ | |
"errno": 8, | |
"code": "EALREADY", | |
"description": "connection already in progress" | |
}, | |
{ | |
"errno": 9, | |
"code": "EBADF", | |
"description": "bad file descriptor" | |
}, | |
{ | |
"errno": 10, | |
"code": "EBUSY", | |
"description": "resource busy or locked" | |
}, | |
{ | |
"errno": 11, | |
"code": "ECONNABORTED", | |
"description": "software caused connection abort" | |
}, | |
{ | |
"errno": 12, | |
"code": "ECONNREFUSED", | |
"description": "connection refused" | |
}, | |
{ | |
"errno": 13, | |
"code": "ECONNRESET", | |
"description": "connection reset by peer" | |
}, | |
{ | |
"errno": 14, | |
"code": "EDESTADDRREQ", | |
"description": "destination address required" | |
}, | |
{ | |
"errno": 15, | |
"code": "EFAULT", | |
"description": "bad address in system call argument" | |
}, | |
{ | |
"errno": 16, | |
"code": "EHOSTUNREACH", | |
"description": "host is unreachable" | |
}, | |
{ | |
"errno": 17, | |
"code": "EINTR", | |
"description": "interrupted system call" | |
}, | |
{ | |
"errno": 18, | |
"code": "EINVAL", | |
"description": "invalid argument" | |
}, | |
{ | |
"errno": 19, | |
"code": "EISCONN", | |
"description": "socket is already connected" | |
}, | |
{ | |
"errno": 20, | |
"code": "EMFILE", | |
"description": "too many open files" | |
}, | |
{ | |
"errno": 21, | |
"code": "EMSGSIZE", | |
"description": "message too long" | |
}, | |
{ | |
"errno": 22, | |
"code": "ENETDOWN", | |
"description": "network is down" | |
}, | |
{ | |
"errno": 23, | |
"code": "ENETUNREACH", | |
"description": "network is unreachable" | |
}, | |
{ | |
"errno": 24, | |
"code": "ENFILE", | |
"description": "file table overflow" | |
}, | |
{ | |
"errno": 25, | |
"code": "ENOBUFS", | |
"description": "no buffer space available" | |
}, | |
{ | |
"errno": 26, | |
"code": "ENOMEM", | |
"description": "not enough memory" | |
}, | |
{ | |
"errno": 27, | |
"code": "ENOTDIR", | |
"description": "not a directory" | |
}, | |
{ | |
"errno": 28, | |
"code": "EISDIR", | |
"description": "illegal operation on a directory" | |
}, | |
{ | |
"errno": 29, | |
"code": "ENONET", | |
"description": "machine is not on the network" | |
}, | |
{ | |
"errno": 31, | |
"code": "ENOTCONN", | |
"description": "socket is not connected" | |
}, | |
{ | |
"errno": 32, | |
"code": "ENOTSOCK", | |
"description": "socket operation on non-socket" | |
}, | |
{ | |
"errno": 33, | |
"code": "ENOTSUP", | |
"description": "operation not supported on socket" | |
}, | |
{ | |
"errno": 34, | |
"code": "ENOENT", | |
"description": "no such file or directory" | |
}, | |
{ | |
"errno": 35, | |
"code": "ENOSYS", | |
"description": "function not implemented" | |
}, | |
{ | |
"errno": 36, | |
"code": "EPIPE", | |
"description": "broken pipe" | |
}, | |
{ | |
"errno": 37, | |
"code": "EPROTO", | |
"description": "protocol error" | |
}, | |
{ | |
"errno": 38, | |
"code": "EPROTONOSUPPORT", | |
"description": "protocol not supported" | |
}, | |
{ | |
"errno": 39, | |
"code": "EPROTOTYPE", | |
"description": "protocol wrong type for socket" | |
}, | |
{ | |
"errno": 40, | |
"code": "ETIMEDOUT", | |
"description": "connection timed out" | |
}, | |
{ | |
"errno": 41, | |
"code": "ECHARSET", | |
"description": "invalid Unicode character" | |
}, | |
{ | |
"errno": 42, | |
"code": "EAIFAMNOSUPPORT", | |
"description": "address family for hostname not supported" | |
}, | |
{ | |
"errno": 44, | |
"code": "EAISERVICE", | |
"description": "servname not supported for ai_socktype" | |
}, | |
{ | |
"errno": 45, | |
"code": "EAISOCKTYPE", | |
"description": "ai_socktype not supported" | |
}, | |
{ | |
"errno": 46, | |
"code": "ESHUTDOWN", | |
"description": "cannot send after transport endpoint shutdown" | |
}, | |
{ | |
"errno": 47, | |
"code": "EEXIST", | |
"description": "file already exists" | |
}, | |
{ | |
"errno": 48, | |
"code": "ESRCH", | |
"description": "no such process" | |
}, | |
{ | |
"errno": 49, | |
"code": "ENAMETOOLONG", | |
"description": "name too long" | |
}, | |
{ | |
"errno": 50, | |
"code": "EPERM", | |
"description": "operation not permitted" | |
}, | |
{ | |
"errno": 51, | |
"code": "ELOOP", | |
"description": "too many symbolic links encountered" | |
}, | |
{ | |
"errno": 52, | |
"code": "EXDEV", | |
"description": "cross-device link not permitted" | |
}, | |
{ | |
"errno": 53, | |
"code": "ENOTEMPTY", | |
"description": "directory not empty" | |
}, | |
{ | |
"errno": 54, | |
"code": "ENOSPC", | |
"description": "no space left on device" | |
}, | |
{ | |
"errno": 55, | |
"code": "EIO", | |
"description": "i/o error" | |
}, | |
{ | |
"errno": 56, | |
"code": "EROFS", | |
"description": "read-only file system" | |
}, | |
{ | |
"errno": 57, | |
"code": "ENODEV", | |
"description": "no such device" | |
}, | |
{ | |
"errno": 58, | |
"code": "ESPIPE", | |
"description": "invalid seek" | |
}, | |
{ | |
"errno": 59, | |
"code": "ECANCELED", | |
"description": "operation canceled" | |
} | |
] | |
// Fast lookup tables keyed by numeric errno and by symbolic code.
// Derived from `all` rather than maintained as two ~120-entry
// hand-written maps: this removes the risk of the tables drifting out of
// sync with the list above (the numbering gaps - there is no errno 30 or
// 43 - previously had to be tracked by hand) while producing identical
// keys and values.
module.exports.errno = {}
module.exports.code = {}
all.forEach(function (entry) {
  module.exports.errno[entry.errno] = entry
  module.exports.code[entry.code] = entry
})
module.exports.custom = require("./custom")(module.exports)
module.exports.create = module.exports.custom.createError
},{"./custom":58}],60:[function(require,module,exports){ | |
/*! | |
* prr | |
* (c) 2013 Rod Vagg <[email protected]> | |
* https://github.com/rvagg/prr | |
* License: MIT | |
*/ | |
// Minimal UMD wrapper: CommonJS export when `module` exists, otherwise a
// global named 'prr'.
(function (name, context, definition) {
  if (typeof module != 'undefined' && module.exports)
    module.exports = definition()
  else
    context[name] = definition()
})('prr', this, function() {
  // Property definer that prefers Object.defineProperty, falling back to
  // plain assignment on pre-ES5 engines.
  var setProperty = typeof Object.defineProperty == 'function'
      ? function (obj, key, options) {
          Object.defineProperty(obj, key, options)
          return obj
        }
      : function (obj, key, options) { // < es5
          obj[key] = options.value
          return obj
        }
    // Build a property descriptor.  `options` may be a descriptor-like
    // object or a shorthand string checked by first letter:
    // 'e'numerable, 'c'onfigurable, 'w'ritable.  Anything else yields
    // all-false flags.
    , makeOptions = function (value, options) {
        var oo = typeof options == 'object'
          , os = !oo && typeof options == 'string'
          , op = function (p) {
              return oo
                ? !!options[p]
                : os
                  ? options.indexOf(p[0]) > -1
                  : false
            }
        return {
            enumerable   : op('enumerable')
          , configurable : op('configurable')
          , writable     : op('writable')
          , value        : value
        }
      }
    // prr(obj, key, value[, options]) defines one property; with an
    // object as `key`, each own key of that object is defined using the
    // shared descriptor (its `value` overwritten per key).
    , prr = function (obj, key, value, options) {
        var k
        options = makeOptions(value, options)
        if (typeof key == 'object') {
          for (k in key) {
            if (Object.hasOwnProperty.call(key, k)) {
              options.value = key[k]
              setProperty(obj, k, options)
            }
          }
          return obj
        }
        return setProperty(obj, key, options)
      }
  return prr
})
},{}],61:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.

module.exports = Duplex;

/*<replacement>*/
// Object.keys polyfill for pre-ES5 engines.
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) keys.push(key);
  return keys;
}
/*</replacement>*/

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');

util.inherits(Duplex, Readable);

// Parasitic inheritance: copy every Writable prototype method that
// Readable didn't already provide.
forEach(objectKeys(Writable.prototype), function(method) {
  if (!Duplex.prototype[method])
    Duplex.prototype[method] = Writable.prototype[method];
});

// Duplex(options) — a stream that is both readable and writable.
// options.readable === false / options.writable === false disable a side;
// options.allowHalfOpen === false auto-ends the writable side when the
// readable side emits 'end'. Usable without `new`.
function Duplex(options) {
  if (!(this instanceof Duplex))
    return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false)
    this.readable = false;

  if (options && options.writable === false)
    this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false)
    this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended)
    return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  process.nextTick(this.end.bind(this));
}

// Minimal forEach helper (avoids relying on Array.prototype.forEach).
function forEach (xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./_stream_readable":63,"./_stream_writable":65,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"core-util-is":66,"inherits":33}],62:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

module.exports = PassThrough;

var Transform = require('./_stream_transform');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(PassThrough, Transform);

// PassThrough(options) — a Transform whose _transform is the identity.
// Usable without `new`; options are forwarded to Transform.
function PassThrough(options) {
  if (!(this instanceof PassThrough))
    return new PassThrough(options);

  Transform.call(this, options);
}

// Identity transform: hand every chunk straight back to the machinery.
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
},{"./_stream_transform":64,"core-util-is":66,"inherits":33}],63:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Readable;

/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/

/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/

Readable.ReadableState = ReadableState;

var EE = require('events').EventEmitter;

/*<replacement>*/
// Older Node versions lack EventEmitter.listenerCount; polyfill it.
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/

var Stream = require('stream');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

// Loaded lazily, only when an encoding is actually requested.
var StringDecoder;

util.inherits(Readable, Stream);
// Internal per-stream state for a Readable. One instance lives on
// stream._readableState; all the read()/push() machinery below operates
// on these fields.
function ReadableState(options, stream) {
  options = options || {};

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;

  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;

  this.buffer = [];           // queued chunks awaiting read()
  this.length = 0;            // total buffered bytes (chunk count in objectMode)
  this.pipes = null;          // single destination, or array when piping to > 1
  this.pipesCount = 0;
  this.flowing = false;       // true while pipe()-driven flow is active
  this.ended = false;         // push(null) has been seen
  this.endEmitted = false;    // 'end' has actually been emitted
  this.reading = false;       // an async _read() is outstanding

  // In streams that never have any data, and do push(null) right away,
  // the consumer can miss the 'end' event if they do some I/O before
  // consuming the stream.  So, we don't emit('end') until some reading
  // happens.
  this.calledRead = false;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick.  We set this to true at first, becuase any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;

  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;

  // Crypto is kind of old and crusty.  Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;

  this.decoder = null;        // StringDecoder instance when an encoding is set
  this.encoding = null;
  if (options.encoding) {
    // Lazy-load string_decoder so streams without encodings never pay for it.
    if (!StringDecoder)
      StringDecoder = require('string_decoder/').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable(options) — base class constructor. Usable without `new`.
// Sets up internal state, then defers to the legacy Stream constructor.
function Readable(options) {
  if (!(this instanceof Readable))
    return new Readable(options);

  this._readableState = new ReadableState(options, this);

  // legacy
  this.readable = true;

  Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
//
// chunk: Buffer | string | null (EOF marker) | anything in objectMode.
// encoding: only meaningful for string chunks; a string whose encoding
// differs from the stream's own is converted to a Buffer up front.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  if (typeof chunk === 'string' && !state.objectMode) {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read():
// puts the chunk back at the front of the internal buffer.
Readable.prototype.unshift = function(chunk) {
  var state = this._readableState;
  return readableAddChunk(this, state, chunk, '', true);
};
// Shared implementation of push()/unshift(). Validates the chunk, handles
// the EOF marker (null/undefined), buffers the data at the back (push) or
// front (unshift), and signals waiting readers. Returns
// needMoreData(state) — whether the producer may keep pushing.
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null || chunk === undefined) {
    // EOF marker: the outstanding _read() round-trip is done; flag the
    // end exactly once.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      // Decode only fresh push()es: unshifted data already came out of
      // read() decoded. An explicit encoding means it's already a Buffer
      // matching the stream's encoding.
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);

      // update the buffer info.
      state.length += state.objectMode ? 1 : chunk.length;
      if (addToFront) {
        state.buffer.unshift(chunk);
      } else {
        state.reading = false;
        state.buffer.push(chunk);
      }

      if (state.needReadable)
        emitReadable(stream);

      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // Zero-length chunk in non-object mode: nothing to buffer, but the
    // _read() round-trip is finished.
    state.reading = false;
  }

  return needMoreData(state);
}
// Decide whether a producer that just push()ed should keep feeding us.
// True while the stream hasn't ended AND any of the following hold:
//   - a 'readable' event is wanted (so another push should re-trigger it,
//     e.g. after read(largeNumber) set needReadable);
//   - the buffer is empty (covers hwm === 0 setups such as the repl);
//   - the buffered length is still under the high water mark.
function needMoreData(state) {
  if (state.ended) return false;
  return state.needReadable ||
    state.length === 0 ||
    state.length < state.highWaterMark;
}
// backwards compatibility: set the encoding used to decode Buffers into
// strings on read(). Loads string_decoder lazily on first use.
Readable.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
    StringDecoder = require('string_decoder/').StringDecoder;
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
};
// Upper bound for highWaterMark growth: 0x800000 bytes (8 MiB).
// NOTE(review): the upstream comment said "128MB", which does not match
// 0x800000 — the code is taken as authoritative here.
var MAX_HWM = 0x800000;

// Return the smallest power of two >= n, capped at MAX_HWM.
// Classic bit-smearing trick: spread the highest set bit of (n - 1)
// into every lower position, then add one.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) return MAX_HWM;
  n -= 1;
  var shift = 1;
  while (shift < 32) {
    n |= n >> shift;
    shift <<= 1;
  }
  return n + 1;
}
// Compute how many bytes (or objects) read(n) should actually hand out,
// given the requested n and the buffered state. Side effects: may bump
// state.highWaterMark (power-of-two growth) and may set
// state.needReadable when the request can't be satisfied yet.
function howMuchToRead(n, state) {
  if (state.length === 0 && state.ended)
    return 0;

  // objectMode reads ignore n entirely: zero or one object.
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (n === null || isNaN(n)) {
    // only flow one buffer at a time
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    else
      return state.length;
  }

  if (n <= 0)
    return 0;

  // If we're asking for more than the target buffer level,
  // then raise the water mark.  Bump up to the next highest
  // power of 2, to prevent increasing it excessively in tiny
  // amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  // don't have that much.  return null, unless we've ended.
  if (n > state.length) {
    if (!state.ended) {
      state.needReadable = true;
      return 0;
    } else
      return state.length;
  }

  return n;
}
// you can override either this method, or the async _read(n) below.
//
// read(n): pull up to n bytes (objects in objectMode) out of the internal
// buffer, possibly triggering _read() to refill it first. Returns the
// data, or null when nothing can be returned right now. read(0) is a way
// to poke the stream without consuming anything.
Readable.prototype.read = function(n) {
  var state = this._readableState;
  state.calledRead = true;
  var nOrig = n;
  var ret;

  if (typeof n !== 'number' || n > 0)
    state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 &&
      state.needReadable &&
      (state.length >= state.highWaterMark || state.ended)) {
    emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    ret = null;

    // In cases where the decoder did not receive enough data
    // to produce a full chunk, then immediately received an
    // EOF, state.buffer will contain [<Buffer >, <Buffer 00 ...>].
    // howMuchToRead will see this and coerce the amount to
    // read to zero (because it's looking at the length of the
    // first <Buffer > in state.buffer), and we'll end up here.
    //
    // This can only happen via state.decoder -- no other venue
    // exists for pushing a zero-length chunk into state.buffer
    // and triggering this behavior. In this case, we return our
    // remaining data and end the stream, if appropriate.
    if (state.length > 0 && state.decoder) {
      ret = fromList(n, state);
      state.length -= ret.length;
    }

    if (state.length === 0)
      endReadable(this);

    return ret;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read.  The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous.  Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;

  // if we currently have less than the highWaterMark, then also read some
  if (state.length - n <= state.highWaterMark)
    doRead = true;

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading)
    doRead = false;

  if (doRead) {
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0)
      state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
  }

  // If _read called its callback synchronously, then `reading`
  // will be false, and we need to re-evaluate how much data we
  // can return to the user.
  if (doRead && !state.reading)
    n = howMuchToRead(nOrig, state);

  if (n > 0)
    ret = fromList(n, state);
  else
    ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  }

  state.length -= n;

  // If we have nothing in the buffer, then we want to know
  // as soon as we *do* get something into the buffer.
  if (state.length === 0 && !state.ended)
    state.needReadable = true;

  // If we happened to read() exactly the remaining amount in the
  // buffer, and the EOF has been seen at this point, then make sure
  // that we emit 'end' on the very next tick.
  if (state.ended && !state.endEmitted && state.length === 0)
    endReadable(this);

  return ret;
};
// Validate a chunk handed to push()/unshift().
// Returns null when the chunk is acceptable, otherwise a TypeError for
// the caller to emit. Anything goes in objectMode; otherwise only
// Buffers, strings, and the null/undefined EOF markers are allowed.
function chunkInvalid(state, chunk) {
  if (state.objectMode) return null;
  if (chunk === null || chunk === undefined) return null;
  if (typeof chunk === 'string' || Buffer.isBuffer(chunk)) return null;
  return new TypeError('Invalid non-string/buffer chunk');
}
// Called when push(null) signals EOF: flush any partial characters the
// string decoder is still holding, mark the stream ended, and wake up
// readers (or finish the stream outright if nothing is buffered).
function onEofChunk(stream, state) {
  if (state.decoder && !state.ended) {
    var chunk = state.decoder.end();
    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }
  state.ended = true;

  // if we've ended and we have some data left, then emit
  // 'readable' now to make sure it gets picked up.
  if (state.length > 0)
    emitReadable(stream);
  else
    endReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow.  This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  // Coalesce: at most one 'readable' per read() cycle.
  if (state.emittedReadable)
    return;

  state.emittedReadable = true;
  if (state.sync)
    process.nextTick(function() {
      emitReadable_(stream);
    });
  else
    emitReadable_(stream);
}

// The actual emission, split out so it can be deferred via nextTick.
function emitReadable_(stream) {
  stream.emit('readable');
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data.  that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
//
// Schedules at most one background refill pass via nextTick.
function maybeReadMore(stream, state) {
  if (!state.readingMore) {
    state.readingMore = true;
    process.nextTick(function() {
      maybeReadMore_(stream, state);
    });
  }
}

// The refill pass: keep poking the stream with read(0) until the buffer
// reaches the high water mark, data stops arriving, flow starts, or the
// stream ends.
function maybeReadMore_(stream, state) {
  var len = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (len === state.length)
      // didn't get any data, stop spinning.
      break;
    else
      len = state.length;
  }
  state.readingMore = false;
}
// abstract method.  to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
  // Base class stub: subclasses must override; report misuse as an error.
  this.emit('error', new Error('not implemented'));
};
// pipe(dest[, pipeOpts]): connect this readable to a writable `dest`.
// Wires up all the plumbing (drain/error/unpipe/close/finish handlers),
// forwards 'end' to dest.end() unless pipeOpts.end === false or dest is
// stdout/stderr, and starts the flow loop. Returns dest for chaining.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // state.pipes is a scalar for one destination, an array for several.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;

  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    // Only tear down if it is *this* source being unpiped from dest.
    if (readable !== src) return;
    cleanup();
  }

  function onend() {
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  function cleanup() {
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (!dest._writableState || dest._writableState.needDrain)
      ondrain();
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    unpipe();
    dest.removeListener('error', onerror);
    // Re-emit only when no other 'error' listener remains, so the error
    // still throws if completely unhandled.
    if (EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isArray(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    // the handler that waits for readable events after all
    // the data gets sucked out in flow.
    // This would be easier to follow with a .once() handler
    // in flow(), but that is too slow.
    this.on('readable', pipeOnReadable);

    state.flowing = true;
    process.nextTick(function() {
      flow(src);
    });
  }

  return dest;
};
// Build the 'drain' handler for a piped destination: one fewer writer to
// wait for; once none remain, restart the flow loop on the source.
function pipeOnDrain(src) {
  return function() {
    var dest = this;
    var state = src._readableState;
    state.awaitDrain--;
    if (state.awaitDrain === 0)
      flow(src);
  };
}
// The engine of pipe()-driven flow: repeatedly read() a chunk and write
// it to every destination, stopping whenever any destination signals
// backpressure (write() returned false) until its 'drain' fires.
function flow(src) {
  var state = src._readableState;
  var chunk;
  state.awaitDrain = 0;

  // Per-destination writer; counts destinations that need a drain.
  function write(dest, i, list) {
    var written = dest.write(chunk);
    if (false === written) {
      state.awaitDrain++;
    }
  }

  while (state.pipesCount && null !== (chunk = src.read())) {

    if (state.pipesCount === 1)
      write(state.pipes, 0, null);
    else
      forEach(state.pipes, write);

    src.emit('data', chunk);

    // if anyone needs a drain, then we have to wait for that.
    if (state.awaitDrain > 0)
      return;
  }

  // if every destination was unpiped, either before entering this
  // function, or in the while loop, then stop flowing.
  //
  // NB: This is a pretty rare edge case.
  if (state.pipesCount === 0) {
    state.flowing = false;

    // if there were data event listeners added, then switch to old mode.
    if (EE.listenerCount(src, 'data') > 0)
      emitDataEvents(src);
    return;
  }

  // at this point, no one needed a drain, so we just ran out of data
  // on the next readable event, start it over again.
  state.ranOut = true;
}
// 'readable' handler installed by pipe(): resume the flow loop if it
// previously stopped because the buffer ran dry (state.ranOut).
function pipeOnReadable() {
  if (this._readableState.ranOut) {
    this._readableState.ranOut = false;
    flow(this);
  }
}
// unpipe([dest]): detach one destination, or all of them when dest is
// omitted. Emits 'unpipe' on each detached destination. Returns this.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;

  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;

    if (!dest)
      dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    this.removeListener('readable', pipeOnReadable);
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // try to find the right one.
  // (indexOf is a module-local helper defined later in this bundle module.)
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  // Collapse back to the scalar representation when one pipe remains.
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  // Attaching a 'data' listener switches the stream into old
  // ('data'-event) mode.
  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        // Poke the stream so a 'readable' event will eventually fire.
        this.read(0);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};

Readable.prototype.pause = function() {
  // second argument: start in the paused state.
  emitDataEvents(this, true);
  this.emit('pause');
};
// Convert the stream to the legacy interface ('data' events plus
// pause()/resume()). Replaces pipe/on/addListener/pause/resume with
// old-mode versions and drains the buffer as 'data' events.
// startPaused: begin in the paused state (used by pause()).
function emitDataEvents(stream, startPaused) {
  var state = stream._readableState;

  if (state.flowing) {
    // https://github.com/isaacs/readable-stream/issues/16
    throw new Error('Cannot switch to old mode now.');
  }

  // Closure state shared by the handlers installed below.
  var paused = startPaused || false;
  var readable = false;

  // convert to an old-style stream.
  stream.readable = true;
  stream.pipe = Stream.prototype.pipe;
  stream.on = stream.addListener = Stream.prototype.on;

  stream.on('readable', function() {
    readable = true;

    // Drain everything currently buffered as 'data' events.
    var c;
    while (!paused && (null !== (c = stream.read())))
      stream.emit('data', c);

    if (c === null) {
      readable = false;
      stream._readableState.needReadable = true;
    }
  });

  stream.pause = function() {
    paused = true;
    this.emit('pause');
  };

  stream.resume = function() {
    paused = false;
    if (readable)
      // data was already available: re-run the drain loop next tick
      process.nextTick(function() {
        stream.emit('readable');
      });
    else
      this.read(0);
    this.emit('resume');
  };

  // now make it start, just in case it hadn't already.
  stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Pushes the wrapped stream's 'data' chunks into this Readable, applies
// backpressure by pausing/resuming the source, and proxies the source's
// other methods and key events. Returns this.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;
  stream.on('end', function() {
    // Flush any partial characters still held by the string decoder.
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    //if (state.objectMode && util.isNullOrUndefined(chunk))
    if (state.objectMode && (chunk === null || chunk === undefined))
      return;
    else if (!state.objectMode && (!chunk || !chunk.length))
      return;

    var ret = self.push(chunk);
    if (!ret) {
      // Backpressure: halt the source until _read() resumes it below.
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  // (The IIFE captures `i` so each proxy calls the right method.)
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;

// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) { | |
var list = state.buffer; | |
var length = state.length; | |
var stringMode = !!state.decoder; | |
var objectMode = !!state.objectMode; | |
var ret; | |
// nothing in the list, definitely empty. | |
if (list.length === 0) | |
return null; | |
if (length === 0) | |
ret = null; | |
else if (objectMode) | |
ret = list.shift(); | |
else if (!n || n >= length) { | |
// read it all, truncate the array. | |
if (stringMode) | |
ret = list.join(''); | |
else | |
ret = Buffer.concat(list, length); | |
list.length = 0; | |
} else { | |
// read just some of it. | |
if (n < list[0].length) { | |
// just take a part of the first list item. | |
// slice is the same for buffers and strings. | |
var buf = list[0]; | |
ret = buf.slice(0, n); | |
list[0] = buf.slice(n); | |
} else if (n === list[0].length) { | |
// first list is a perfect match | |
ret = list.shift(); | |
} else { | |
// complex case. | |
// we have enough to cover it, but it spans past the first buffer. | |
if (stringMode) | |
ret = ''; | |
else | |
ret = new Buffer(n); | |
var c = 0; | |
for (var i = 0, l = list.length; i < l && c < n; i++) { | |
var buf = list[0]; | |
var cpy = Math.min(n - c, buf.length); | |
if (stringMode) | |
ret += buf.slice(0, cpy); | |
else | |
buf.copy(ret, c, 0, cpy); | |
if (cpy < buf.length) | |
list[0] = buf.slice(cpy); | |
else | |
list.shift(); | |
c += cpy; | |
} | |
} | |
} | |
return ret; | |
} | |
// Finish the readable side: arrange for the 'end' event once the
// consumer has actually called read() (state.calledRead) and the
// internal buffer is fully drained.
function endReadable(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0)
throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted && state.calledRead) {
state.ended = true;
// defer to the next tick so a synchronous unshift() can still win
process.nextTick(function() {
// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
stream.readable = false;
stream.emit('end');
}
});
}
}
// Minimal Array#forEach stand-in: invokes f(item, index) for each element.
function forEach(xs, f) {
  var len = xs.length;
  for (var idx = 0; idx < len; idx++) {
    f(xs[idx], idx);
  }
}
// Minimal Array#indexOf stand-in using strict equality; -1 when absent.
function indexOf(xs, x) {
  for (var idx = 0; idx < xs.length; idx++) {
    if (xs[idx] === x)
      return idx;
  }
  return -1;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"core-util-is":66,"events":18,"inherits":33,"isarray":67,"stream":24,"string_decoder/":68}],64:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a transform stream is a readable/writable stream where you do | |
// something with the data. Sometimes it's called a "filter", | |
// but that's not a great name for it, since that implies a thing where | |
// some bits pass through, and others are simply ignored. (That would | |
// be a valid example of a transform, of course.) | |
// | |
// While the output is causally related to the input, it's not a | |
// necessarily symmetric or synchronous transformation. For example, | |
// a zlib stream might take multiple plain-text writes(), and then | |
// emit a single compressed chunk some time in the future. | |
// | |
// Here's how this works: | |
// | |
// The Transform stream has all the aspects of the readable and writable | |
// stream classes. When you write(chunk), that calls _write(chunk,cb) | |
// internally, and returns false if there's a lot of pending writes | |
// buffered up. When you call read(), that calls _read(n) until | |
// there's enough pending readable data buffered up. | |
// | |
// In a transform stream, the written data is placed in a buffer. When | |
// _read(n) is called, it transforms the queued up data, calling the | |
// buffered _write cb's as it consumes chunks. If consuming a single | |
// written chunk would result in multiple output chunks, then the first | |
// outputted bit calls the readcb, and subsequent chunks just go into | |
// the read buffer, and will cause it to emit 'readable' if necessary. | |
// | |
// This way, back-pressure is actually determined by the reading side, | |
// since _read has to be called to start processing a new chunk. However, | |
// a pathological inflate type of transform can cause excessive buffering | |
// here. For example, imagine a stream where every byte of input is | |
// interpreted as an integer from 0-255, and then results in that many | |
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in | |
// 1kb of data being output. In this case, you could write a very small | |
// amount of input, and end up with a very large amount of output. In | |
// such a pathological inflating mechanism, there'd be no way to tell | |
// the system to stop doing the transform. A single 4MB write could | |
// cause the system to run out of memory. | |
// | |
// However, even in such a pathological case, only a single written chunk | |
// would be consumed, and then the rest would wait (un-transformed) until | |
// the results of the previous transformed chunk were consumed. | |
// Module wiring: export the Transform constructor and inherit the
// combined readable+writable plumbing from Duplex.
module.exports = Transform;
var Duplex = require('./_stream_duplex');
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
util.inherits(Transform, Duplex);
// Per-stream bookkeeping for a Transform: tracks the chunk/callback of
// the write currently awaiting transformation, plus whether a transform
// is in flight or has been requested by the readable side.
function TransformState(options, stream) {
  this.needTransform = false;  // readable side wants data we don't have yet
  this.transforming = false;   // a _transform call is currently in flight
  this.writecb = null;         // callback of the pending write, if any
  this.writechunk = null;      // chunk of the pending write, if any
  // bound completion callback handed to _transform implementations.
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };
}
// Completion callback for _transform(): clears the pending write, pushes
// any produced data to the readable side, acks the write callback, and
// pulls the next chunk if the readable side still has room.
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
// completing twice (or with no pending write) is a bug in the
// implementation class, not in this machinery.
if (!cb)
return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
if (data !== null && data !== undefined)
stream.push(data);
if (cb)
cb(er);
var rs = stream._readableState;
rs.reading = false;
// keep the pipeline moving while the read buffer is below its mark.
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
// Transform constructor: a Duplex whose written data is fed through
// _transform() before appearing on the readable side.
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
var ts = this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
// run the (optional) _flush hook before signalling EOF downstream.
this.once('finish', function() {
if ('function' === typeof this._flush)
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
// Data produced by a transform satisfies any pending read request, so the
// queued "need transform" flag is cleared before delegating to Duplex.
Transform.prototype.push = function(chunk, encoding) {
  var state = this._transformState;
  state.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
//
// This base implementation is deliberately abstract: using Transform
// without overriding _transform is a programmer error.
Transform.prototype._transform = function(chunk, encoding, cb) {
throw new Error('not implemented');
};
// Writable-side hook: stash the chunk/encoding/callback for _transform,
// and kick off a transform immediately if the readable side is waiting
// for data (or still has room below its high water mark).
Transform.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
var ts = this._transformState;
// only start a transform when a written chunk is waiting and no
// transform is already in flight.
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
// Called after 'finish' (and the optional _flush): verifies that all
// written data was consumed and no transform is still in flight, then
// signals EOF on the readable side with push(null).
// (Fix: removed the unused `rs` local that shadowed no purpose here.)
function done(stream, er) {
  if (er)
    return stream.emit('error', er);
  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;
  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');
  if (ts.transforming)
    throw new Error('calling transform done when still transforming');
  return stream.push(null);
}
},{"./_stream_duplex":61,"core-util-is":66,"inherits":33}],65:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable;
/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/
Writable.WritableState = WritableState;
/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/
var Stream = require('stream');
// Writable extends the legacy Stream base class (for emit/pipe plumbing).
util.inherits(Writable, Stream);
// A single queued write: the chunk, its encoding, and the user callback
// to fire once the chunk reaches the underlying sink.
function WriteReq(chunk, encoding, cb) {
  this.callback = cb;
  this.chunk = chunk;
  this.encoding = encoding;
}
// Mutable per-stream bookkeeping shared by every Writable instance.
// Accepts { highWaterMark, objectMode, decodeStrings, defaultEncoding }.
function WritableState(options, stream) {
options = options || {};
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function(er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
// queue of WriteReq entries buffered while a write is in flight
this.buffer = [];
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
}
// Writable constructor; options are forwarded to WritableState.
function Writable(options) {
// required lazily to avoid a circular dependency with _stream_duplex.
var Duplex = require('./_stream_duplex');
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable, they're instanceof Readable.
if (!(this instanceof Writable) && !(this instanceof Duplex))
return new Writable(options);
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
// A writable is a sink, not a source: piping *from* one is an error.
Writable.prototype.pipe = function() {
  var err = new Error('Cannot pipe. Not readable.');
  this.emit('error', err);
};
// Reject a write() issued after end(): emit 'error' synchronously and
// deliver the same error to the write callback on the next tick.
function writeAfterEnd(stream, state, cb) {
  var err = new Error('write after end');
  // TODO: defer error events consistently everywhere, not just the cb
  stream.emit('error', err);
  process.nextTick(function() {
    cb(err);
  });
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
  // in object mode anything goes; otherwise only buffers, strings,
  // null and undefined are acceptable chunks.
  var acceptable =
    state.objectMode ||
    Buffer.isBuffer(chunk) ||
    'string' === typeof chunk ||
    chunk === null ||
    chunk === undefined;
  if (acceptable)
    return true;
  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  // defer the callback so the caller's stack unwinds first.
  process.nextTick(function() {
    cb(er);
  });
  return false;
}
// Public write(): normalizes the (chunk, encoding, cb) argument forms,
// rejects writes after end(), and hands valid chunks to writeOrBuffer.
// Returns false when the buffered length reached the high water mark
// and the caller should wait for 'drain'.
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
// write(chunk, cb) form
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
else if (!encoding)
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
cb = function() {};
if (state.ended)
writeAfterEnd(this, state, cb);
else if (validChunk(this, state, chunk, cb))
ret = writeOrBuffer(this, state, chunk, encoding, cb);
return ret;
};
// Convert a string chunk to a Buffer up front, unless the stream is in
// object mode or decodeStrings was explicitly disabled.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode =
    !state.objectMode &&
    state.decodeStrings !== false &&
    typeof chunk === 'string';
  return shouldDecode ? new Buffer(chunk, encoding) : chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
// decodeChunk may have turned a string into a Buffer
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
// in objectMode every chunk counts as 1 against the watermark
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
if (!ret)
state.needDrain = true;
if (state.writing)
state.buffer.push(new WriteReq(chunk, encoding, cb));
else
doWrite(stream, state, len, chunk, encoding, cb);
return ret;
}
// Record the in-flight write on the state, then hand the chunk to the
// user-supplied _write. state.sync is true only while _write runs
// synchronously, so onwrite can tell immediate from deferred completion.
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writing = true;
  state.writecb = cb;
  state.writelen = len;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Deliver a _write error: the callback fires on the next tick when the
// failing _write completed synchronously (so the caller's stack unwinds
// first), immediately otherwise; then 'error' is emitted on the stream.
function onwriteError(stream, state, sync, er, cb) {
if (sync)
process.nextTick(function() {
cb(er);
});
else
cb(er);
// remember that 'error' fired so it isn't raised a second time later.
stream._writableState.errorEmitted = true;
stream.emit('error', er);
}
// Clear the in-flight write bookkeeping once _write has completed,
// crediting the written bytes back against the buffered length.
function onwriteStateUpdate(state) {
  state.writing = false;
  state.length -= state.writelen;
  state.writecb = null;
  state.writelen = 0;
}
// Completion callback for every _write(): updates the state, routes
// errors, drains queued writes, and defers the user callback to the
// next tick when _write completed synchronously.
function onwrite(stream, er) {
var state = stream._writableState;
// capture sync/cb before onwriteStateUpdate clears them.
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er)
onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(stream, state);
if (!finished && !state.bufferProcessing && state.buffer.length)
clearBuffer(stream, state);
if (sync) {
process.nextTick(function() {
afterWrite(stream, state, finished, cb);
});
} else {
afterWrite(stream, state, finished, cb);
}
}
}
// Runs after each successful write: emits 'drain' if the buffer emptied,
// invokes the user's write callback, then finishes the stream if ended.
function afterWrite(stream, state, finished, cb) {
if (!finished)
onwriteDrain(stream, state);
cb();
if (finished)
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
  // only fire once the buffer is empty AND a write() actually returned false.
  if (state.length !== 0 || !state.needDrain)
    return;
  state.needDrain = false;
  stream.emit('drain');
}
// if there's something in the buffer waiting, then process it
// Writes queued entries one at a time; stops early if a write goes
// async (state.writing stays true after doWrite), leaving the rest
// for the next onwrite to pick up.
function clearBuffer(stream, state) {
state.bufferProcessing = true;
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
state.bufferProcessing = false;
// drop the entries we consumed; `c` survives the loop via var hoisting.
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c);
else
state.buffer.length = 0;
}
// Abstract sink hook: concrete writables must override _write. The base
// implementation just reports the missing override through the callback.
Writable.prototype._write = function(chunk, encoding, cb) {
  var err = new Error('not implemented');
  cb(err);
};
// Public end(): accepts (chunk, encoding, cb) in the usual truncated
// forms, writes a final chunk if one was given, then begins shutdown.
Writable.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
// end(cb) form
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
// end(chunk, cb) form
cb = encoding;
encoding = null;
}
if (typeof chunk !== 'undefined' && chunk !== null)
this.write(chunk, encoding);
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable(this, state, cb);
};
// The stream may emit 'finish' only once end() was called, every byte
// has been flushed, nothing is in flight, and 'finish' hasn't fired yet.
function needFinish(stream, state) {
  if (!state.ending) return false;
  if (state.length !== 0) return false;
  if (state.finished) return false;
  return !state.writing;
}
// Emit 'finish' exactly once, as soon as the stream qualifies.
// Returns true when 'finish' was emitted by this call.
function finishMaybe(stream, state) {
  var need = needFinish(stream, state);
  if (!need)
    return need;
  state.finished = true;
  stream.emit('finish');
  return need;
}
// Begin shutdown after end(): mark ending, try to finish, and arrange
// for cb to run once 'finish' fires (next tick if already finished).
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished)
process.nextTick(cb);
else
stream.once('finish', cb);
}
state.ended = true;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./_stream_duplex":61,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"core-util-is":66,"inherits":33,"stream":24}],66:[function(require,module,exports){ | |
(function (Buffer){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.

// Shared helper: the [[Class]]-style tag, e.g. '[object Date]'.
function objectToString(o) {
  return Object.prototype.toString.call(o);
}

exports.isArray = isArray;
function isArray(ar) {
  return Array.isArray(ar);
}

exports.isBoolean = isBoolean;
function isBoolean(arg) {
  return typeof arg === 'boolean';
}

exports.isNull = isNull;
function isNull(arg) {
  return arg === null;
}

exports.isNullOrUndefined = isNullOrUndefined;
function isNullOrUndefined(arg) {
  // loose == deliberately matches both null and undefined.
  return arg == null;
}

exports.isNumber = isNumber;
function isNumber(arg) {
  return typeof arg === 'number';
}

exports.isString = isString;
function isString(arg) {
  return typeof arg === 'string';
}

exports.isSymbol = isSymbol;
function isSymbol(arg) {
  return typeof arg === 'symbol';
}

exports.isUndefined = isUndefined;
function isUndefined(arg) {
  return arg === void 0;
}

exports.isRegExp = isRegExp;
function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}

exports.isObject = isObject;
function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}

exports.isDate = isDate;
function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}

exports.isError = isError;
function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}

exports.isFunction = isFunction;
function isFunction(arg) {
  return typeof arg === 'function';
}

exports.isPrimitive = isPrimitive;
function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}

exports.isBuffer = isBuffer;
function isBuffer(arg) {
  // `Buffer` is the wrapper parameter supplied by the bundler.
  return Buffer.isBuffer(arg);
}
}).call(this,require("buffer").Buffer) | |
},{"buffer":20}],67:[function(require,module,exports){ | |
module.exports = Array.isArray || function (arr) { | |
return Object.prototype.toString.call(arr) == '[object Array]'; | |
}; | |
},{}],68:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var Buffer = require('buffer').Buffer;
// Prefer the native Buffer.isEncoding; fall back to a whitelist on very
// old Node versions that lack it.
var isBufferEncoding = Buffer.isEncoding
|| function(encoding) {
switch (encoding && encoding.toLowerCase()) {
case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
default: return false;
}
}
// Throw early on unsupported encodings so failures surface at
// construction time rather than mid-stream.
function assertEncoding(encoding) {
if (encoding && !isBufferEncoding(encoding)) {
throw new Error('Unknown encoding: ' + encoding);
}
}
// StringDecoder provides an interface for efficiently splitting a series of
// buffers into a series of JS strings without breaking apart multi-byte
// characters. CESU-8 is handled as part of the UTF-8 encoding.
//
// @TODO Handling all encodings inside a single object makes it very difficult
// to reason about this code, so it should be split up in the future.
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
// points as used by CESU-8.
var StringDecoder = exports.StringDecoder = function(encoding) {
// normalize e.g. 'UTF-8' -> 'utf8' (regex has no /g flag, so only the
// first '-' or '_' is stripped; enough for the supported encodings).
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
assertEncoding(encoding);
switch (this.encoding) {
case 'utf8':
// CESU-8 represents each of Surrogate Pair by 3-bytes
this.surrogateSize = 3;
break;
case 'ucs2':
case 'utf16le':
// UTF-16 represents each of Surrogate Pair by 2-bytes
this.surrogateSize = 2;
this.detectIncompleteChar = utf16DetectIncompleteChar;
break;
case 'base64':
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
this.surrogateSize = 3;
this.detectIncompleteChar = base64DetectIncompleteChar;
break;
default:
// single-byte encodings can never split a character; bypass the
// buffering machinery entirely.
this.write = passThroughWrite;
return;
}
// Enough space to store all bytes of a single character. UTF-8 needs 4
// bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
this.charBuffer = new Buffer(6);
// Number of bytes received for the current incomplete multi-byte character.
this.charReceived = 0;
// Number of bytes expected for the current incomplete multi-byte character.
this.charLength = 0;
};
// write decodes the given buffer and returns it as JS string that is
// guaranteed to not contain any partial multi-byte characters. Any partial
// character found at the end of the buffer is buffered up, and will be
// returned when calling write again with the remaining bytes.
//
// Note: Converting a Buffer containing an orphan surrogate to a String
// currently works, but converting a String to a Buffer (via `new Buffer`, or
// Buffer#write) will replace incomplete surrogates with the unicode
// replacement character. See https://codereview.chromium.org/121173009/ .
StringDecoder.prototype.write = function(buffer) {
var charStr = '';
// if our last write ended with an incomplete multibyte character
while (this.charLength) {
// determine how many remaining bytes this buffer has to offer for this char
var available = (buffer.length >= this.charLength - this.charReceived) ?
this.charLength - this.charReceived :
buffer.length;
// add the new bytes to the char buffer
buffer.copy(this.charBuffer, this.charReceived, 0, available);
this.charReceived += available;
if (this.charReceived < this.charLength) {
// still not enough chars in this buffer? wait for more ...
return '';
}
// remove bytes belonging to the current character from the buffer
buffer = buffer.slice(available, buffer.length);
// get the character that was split
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
var charCode = charStr.charCodeAt(charStr.length - 1);
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
// the decoded text ends on a lead surrogate: keep buffering until
// the trail surrogate arrives, then loop with the larger charLength.
this.charLength += this.surrogateSize;
charStr = '';
continue;
}
this.charReceived = this.charLength = 0;
// if there are no more bytes in this buffer, just emit our char
if (buffer.length === 0) {
return charStr;
}
break;
}
// determine and set charLength / charReceived
this.detectIncompleteChar(buffer);
var end = buffer.length;
if (this.charLength) {
// buffer the incomplete character bytes we got
buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
end -= this.charReceived;
}
charStr += buffer.toString(this.encoding, 0, end);
// (re-declares `end`/`charCode`: now the index and code of the last char)
var end = charStr.length - 1;
var charCode = charStr.charCodeAt(end);
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
var size = this.surrogateSize;
this.charLength += size;
this.charReceived += size;
// shift buffered bytes over and stash the lead surrogate's bytes
this.charBuffer.copy(this.charBuffer, size, 0, size);
buffer.copy(this.charBuffer, 0, 0, size);
// emit everything except the trailing lead surrogate
return charStr.substring(0, end);
}
// or just emit the charStr
return charStr;
};
// detectIncompleteChar determines whether the given buffer ends with an
// incomplete UTF-8 character. If so it sets this.charLength to that
// character's total byte length and this.charReceived to how many of its
// bytes are already present at the tail of the buffer.
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // An incomplete sequence's lead byte can sit at most 3 bytes from the end
  // (a 4-byte sequence missing its final byte), so only the last few bytes
  // need inspection.
  var i = buffer.length < 3 ? buffer.length : 3;
  // Scan from the earliest candidate position toward the end, looking for a
  // lead byte announcing a sequence longer than the bytes that remain.
  // See http://en.wikipedia.org/wiki/UTF-8#Description
  for (; i > 0; i--) {
    var lead = buffer[buffer.length - i];
    if (i === 1 && lead >> 5 === 0x06) {
      // 110xxxxx — lead of a 2-byte sequence
      this.charLength = 2;
      break;
    }
    if (i <= 2 && lead >> 4 === 0x0E) {
      // 1110xxxx — lead of a 3-byte sequence
      this.charLength = 3;
      break;
    }
    if (i <= 3 && lead >> 3 === 0x1E) {
      // 11110xxx — lead of a 4-byte sequence
      this.charLength = 4;
      break;
    }
  }
  // If no incomplete lead byte was found the loop ran i down to 0.
  this.charReceived = i;
};
// Flushes the decoder: decodes an optional final buffer, then appends any
// bytes still sitting in charBuffer, decoded as-is (i.e. an incomplete
// character at end-of-stream is emitted rather than dropped).
StringDecoder.prototype.end = function(buffer) {
  var result = '';
  if (buffer && buffer.length) {
    result = this.write(buffer);
  }
  if (this.charReceived) {
    // Drain whatever partial-character bytes remain buffered.
    result += this.charBuffer.slice(0, this.charReceived).toString(this.encoding);
  }
  return result;
};
// Stateless write() used for single-byte encodings: no bytes can straddle
// chunk boundaries, so the whole buffer is decoded in one shot.
function passThroughWrite(buffer) {
  var encoding = this.encoding;
  return buffer.toString(encoding);
}
// UTF-16LE units are 2 bytes wide: an odd-length buffer means exactly one
// byte of the final unit has arrived.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover ? 2 : 0;
}
// Base64 encodes in 3-byte groups: any remainder after dividing by 3 is the
// partial group carried over to the next chunk.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover ? 3 : 0;
}
},{"buffer":20}],69:[function(require,module,exports){ | |
// Entry point of the bundled readable-stream package: the module's main
// export IS the Readable class, with the other stream classes attached as
// properties (same shape as Node core's require('stream')).
exports = module.exports = require('./lib/_stream_readable.js');
exports.Readable = exports;
exports.Writable = require('./lib/_stream_writable.js');
exports.Duplex = require('./lib/_stream_duplex.js');
exports.Transform = require('./lib/_stream_transform.js');
exports.PassThrough = require('./lib/_stream_passthrough.js');
},{"./lib/_stream_duplex.js":61,"./lib/_stream_passthrough.js":62,"./lib/_stream_readable.js":63,"./lib/_stream_transform.js":64,"./lib/_stream_writable.js":65}],70:[function(require,module,exports){ | |
module.exports = extend | |
// Shallow-merges the own enumerable properties of every argument into a new
// object and returns it; later arguments win on key collisions and no input
// is mutated. null/undefined arguments are skipped (for-in tolerates them).
function extend() {
    // Use the canonical hasOwnProperty so a source that shadows the method
    // with an own "hasOwnProperty" key, or one created via
    // Object.create(null), cannot crash or corrupt the merge.
    var hasOwn = Object.prototype.hasOwnProperty
    var target = {}
    for (var i = 0; i < arguments.length; i++) {
        var source = arguments[i]
        for (var key in source) {
            if (hasOwn.call(source, key)) {
                target[key] = source[key]
            }
        }
    }
    return target
}
},{}],71:[function(require,module,exports){ | |
module.exports={ | |
"name": "levelup", | |
"description": "Fast & simple storage - a Node.js-style LevelDB wrapper", | |
"version": "0.18.6", | |
"contributors": [ | |
{ | |
"name": "Rod Vagg", | |
"email": "[email protected]", | |
"url": "https://github.com/rvagg" | |
}, | |
{ | |
"name": "John Chesley", | |
"email": "[email protected]", | |
"url": "https://github.com/chesles/" | |
}, | |
{ | |
"name": "Jake Verbaten", | |
"email": "[email protected]", | |
"url": "https://github.com/raynos" | |
}, | |
{ | |
"name": "Dominic Tarr", | |
"email": "[email protected]", | |
"url": "https://github.com/dominictarr" | |
}, | |
{ | |
"name": "Max Ogden", | |
"email": "[email protected]", | |
"url": "https://github.com/maxogden" | |
}, | |
{ | |
"name": "Lars-Magnus Skog", | |
"email": "[email protected]", | |
"url": "https://github.com/ralphtheninja" | |
}, | |
{ | |
"name": "David Björklund", | |
"email": "[email protected]", | |
"url": "https://github.com/kesla" | |
}, | |
{ | |
"name": "Julian Gruber", | |
"email": "[email protected]", | |
"url": "https://github.com/juliangruber" | |
}, | |
{ | |
"name": "Paolo Fragomeni", | |
"email": "[email protected]", | |
"url": "https://github.com/hij1nx" | |
}, | |
{ | |
"name": "Anton Whalley", | |
"email": "[email protected]", | |
"url": "https://github.com/No9" | |
}, | |
{ | |
"name": "Matteo Collina", | |
"email": "[email protected]", | |
"url": "https://github.com/mcollina" | |
}, | |
{ | |
"name": "Pedro Teixeira", | |
"email": "[email protected]", | |
"url": "https://github.com/pgte" | |
}, | |
{ | |
"name": "James Halliday", | |
"email": "[email protected]", | |
"url": "https://github.com/substack" | |
} | |
], | |
"repository": { | |
"type": "git", | |
"url": "https://github.com/rvagg/node-levelup.git" | |
}, | |
"homepage": "https://github.com/rvagg/node-levelup", | |
"keywords": [ | |
"leveldb", | |
"stream", | |
"database", | |
"db", | |
"store", | |
"storage", | |
"json" | |
], | |
"main": "lib/levelup.js", | |
"dependencies": { | |
"bl": "~0.8.1", | |
"deferred-leveldown": "~0.2.0", | |
"errno": "~0.1.1", | |
"prr": "~0.0.0", | |
"readable-stream": "~1.0.26", | |
"semver": "~2.3.1", | |
"xtend": "~3.0.0" | |
}, | |
"devDependencies": { | |
"leveldown": "~0.10.0", | |
"bustermove": "*", | |
"tap": "*", | |
"referee": "*", | |
"rimraf": "*", | |
"async": "*", | |
"fstream": "*", | |
"tar": "*", | |
"mkfiletree": "*", | |
"readfiletree": "*", | |
"slow-stream": ">=0.0.4", | |
"delayed": "*", | |
"boganipsum": "*", | |
"du": "*", | |
"memdown": "*", | |
"msgpack-js": "*" | |
}, | |
"browser": { | |
"leveldown": false, | |
"leveldown/package": false, | |
"semver": false | |
}, | |
"scripts": { | |
"test": "tap test/*-test.js --stderr", | |
"functionaltests": "node ./test/functional/fstream-test.js && node ./test/functional/binary-data-test.js && node ./test/functional/compat-test.js", | |
"alltests": "npm test && npm run-script functionaltests" | |
}, | |
"license": "MIT", | |
"readme": "LevelUP\n=======\n\n\n\n**Fast & simple storage - a Node.js-style LevelDB wrapper**\n\n[](http://travis-ci.org/rvagg/node-levelup)\n\n[](https://nodei.co/npm/levelup/) [](https://nodei.co/npm/levelup/)\n\n\n * <a href=\"#intro\">Introduction</a>\n * <a href=\"#leveldown\">Relationship to LevelDOWN</a>\n * <a href=\"#platforms\">Tested & supported platforms</a>\n * <a href=\"#basic\">Basic usage</a>\n * <a href=\"#api\">API</a>\n * <a href=\"#events\">Events</a>\n * <a href=\"#json\">JSON data</a>\n * <a href=\"#custom_encodings\">Custom encodings</a>\n * <a href=\"#extending\">Extending LevelUP</a>\n * <a href=\"#multiproc\">Multi-process access</a>\n * <a href=\"#support\">Getting support</a>\n * <a href=\"#contributing\">Contributing</a>\n * <a href=\"#licence\">Licence & copyright</a>\n\n<a name=\"intro\"></a>\nIntroduction\n------------\n\n**[LevelDB](http://code.google.com/p/leveldb/)** is a simple key/value data store built by Google, inspired by BigTable. It's used in Google Chrome and many other products. LevelDB supports arbitrary byte arrays as both keys and values, singular *get*, *put* and *delete* operations, *batched put and delete*, bi-directional iterators and simple compression using the very fast [Snappy](http://code.google.com/p/snappy/) algorithm.\n\n**LevelUP** aims to expose the features of LevelDB in a **Node.js-friendly way**. All standard `Buffer` encoding types are supported, as is a special JSON encoding. LevelDB's iterators are exposed as a Node.js-style **readable stream** a matching **writeable stream** converts writes to *batch* operations.\n\nLevelDB stores entries **sorted lexicographically by keys**. 
This makes LevelUP's <a href=\"#createReadStream\"><code>ReadStream</code></a> interface a very powerful query mechanism.\n\n**LevelUP** is an **OPEN Open Source Project**, see the <a href=\"#contributing\">Contributing</a> section to find out what this means.\n\n<a name=\"leveldown\"></a>\nRelationship to LevelDOWN\n-------------------------\n\nLevelUP is designed to be backed by **[LevelDOWN](https://github.com/rvagg/node-leveldown/)** which provides a pure C++ binding to LevelDB and can be used as a stand-alone package if required.\n\n**As of version 0.9, LevelUP no longer requires LevelDOWN as a dependency so you must `npm install leveldown` when you install LevelUP.**\n\nLevelDOWN is now optional because LevelUP can be used with alternative backends, such as **[level.js](https://github.com/maxogden/level.js)** in the browser or [MemDOWN](https://github.com/rvagg/node-memdown) for a pure in-memory store.\n\nLevelUP will look for LevelDOWN and throw an error if it can't find it in its Node `require()` path. It will also tell you if the installed version of LevelDOWN is incompatible.\n\n**The [level](https://github.com/level/level) package is available as an alternative installation mechanism.** Install it instead to automatically get both LevelUP & LevelDOWN. It exposes LevelUP on its export (i.e. you can `var leveldb = require('level')`).\n\n\n<a name=\"platforms\"></a>\nTested & supported platforms\n----------------------------\n\n * **Linux**: including ARM platforms such as Raspberry Pi *and Kindle!*\n * **Mac OS**\n * **Solaris**: including Joyent's SmartOS & Nodejitsu\n * **Windows**: Node 0.10 and above only. 
See installation instructions for *node-gyp's* dependencies [here](https://github.com/TooTallNate/node-gyp#installation), you'll need these (free) components from Microsoft to compile and run any native Node add-on in Windows.\n\n<a name=\"basic\"></a>\nBasic usage\n-----------\n\nFirst you need to install LevelUP!\n\n```sh\n$ npm install levelup leveldown\n```\n\nOr\n\n```sh\n$ npm install level\n```\n\n*(this second option requires you to use LevelUP by calling `var levelup = require('level')`)*\n\n\nAll operations are asynchronous although they don't necessarily require a callback if you don't need to know when the operation was performed.\n\n```js\nvar levelup = require('levelup')\n\n// 1) Create our database, supply location and options.\n// This will create or open the underlying LevelDB store.\nvar db = levelup('./mydb')\n\n// 2) put a key & value\ndb.put('name', 'LevelUP', function (err) {\n if (err) return console.log('Ooops!', err) // some kind of I/O error\n\n // 3) fetch by key\n db.get('name', function (err, value) {\n if (err) return console.log('Ooops!', err) // likely the key was not found\n\n // ta da!\n console.log('name=' + value)\n })\n})\n```\n\n<a name=\"api\"></a>\n## API\n\n * <a href=\"#ctor\"><code><b>levelup()</b></code></a>\n * <a href=\"#open\"><code>db.<b>open()</b></code></a>\n * <a href=\"#close\"><code>db.<b>close()</b></code></a>\n * <a href=\"#put\"><code>db.<b>put()</b></code></a>\n * <a href=\"#get\"><code>db.<b>get()</b></code></a>\n * <a href=\"#del\"><code>db.<b>del()</b></code></a>\n * <a href=\"#batch\"><code>db.<b>batch()</b></code> *(array form)*</a>\n * <a href=\"#batch_chained\"><code>db.<b>batch()</b></code> *(chained form)*</a>\n * <a href=\"#isOpen\"><code>db.<b>isOpen()</b></code></a>\n * <a href=\"#isClosed\"><code>db.<b>isClosed()</b></code></a>\n * <a href=\"#createReadStream\"><code>db.<b>createReadStream()</b></code></a>\n * <a href=\"#createKeyStream\"><code>db.<b>createKeyStream()</b></code></a>\n * <a 
href=\"#createValueStream\"><code>db.<b>createValueStream()</b></code></a>\n * <a href=\"#createWriteStream\"><code>db.<b>createWriteStream()</b></code></a>\n\n### Special operations exposed by LevelDOWN\n\n * <a href=\"#approximateSize\"><code>db.db.<b>approximateSize()</b></code></a>\n * <a href=\"#getProperty\"><code>db.db.<b>getProperty()</b></code></a>\n * <a href=\"#destroy\"><code><b>leveldown.destroy()</b></code></a>\n * <a href=\"#repair\"><code><b>leveldown.repair()</b></code></a>\n\n\n--------------------------------------------------------\n<a name=\"ctor\"></a>\n### levelup(location[, options[, callback]])\n### levelup(options[, callback ])\n### levelup(db[, callback ])\n<code>levelup()</code> is the main entry point for creating a new LevelUP instance and opening the underlying store with LevelDB.\n\nThis function returns a new instance of LevelUP and will also initiate an <a href=\"#open\"><code>open()</code></a> operation. Opening the database is an asynchronous operation which will trigger your callback if you provide one. The callback should take the form: `function (err, db) {}` where the `db` is the LevelUP instance. 
If you don't provide a callback, any read & write operations are simply queued internally until the database is fully opened.\n\nThis leads to two alternative ways of managing a new LevelUP instance:\n\n```js\nlevelup(location, options, function (err, db) {\n if (err) throw err\n db.get('foo', function (err, value) {\n if (err) return console.log('foo does not exist')\n console.log('got foo =', value)\n })\n})\n\n// vs the equivalent:\n\nvar db = levelup(location, options) // will throw if an error occurs\ndb.get('foo', function (err, value) {\n if (err) return console.log('foo does not exist')\n console.log('got foo =', value)\n})\n```\n\nThe `location` argument is available as a read-only property on the returned LevelUP instance.\n\nThe `levelup(options, callback)` form (with optional `callback`) is only available where you provide a valid `'db'` property on the options object (see below). Only for back-ends that don't require a `location` argument, such as [MemDOWN](https://github.com/rvagg/memdown).\n\nFor example:\n\n```js\nvar levelup = require('levelup')\nvar memdown = require('memdown')\nvar db = levelup({ db: memdown })\n```\n\nThe `levelup(db, callback)` form (with optional `callback`) is only available where `db` is a factory function, as would be provided as a `'db'` property on an `options` object (see below). Only for back-ends that don't require a `location` argument, such as [MemDOWN](https://github.com/rvagg/memdown).\n\nFor example:\n\n```js\nvar levelup = require('levelup')\nvar memdown = require('memdown')\nvar db = levelup(memdown)\n```\n\n#### `options`\n\n`levelup()` takes an optional options object as its second argument; the following properties are accepted:\n\n* `'createIfMissing'` *(boolean, default: `true`)*: If `true`, will initialise an empty database at the specified location if one doesn't already exist. 
If `false` and a database doesn't exist you will receive an error in your `open()` callback and your database won't open.\n\n* `'errorIfExists'` *(boolean, default: `false`)*: If `true`, you will receive an error in your `open()` callback if the database exists at the specified location.\n\n* `'compression'` *(boolean, default: `true`)*: If `true`, all *compressible* data will be run through the Snappy compression algorithm before being stored. Snappy is very fast and shouldn't gain much speed by disabling so leave this on unless you have good reason to turn it off.\n\n* `'cacheSize'` *(number, default: `8 * 1024 * 1024`)*: The size (in bytes) of the in-memory [LRU](http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used) cache with frequently used uncompressed block contents. \n\n* `'keyEncoding'` and `'valueEncoding'` *(string, default: `'utf8'`)*: The encoding of the keys and values passed through Node.js' `Buffer` implementation (see [Buffer#toString()](http://nodejs.org/docs/latest/api/buffer.html#buffer_buf_tostring_encoding_start_end)).\n <p><code>'utf8'</code> is the default encoding for both keys and values so you can simply pass in strings and expect strings from your <code>get()</code> operations. You can also pass <code>Buffer</code> objects as keys and/or values and conversion will be performed.</p>\n <p>Supported encodings are: hex, utf8, ascii, binary, base64, ucs2, utf16le.</p>\n <p><code>'json'</code> encoding is also supported, see below.</p>\n\n* `'db'` *(object, default: LevelDOWN)*: LevelUP is backed by [LevelDOWN](https://github.com/rvagg/node-leveldown/) to provide an interface to LevelDB. You can completely replace the use of LevelDOWN by providing a \"factory\" function that will return a LevelDOWN API compatible object given a `location` argument. For further information, see [MemDOWN](https://github.com/rvagg/node-memdown/), a fully LevelDOWN API compatible replacement that uses a memory store rather than LevelDB. 
Also see [Abstract LevelDOWN](http://github.com/rvagg/node-abstract-leveldown), a partial implementation of the LevelDOWN API that can be used as a base prototype for a LevelDOWN substitute.\n\nAdditionally, each of the main interface methods accept an optional options object that can be used to override `'keyEncoding'` and `'valueEncoding'`.\n\n--------------------------------------------------------\n<a name=\"open\"></a>\n### db.open([callback])\n<code>open()</code> opens the underlying LevelDB store. In general **you should never need to call this method directly** as it's automatically called by <a href=\"#ctor\"><code>levelup()</code></a>.\n\nHowever, it is possible to *reopen* a database after it has been closed with <a href=\"#close\"><code>close()</code></a>, although this is not generally advised.\n\n--------------------------------------------------------\n<a name=\"close\"></a>\n### db.close([callback])\n<code>close()</code> closes the underlying LevelDB store. The callback will receive any error encountered during closing as the first argument.\n\nYou should always clean up your LevelUP instance by calling `close()` when you no longer need it to free up resources. A LevelDB store cannot be opened by multiple instances of LevelDB/LevelUP simultaneously.\n\n--------------------------------------------------------\n<a name=\"put\"></a>\n### db.put(key, value[, options][, callback])\n<code>put()</code> is the primary method for inserting data into the store. 
Both the `key` and `value` can be arbitrary data objects.\n\nThe callback argument is optional but if you don't provide one and an error occurs then expect the error to be thrown.\n\n#### `options`\n\nEncoding of the `key` and `value` objects will adhere to `'keyEncoding'` and `'valueEncoding'` options provided to <a href=\"#ctor\"><code>levelup()</code></a>, although you can provide alternative encoding settings in the options for `put()` (it's recommended that you stay consistent in your encoding of keys and values in a single store).\n\nIf you provide a `'sync'` value of `true` in your `options` object, LevelDB will perform a synchronous write of the data; although the operation will be asynchronous as far as Node is concerned. Normally, LevelDB passes the data to the operating system for writing and returns immediately, however a synchronous write will use `fsync()` or equivalent so your callback won't be triggered until the data is actually on disk. Synchronous filesystem writes are **significantly** slower than asynchronous writes but if you want to be absolutely sure that the data is flushed then you can use `'sync': true`.\n\n--------------------------------------------------------\n<a name=\"get\"></a>\n### db.get(key[, options][, callback])\n<code>get()</code> is the primary method for fetching data from the store. The `key` can be an arbitrary data object. If it doesn't exist in the store then the callback will receive an error as its first argument. A not-found err object will be of type `'NotFoundError'` so you can `err.type == 'NotFoundError'` or you can perform a truthy test on the property `err.notFound`.\n\n```js\ndb.get('foo', function (err, value) {\n if (err) {\n if (err.notFound) {\n // handle a 'NotFoundError' here\n return\n }\n // I/O or other error, pass it up the callback chain\n return callback(err)\n }\n\n // .. 
handle `value` here\n})\n```\n\n#### `options`\n\nEncoding of the `key` object will adhere to the `'keyEncoding'` option provided to <a href=\"#ctor\"><code>levelup()</code></a>, although you can provide alternative encoding settings in the options for `get()` (it's recommended that you stay consistent in your encoding of keys and values in a single store).\n\nLevelDB will by default fill the in-memory LRU Cache with data from a call to get. Disabling this is done by setting `fillCache` to `false`. \n\n--------------------------------------------------------\n<a name=\"del\"></a>\n### db.del(key[, options][, callback])\n<code>del()</code> is the primary method for removing data from the store.\n\n#### `options`\n\nEncoding of the `key` object will adhere to the `'keyEncoding'` option provided to <a href=\"#ctor\"><code>levelup()</code></a>, although you can provide alternative encoding settings in the options for `del()` (it's recommended that you stay consistent in your encoding of keys and values in a single store).\n\nA `'sync'` option can also be passed, see <a href=\"#put\"><code>put()</code></a> for details on how this works.\n\n--------------------------------------------------------\n<a name=\"batch\"></a>\n### db.batch(array[, options][, callback]) *(array form)*\n<code>batch()</code> can be used for very fast bulk-write operations (both *put* and *delete*). The `array` argument should contain a list of operations to be executed sequentially, although as a whole they are performed as an atomic operation inside LevelDB. Each operation is contained in an object having the following properties: `type`, `key`, `value`, where the *type* is either `'put'` or `'del'`. In the case of `'del'` the `'value'` property is ignored. 
Any entries with a `'key'` of `null` or `undefined` will cause an error to be returned on the `callback` and any `'type': 'put'` entry with a `'value'` of `null` or `undefined` will return an error.\n\n```js\nvar ops = [\n { type: 'del', key: 'father' }\n , { type: 'put', key: 'name', value: 'Yuri Irsenovich Kim' }\n , { type: 'put', key: 'dob', value: '16 February 1941' }\n , { type: 'put', key: 'spouse', value: 'Kim Young-sook' }\n , { type: 'put', key: 'occupation', value: 'Clown' }\n]\n\ndb.batch(ops, function (err) {\n if (err) return console.log('Ooops!', err)\n console.log('Great success dear leader!')\n})\n```\n\n#### `options`\n\nSee <a href=\"#put\"><code>put()</code></a> for a discussion on the `options` object. You can overwrite default `'keyEncoding'` and `'valueEncoding'` and also specify the use of `sync` filesystem operations.\n\nIn addition to encoding options for the whole batch you can also overwrite the encoding per operation, like:\n\n```js\nvar ops = [{\n type : 'put'\n , key : new Buffer([1, 2, 3])\n , value : { some: 'json' }\n , keyEncoding : 'binary'\n , valueEncoding : 'json'\n}]\n```\n\n--------------------------------------------------------\n<a name=\"batch_chained\"></a>\n### db.batch() *(chained form)*\n<code>batch()</code>, when called with no arguments will return a `Batch` object which can be used to build, and eventually commit, an atomic LevelDB batch operation. 
Depending on how it's used, it is possible to obtain greater performance when using the chained form of `batch()` over the array form.\n\n```js\ndb.batch()\n .del('father')\n .put('name', 'Yuri Irsenovich Kim')\n .put('dob', '16 February 1941')\n .put('spouse', 'Kim Young-sook')\n .put('occupation', 'Clown')\n .write(function () { console.log('Done!') })\n```\n\n<b><code>batch.put(key, value[, options])</code></b>\n\nQueue a *put* operation on the current batch, not committed until a `write()` is called on the batch.\n\nThe optional `options` argument can be used to override the default `'keyEncoding'` and/or `'valueEncoding'`.\n\nThis method may `throw` a `WriteError` if there is a problem with your put (such as the `value` being `null` or `undefined`).\n\n<b><code>batch.del(key[, options])</code></b>\n\nQueue a *del* operation on the current batch, not committed until a `write()` is called on the batch.\n\nThe optional `options` argument can be used to override the default `'keyEncoding'`.\n\nThis method may `throw` a `WriteError` if there is a problem with your delete.\n\n<b><code>batch.clear()</code></b>\n\nClear all queued operations on the current batch, any previous operations will be discarded.\n\n<b><code>batch.write([callback])</code></b>\n\nCommit the queued operations for this batch. All operations not *cleared* will be written to the database atomically, that is, they will either all succeed or fail with no partial commits. 
The optional `callback` will be called when the operation has completed with an *error* argument if an error has occurred; if no `callback` is supplied and an error occurs then this method will `throw` a `WriteError`.\n\n\n--------------------------------------------------------\n<a name=\"isOpen\"></a>\n### db.isOpen()\n\nA LevelUP object can be in one of the following states:\n\n * *\"new\"* - newly created, not opened or closed\n * *\"opening\"* - waiting for the database to be opened\n * *\"open\"* - successfully opened the database, available for use\n * *\"closing\"* - waiting for the database to be closed\n * *\"closed\"* - database has been successfully closed, should not be used\n\n`isOpen()` will return `true` only when the state is \"open\".\n\n--------------------------------------------------------\n<a name=\"isClosed\"></a>\n### db.isClosed()\n\n*See <a href=\"#put\"><code>isOpen()</code></a>*\n\n`isClosed()` will return `true` only when the state is \"closing\" *or* \"closed\", it can be useful for determining if read and write operations are permissible.\n\n--------------------------------------------------------\n<a name=\"createReadStream\"></a>\n### db.createReadStream([options])\n\nYou can obtain a **ReadStream** of the full database by calling the `createReadStream()` method. The resulting stream is a complete Node.js-style [Readable Stream](http://nodejs.org/docs/latest/api/stream.html#stream_readable_stream) where `'data'` events emit objects with `'key'` and `'value'` pairs. 
You can also use the `start`, `end` and `limit` options to control the range of keys that are streamed.\n\n```js\ndb.createReadStream()\n .on('data', function (data) {\n console.log(data.key, '=', data.value)\n })\n .on('error', function (err) {\n console.log('Oh my!', err)\n })\n .on('close', function () {\n console.log('Stream closed')\n })\n .on('end', function () {\n console.log('Stream closed')\n })\n```\n\nThe standard `pause()`, `resume()` and `destroy()` methods are implemented on the ReadStream, as is `pipe()` (see below). `'data'`, '`error'`, `'end'` and `'close'` events are emitted.\n\nAdditionally, you can supply an options object as the first parameter to `createReadStream()` with the following options:\n\n* `'start'`: the key you wish to start the read at. By default it will start at the beginning of the store. Note that the *start* doesn't have to be an actual key that exists, LevelDB will simply find the *next* key, greater than the key you provide.\n\n* `'end'`: the key you wish to end the read on. By default it will continue until the end of the store. Again, the *end* doesn't have to be an actual key as an (inclusive) `<=`-type operation is performed to detect the end. You can also use the `destroy()` method instead of supplying an `'end'` parameter to achieve the same effect.\n\n* `'reverse'` *(boolean, default: `false`)*: a boolean, set to true if you want the stream to go in reverse order. Beware that due to the way LevelDB works, a reverse seek will be slower than a forward seek.\n\n* `'keys'` *(boolean, default: `true`)*: whether the `'data'` event should contain keys. If set to `true` and `'values'` set to `false` then `'data'` events will simply be keys, rather than objects with a `'key'` property. Used internally by the `createKeyStream()` method.\n\n* `'values'` *(boolean, default: `true`)*: whether the `'data'` event should contain values. 
If set to `true` and `'keys'` set to `false` then `'data'` events will simply be values, rather than objects with a `'value'` property. Used internally by the `createValueStream()` method.\n\n* `'limit'` *(number, default: `-1`)*: limit the number of results collected by this stream. This number represents a *maximum* number of results and may not be reached if you get to the end of the store or your `'end'` value first. A value of `-1` means there is no limit.\n\n* `'fillCache'` *(boolean, default: `false`)*: wheather LevelDB's LRU-cache should be filled with data read.\n\n* `'keyEncoding'` / `'valueEncoding'` *(string)*: the encoding applied to each read piece of data.\n\n--------------------------------------------------------\n<a name=\"createKeyStream\"></a>\n### db.createKeyStream([options])\n\nA **KeyStream** is a **ReadStream** where the `'data'` events are simply the keys from the database so it can be used like a traditional stream rather than an object stream.\n\nYou can obtain a KeyStream either by calling the `createKeyStream()` method on a LevelUP object or by passing passing an options object to `createReadStream()` with `keys` set to `true` and `values` set to `false`.\n\n```js\ndb.createKeyStream()\n .on('data', function (data) {\n console.log('key=', data)\n })\n\n// same as:\ndb.createReadStream({ keys: true, values: false })\n .on('data', function (data) {\n console.log('key=', data)\n })\n```\n\n--------------------------------------------------------\n<a name=\"createValueStream\"></a>\n### db.createValueStream([options])\n\nA **ValueStream** is a **ReadStream** where the `'data'` events are simply the values from the database so it can be used like a traditional stream rather than an object stream.\n\nYou can obtain a ValueStream either by calling the `createValueStream()` method on a LevelUP object or by passing passing an options object to `createReadStream()` with `values` set to `true` and `keys` set to 
`false`.\n\n```js\ndb.createValueStream()\n .on('data', function (data) {\n console.log('value=', data)\n })\n\n// same as:\ndb.createReadStream({ keys: false, values: true })\n .on('data', function (data) {\n console.log('value=', data)\n })\n```\n\n--------------------------------------------------------\n<a name=\"createWriteStream\"></a>\n### db.createWriteStream([options])\n\nA **WriteStream** can be obtained by calling the `createWriteStream()` method. The resulting stream is a complete Node.js-style [Writable Stream](http://nodejs.org/docs/latest/api/stream.html#stream_writable_stream) which accepts objects with `'key'` and `'value'` pairs on its `write()` method.\n\nThe WriteStream will buffer writes and submit them as a `batch()` operations where writes occur *within the same tick*.\n\n```js\nvar ws = db.createWriteStream()\n\nws.on('error', function (err) {\n console.log('Oh my!', err)\n})\nws.on('close', function () {\n console.log('Stream closed')\n})\n\nws.write({ key: 'name', value: 'Yuri Irsenovich Kim' })\nws.write({ key: 'dob', value: '16 February 1941' })\nws.write({ key: 'spouse', value: 'Kim Young-sook' })\nws.write({ key: 'occupation', value: 'Clown' })\nws.end()\n```\n\nThe standard `write()`, `end()`, `destroy()` and `destroySoon()` methods are implemented on the WriteStream. 
`'drain'`, `'error'`, `'close'` and `'pipe'` events are emitted.\n\nYou can specify encodings both for the whole stream and individual entries:\n\nTo set the encoding for the whole stream, provide an options object as the first parameter to `createWriteStream()` with `'keyEncoding'` and/or `'valueEncoding'`.\n\nTo set the encoding for an individual entry:\n\n```js\nwriteStream.write({\n key : new Buffer([1, 2, 3])\n , value : { some: 'json' }\n , keyEncoding : 'binary'\n , valueEncoding : 'json'\n})\n```\n\n#### write({ type: 'put' })\n\nIf individual `write()` operations are performed with a `'type'` property of `'del'`, they will be passed on as `'del'` operations to the batch.\n\n```js\nvar ws = db.createWriteStream()\n\nws.on('error', function (err) {\n console.log('Oh my!', err)\n})\nws.on('close', function () {\n console.log('Stream closed')\n})\n\nws.write({ type: 'del', key: 'name' })\nws.write({ type: 'del', key: 'dob' })\nws.write({ type: 'put', key: 'spouse' })\nws.write({ type: 'del', key: 'occupation' })\nws.end()\n```\n\n#### db.createWriteStream({ type: 'del' })\n\nIf the *WriteStream* is created with a `'type'` option of `'del'`, all `write()` operations will be interpreted as `'del'`, unless explicitly specified as `'put'`.\n\n```js\nvar ws = db.createWriteStream({ type: 'del' })\n\nws.on('error', function (err) {\n console.log('Oh my!', err)\n})\nws.on('close', function () {\n console.log('Stream closed')\n})\n\nws.write({ key: 'name' })\nws.write({ key: 'dob' })\n// but it can be overridden\nws.write({ type: 'put', key: 'spouse', value: 'Ri Sol-ju' })\nws.write({ key: 'occupation' })\nws.end()\n```\n\n#### Pipes and Node Stream compatibility\n\nA ReadStream can be piped directly to a WriteStream, allowing for easy copying of an entire database. 
A simple `copy()` operation is included in LevelUP that performs exactly this on two open databases:\n\n```js\nfunction copy (srcdb, dstdb, callback) {\n srcdb.createReadStream().pipe(dstdb.createWriteStream()).on('close', callback)\n}\n```\n\nThe ReadStream is also [fstream](https://github.com/isaacs/fstream)-compatible which means you should be able to pipe to and from fstreams. So you can serialize and deserialize an entire database to a directory where keys are filenames and values are their contents, or even into a *tar* file using [node-tar](https://github.com/isaacs/node-tar). See the [fstream functional test](https://github.com/rvagg/node-levelup/blob/master/test/functional/fstream-test.js) for an example. *(Note: I'm not really sure there's a great use-case for this but it's a fun example and it helps to harden the stream implementations.)*\n\nKeyStreams and ValueStreams can be treated like standard streams of raw data. If `'keyEncoding'` or `'valueEncoding'` is set to `'binary'` the `'data'` events will simply be standard Node `Buffer` objects straight out of the data store.\n\n\n--------------------------------------------------------\n<a name='approximateSize'></a>\n### db.db.approximateSize(start, end, callback)\n<code>approximateSize()</code> can used to get the approximate number of bytes of file system space used by the range `[start..end)`. The result may not include recently written data.\n\n```js\nvar db = require('level')('./huge.db')\n\ndb.db.approximateSize('a', 'c', function (err, size) {\n if (err) return console.error('Ooops!', err)\n console.log('Approximate size of range is %d', size)\n})\n```\n\n**Note:** `approximateSize()` is available via [LevelDOWN](https://github.com/rvagg/node-leveldown/), which by default is accessible as the `db` property of your LevelUP instance. 
This is a specific LevelDB operation and is not likely to be available where you replace LevelDOWN with an alternative back-end via the `'db'` option.\n\n\n--------------------------------------------------------\n<a name='getProperty'></a>\n### db.db.getProperty(property)\n<code>getProperty</code> can be used to get internal details from LevelDB. When issued with a valid property string, a readable string will be returned (this method is synchronous).\n\nCurrently, the only valid properties are:\n\n* <b><code>'leveldb.num-files-at-levelN'</code></b>: returns the number of files at level *N*, where N is an integer representing a valid level (e.g. \"0\").\n\n* <b><code>'leveldb.stats'</code></b>: returns a multi-line string describing statistics about LevelDB's internal operation.\n\n* <b><code>'leveldb.sstables'</code></b>: returns a multi-line string describing all of the *sstables* that make up contents of the current database.\n\n\n```js\nvar db = require('level')('./huge.db')\nconsole.log(db.db.getProperty('leveldb.num-files-at-level3'))\n// → '243'\n```\n\n**Note:** `getProperty()` is available via [LevelDOWN](https://github.com/rvagg/node-leveldown/), which by default is accessible as the `db` property of your LevelUP instance. This is a specific LevelDB operation and is not likely to be available where you replace LevelDOWN with an alternative back-end via the `'db'` option.\n\n\n--------------------------------------------------------\n<a name=\"destroy\"></a>\n### leveldown.destroy(location, callback)\n<code>destroy()</code> is used to completely remove an existing LevelDB database directory. You can use this function in place of a full directory *rm* if you want to be sure to only remove LevelDB-related files. If the directory only contains LevelDB files, the directory itself will be removed as well. 
If there are additional, non-LevelDB files in the directory, those files, and the directory, will be left alone.\n\nThe callback will be called when the destroy operation is complete, with a possible `error` argument.\n\n**Note:** `destroy()` is available via [LevelDOWN](https://github.com/rvagg/node-leveldown/) which you will have to install seperately, e.g.:\n\n```js\nrequire('leveldown').destroy('./huge.db', function (err) { console.log('done!') })\n```\n\n--------------------------------------------------------\n<a name=\"repair\"></a>\n### leveldown.repair(location, callback)\n<code>repair()</code> can be used to attempt a restoration of a damaged LevelDB store. From the LevelDB documentation:\n\n> If a DB cannot be opened, you may attempt to call this method to resurrect as much of the contents of the database as possible. Some data may be lost, so be careful when calling this function on a database that contains important information.\n\nYou will find information on the *repair* operation in the *LOG* file inside the store directory. 
\n\nA `repair()` can also be used to perform a compaction of the LevelDB log into table files.\n\nThe callback will be called when the repair operation is complete, with a possible `error` argument.\n\n**Note:** `repair()` is available via [LevelDOWN](https://github.com/rvagg/node-leveldown/) which you will have to install seperately, e.g.:\n\n```js\nrequire('leveldown').repair('./huge.db', function (err) { console.log('done!') })\n```\n\n--------------------------------------------------------\n\n<a name=\"events\"></a>\nEvents\n------\n\nLevelUP emits events when the callbacks to the corresponding methods are called.\n\n* `db.emit('put', key, value)` emitted when a new value is `'put'`\n* `db.emit('del', key)` emitted when a value is deleted\n* `db.emit('batch', ary)` emitted when a batch operation has executed\n* `db.emit('ready')` emitted when the database has opened (`'open'` is synonym)\n* `db.emit('closed')` emitted when the database has closed\n* `db.emit('opening')` emitted when the database is opening\n* `db.emit('closing')` emitted when the database is closing\n\nIf you do not pass a callback to an async function, and there is an error, LevelUP will `emit('error', err)` instead.\n\n<a name=\"json\"></a>\nJSON data\n---------\n\nYou specify `'json'` encoding for both keys and/or values, you can then supply JavaScript objects to LevelUP and receive them from all fetch operations, including ReadStreams. LevelUP will automatically *stringify* your objects and store them as *utf8* and parse the strings back into objects before passing them back to you.\n\n<a name=\"custom_encodings\"></a>\nCustom encodings\n----------------\n\nA custom encoding may be provided by passing in an object as an value for `keyEncoding` or `valueEncoding` (wherever accepted), it must have the following properties:\n\n```js\n{\n encode : function (val) { ... }\n , decode : function (val) { ... 
}\n , buffer : boolean // encode returns a buffer-like and decode accepts a buffer\n , type : String // name of this encoding type.\n}\n```\n\n*\"buffer-like\"* means either a `Buffer` if running in Node, or a Uint8Array if in a browser. Use [bops](https://github.com/chrisdickinson/bops) to get portable binary operations.\n\n<a name=\"extending\"></a>\nExtending LevelUP\n-----------------\n\nA list of <a href=\"https://github.com/rvagg/node-levelup/wiki/Modules\"><b>Node.js LevelDB modules and projects</b></a> can be found in the wiki.\n\nWhen attempting to extend the functionality of LevelUP, it is recommended that you consider using [level-hooks](https://github.com/dominictarr/level-hooks) and/or [level-sublevel](https://github.com/dominictarr/level-sublevel). **level-sublevel** is particularly helpful for keeping additional, extension-specific, data in a LevelDB store. It allows you to partition a LevelUP instance into multiple sub-instances that each correspond to discrete namespaced key ranges.\n\n<a name=\"multiproc\"></a>\nMulti-process access\n--------------------\n\nLevelDB is thread-safe but is **not** suitable for accessing with multiple processes. You should only ever have a LevelDB database open from a single Node.js process. 
Node.js clusters are made up of multiple processes so a LevelUP instance cannot be shared between them either.\n\nSee the <a href=\"https://github.com/rvagg/node-levelup/wiki/Modules\"><b>wiki</b></a> for some LevelUP extensions, including [multilevel](https://github.com/juliangruber/multilevel), that may help if you require a single data store to be shared across processes.\n\n<a name=\"support\"></a>\nGetting support\n---------------\n\nThere are multiple ways you can find help in using LevelDB in Node.js:\n\n * **IRC:** you'll find an active group of LevelUP users in the **##leveldb** channel on Freenode, including most of the contributors to this project.\n * **Mailing list:** there is an active [Node.js LevelDB](https://groups.google.com/forum/#!forum/node-levelup) Google Group.\n * **GitHub:** you're welcome to open an issue here on this GitHub repository if you have a question.\n\n<a name=\"contributing\"></a>\nContributing\n------------\n\nLevelUP is an **OPEN Open Source Project**. This means that:\n\n> Individuals making significant and valuable contributions are given commit-access to the project to contribute as they see fit. 
This project is more like an open wiki than a standard guarded open source project.\n\nSee the [CONTRIBUTING.md](https://github.com/rvagg/node-levelup/blob/master/CONTRIBUTING.md) file for more details.\n\n### Contributors\n\nLevelUP is only possible due to the excellent work of the following contributors:\n\n<table><tbody>\n<tr><th align=\"left\">Rod Vagg</th><td><a href=\"https://github.com/rvagg\">GitHub/rvagg</a></td><td><a href=\"http://twitter.com/rvagg\">Twitter/@rvagg</a></td></tr>\n<tr><th align=\"left\">John Chesley</th><td><a href=\"https://github.com/chesles/\">GitHub/chesles</a></td><td><a href=\"http://twitter.com/chesles\">Twitter/@chesles</a></td></tr>\n<tr><th align=\"left\">Jake Verbaten</th><td><a href=\"https://github.com/raynos\">GitHub/raynos</a></td><td><a href=\"http://twitter.com/raynos2\">Twitter/@raynos2</a></td></tr>\n<tr><th align=\"left\">Dominic Tarr</th><td><a href=\"https://github.com/dominictarr\">GitHub/dominictarr</a></td><td><a href=\"http://twitter.com/dominictarr\">Twitter/@dominictarr</a></td></tr>\n<tr><th align=\"left\">Max Ogden</th><td><a href=\"https://github.com/maxogden\">GitHub/maxogden</a></td><td><a href=\"http://twitter.com/maxogden\">Twitter/@maxogden</a></td></tr>\n<tr><th align=\"left\">Lars-Magnus Skog</th><td><a href=\"https://github.com/ralphtheninja\">GitHub/ralphtheninja</a></td><td><a href=\"http://twitter.com/ralphtheninja\">Twitter/@ralphtheninja</a></td></tr>\n<tr><th align=\"left\">David Björklund</th><td><a href=\"https://github.com/kesla\">GitHub/kesla</a></td><td><a href=\"http://twitter.com/david_bjorklund\">Twitter/@david_bjorklund</a></td></tr>\n<tr><th align=\"left\">Julian Gruber</th><td><a href=\"https://github.com/juliangruber\">GitHub/juliangruber</a></td><td><a href=\"http://twitter.com/juliangruber\">Twitter/@juliangruber</a></td></tr>\n<tr><th align=\"left\">Paolo Fragomeni</th><td><a 
href=\"https://github.com/hij1nx\">GitHub/hij1nx</a></td><td><a href=\"http://twitter.com/hij1nx\">Twitter/@hij1nx</a></td></tr>\n<tr><th align=\"left\">Anton Whalley</th><td><a href=\"https://github.com/No9\">GitHub/No9</a></td><td><a href=\"https://twitter.com/antonwhalley\">Twitter/@antonwhalley</a></td></tr>\n<tr><th align=\"left\">Matteo Collina</th><td><a href=\"https://github.com/mcollina\">GitHub/mcollina</a></td><td><a href=\"https://twitter.com/matteocollina\">Twitter/@matteocollina</a></td></tr>\n<tr><th align=\"left\">Pedro Teixeira</th><td><a href=\"https://github.com/pgte\">GitHub/pgte</a></td><td><a href=\"https://twitter.com/pgte\">Twitter/@pgte</a></td></tr>\n<tr><th align=\"left\">James Halliday</th><td><a href=\"https://github.com/substack\">GitHub/substack</a></td><td><a href=\"https://twitter.com/substack\">Twitter/@substack</a></td></tr>\n</tbody></table>\n\n### Windows\n\nA large portion of the Windows support comes from code by [Krzysztof Kowalczyk](http://blog.kowalczyk.info/) [@kjk](https://twitter.com/kjk), see his Windows LevelDB port [here](http://code.google.com/r/kkowalczyk-leveldb/). If you're using LevelUP on Windows, you should give him your thanks!\n\n\n<a name=\"license\"></a>\nLicense & copyright\n-------------------\n\nCopyright (c) 2012-2014 LevelUP contributors (listed above).\n\nLevelUP is licensed under the MIT license. All rights not explicitly granted in the MIT license are reserved. See the included LICENSE.md file for more details.\n\n=======\n*LevelUP builds on the excellent work of the LevelDB and Snappy teams from Google and additional contributors. LevelDB and Snappy are both issued under the [New BSD Licence](http://opensource.org/licenses/BSD-3-Clause).*\n", | |
"readmeFilename": "README.md", | |
"bugs": { | |
"url": "https://github.com/rvagg/node-levelup/issues" | |
}, | |
"_id": "[email protected]", | |
"_from": "levelup@~0.18.4" | |
} | |
},{}],72:[function(require,module,exports){ | |
'use strict';
// Shared sentinel resolver: passing INTERNAL to the Promise constructor
// signals "create a pending promise shell" and skips resolver execution.
module.exports = INTERNAL;
function INTERNAL() {}
},{}],73:[function(require,module,exports){ | |
'use strict';
var Promise = require('./promise');
var reject = require('./reject');
var resolve = require('./resolve');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = all;
// Promise.all: resolve every entry of `iterable` and fulfil with the array of
// outcomes (order preserved), or reject with the first error encountered.
// Non-array input rejects with a TypeError.
function all(iterable) {
  if (Object.prototype.toString.call(iterable) !== '[object Array]') {
    return reject(new TypeError('must be an array'));
  }
  var len = iterable.length;
  var called = false;
  if (!len) {
    return resolve([]);
  }
  var values = new Array(len);
  var resolved = 0;
  var i = -1;
  var promise = new Promise(INTERNAL);
  while (++i < len) {
    allResolver(iterable[i], i);
  }
  return promise;
  // Resolve a single entry and record its outcome at position `i`.
  function allResolver(value, i) {
    resolve(value).then(resolveFromAll, function (error) {
      if (!called) {
        called = true;
        handlers.reject(promise, error);
      }
    });
    function resolveFromAll(outValue) {
      values[i] = outValue;
      // fixed: was bitwise `=== len & !called`; logical && is what's intended
      if (++resolved === len && !called) {
        called = true;
        handlers.resolve(promise, values);
      }
    }
  }
}
},{"./INTERNAL":72,"./handlers":74,"./promise":76,"./reject":79,"./resolve":80}],74:[function(require,module,exports){ | |
'use strict'; | |
var tryCatch = require('./tryCatch'); | |
var resolveThenable = require('./resolveThenable'); | |
var states = require('./states'); | |
exports.resolve = function (self, value) { | |
var result = tryCatch(getThen, value); | |
if (result.status === 'error') { | |
return exports.reject(self, result.value); | |
} | |
var thenable = result.value; | |
if (thenable) { | |
resolveThenable.safely(self, thenable); | |
} else { | |
self.state = states.FULFILLED; | |
self.outcome = value; | |
var i = -1; | |
var len = self.queue.length; | |
while (++i < len) { | |
self.queue[i].callFulfilled(value); | |
} | |
} | |
return self; | |
}; | |
exports.reject = function (self, error) { | |
self.state = states.REJECTED; | |
self.outcome = error; | |
var i = -1; | |
var len = self.queue.length; | |
while (++i < len) { | |
self.queue[i].callRejected(error); | |
} | |
return self; | |
}; | |
function getThen(obj) { | |
// Make sure we only access the accessor once as required by the spec | |
var then = obj && obj.then; | |
if (obj && typeof obj === 'object' && typeof then === 'function') { | |
return function appyThen() { | |
then.apply(obj, arguments); | |
}; | |
} | |
} | |
},{"./resolveThenable":81,"./states":82,"./tryCatch":83}],75:[function(require,module,exports){ | |
// Public entry point: the Promise constructor plus its static helpers.
module.exports = exports = require('./promise');
exports.resolve = require('./resolve');
exports.reject = require('./reject');
exports.all = require('./all');
exports.race = require('./race');
},{"./all":73,"./promise":76,"./race":78,"./reject":79,"./resolve":80}],76:[function(require,module,exports){ | |
'use strict';
var unwrap = require('./unwrap');
var INTERNAL = require('./INTERNAL');
var resolveThenable = require('./resolveThenable');
var states = require('./states');
var QueueItem = require('./queueItem');
module.exports = Promise;
// Promises/A+ implementation. Passing the INTERNAL sentinel as `resolver`
// creates a pending promise without running any resolver.
function Promise(resolver) {
  if (!(this instanceof Promise)) {
    return new Promise(resolver);
  }
  if (typeof resolver !== 'function') {
    // fixed message typo: was 'reslover must be a function'
    throw new TypeError('resolver must be a function');
  }
  this.state = states.PENDING;
  this.queue = [];       // QueueItems waiting for this promise to settle
  this.outcome = void 0; // fulfilment value or rejection reason once settled
  if (resolver !== INTERNAL) {
    resolveThenable.safely(this, resolver);
  }
}
Promise.prototype['catch'] = function (onRejected) {
  return this.then(null, onRejected);
};
Promise.prototype.then = function (onFulfilled, onRejected) {
  // Already settled with no matching handler: the state can never change
  // again, so reuse this promise instead of allocating a new one.
  if (typeof onFulfilled !== 'function' && this.state === states.FULFILLED ||
    typeof onRejected !== 'function' && this.state === states.REJECTED) {
    return this;
  }
  var promise = new Promise(INTERNAL);
  if (this.state !== states.PENDING) {
    var resolver = this.state === states.FULFILLED ? onFulfilled: onRejected;
    unwrap(promise, resolver, this.outcome);
  } else {
    this.queue.push(new QueueItem(promise, onFulfilled, onRejected));
  }
  return promise;
};
},{"./INTERNAL":72,"./queueItem":77,"./resolveThenable":81,"./states":82,"./unwrap":84}],77:[function(require,module,exports){ | |
'use strict';
var handlers = require('./handlers');
var unwrap = require('./unwrap');
module.exports = QueueItem;
// One pending `.then()` registration: remembers the derived promise and the
// (optional) handlers to run once the parent promise settles.
function QueueItem(promise, onFulfilled, onRejected) {
  this.promise = promise;
  var fulfilledIsFn = typeof onFulfilled === 'function';
  var rejectedIsFn = typeof onRejected === 'function';
  if (fulfilledIsFn) {
    this.onFulfilled = onFulfilled;
    this.callFulfilled = this.otherCallFulfilled;
  }
  if (rejectedIsFn) {
    this.onRejected = onRejected;
    this.callRejected = this.otherCallRejected;
  }
}
// No fulfilment handler: pass the value straight through to the derived promise.
QueueItem.prototype.callFulfilled = function (value) {
  handlers.resolve(this.promise, value);
};
// Fulfilment handler present: run it and settle the derived promise with its result.
QueueItem.prototype.otherCallFulfilled = function (value) {
  unwrap(this.promise, this.onFulfilled, value);
};
// No rejection handler: propagate the rejection to the derived promise.
QueueItem.prototype.callRejected = function (value) {
  handlers.reject(this.promise, value);
};
// Rejection handler present: run it and settle the derived promise with its result.
QueueItem.prototype.otherCallRejected = function (value) {
  unwrap(this.promise, this.onRejected, value);
};
},{"./handlers":74,"./unwrap":84}],78:[function(require,module,exports){ | |
'use strict';
var Promise = require('./promise');
var reject = require('./reject');
var resolve = require('./resolve');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = race;
// Promise.race: settle with the outcome of the first entry to settle.
// NOTE(review): race([]) resolves with [] here, whereas the ES spec leaves an
// empty race forever pending — kept as-is for backward compatibility.
function race(iterable) {
  if (Object.prototype.toString.call(iterable) !== '[object Array]') {
    return reject(new TypeError('must be an array'));
  }
  var len = iterable.length;
  var called = false;
  if (!len) {
    return resolve([]);
  }
  // removed unused local `resolved` (never read)
  var i = -1;
  var promise = new Promise(INTERNAL);
  while (++i < len) {
    resolver(iterable[i]);
  }
  return promise;
  // First entry to settle wins; `called` guards against later settlements.
  function resolver(value) {
    resolve(value).then(function (response) {
      if (!called) {
        called = true;
        handlers.resolve(promise, response);
      }
    }, function (error) {
      if (!called) {
        called = true;
        handlers.reject(promise, error);
      }
    });
  }
}
},{"./INTERNAL":72,"./handlers":74,"./promise":76,"./reject":79,"./resolve":80}],79:[function(require,module,exports){ | |
'use strict';
var Promise = require('./promise');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = reject;
// Create a promise that is already rejected with `reason`.
function reject(reason) {
  return handlers.reject(new Promise(INTERNAL), reason);
}
},{"./INTERNAL":72,"./handlers":74,"./promise":76}],80:[function(require,module,exports){ | |
'use strict';
var Promise = require('./promise');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = resolve;
// Pre-fulfilled promises for the common falsy primitives, shared to avoid
// allocating a fresh promise for each one.
var FALSE = handlers.resolve(new Promise(INTERNAL), false);
var NULL = handlers.resolve(new Promise(INTERNAL), null);
var UNDEFINED = handlers.resolve(new Promise(INTERNAL), void 0);
var ZERO = handlers.resolve(new Promise(INTERNAL), 0);
var EMPTYSTRING = handlers.resolve(new Promise(INTERNAL), '');
// Coerce `value` into a promise: pass promises through untouched, wrap
// anything else in a fulfilled promise.
function resolve(value) {
  if (value) {
    if (value instanceof Promise) {
      return value;
    }
    return handlers.resolve(new Promise(INTERNAL), value);
  }
  // Only falsy values reach this point; serve them from the shared cache.
  var valueType = typeof value;
  switch (valueType) {
    case 'boolean':
      return FALSE;
    case 'undefined':
      return UNDEFINED;
    case 'object':
      return NULL;
    case 'number':
      // fixed: NaN is a falsy number too and must not be conflated with 0
      return value === 0 ? ZERO : handlers.resolve(new Promise(INTERNAL), value);
    case 'string':
      return EMPTYSTRING;
  }
}
},{"./INTERNAL":72,"./handlers":74,"./promise":76}],81:[function(require,module,exports){ | |
'use strict';
var handlers = require('./handlers');
var tryCatch = require('./tryCatch');
// Run a thenable/resolver against `self`, guaranteeing the promise settles at
// most once even if the thenable calls back repeatedly or throws.
function safelyResolveThenable(self, thenable) {
  var settled = false;
  function onError(value) {
    if (settled) {
      return;
    }
    settled = true;
    handlers.reject(self, value);
  }
  function onSuccess(value) {
    if (settled) {
      return;
    }
    settled = true;
    handlers.resolve(self, value);
  }
  var result = tryCatch(function () {
    thenable(onSuccess, onError);
  });
  if (result.status === 'error') {
    // The thenable threw synchronously; treat that as a rejection.
    onError(result.value);
  }
}
exports.safely = safelyResolveThenable;
},{"./handlers":74,"./tryCatch":83}],82:[function(require,module,exports){ | |
// Poor man's state symbols: unique array instances compared by identity.
exports.PENDING = ['PENDING'];
exports.FULFILLED = ['FULFILLED'];
exports.REJECTED = ['REJECTED'];
},{}],83:[function(require,module,exports){ | |
'use strict';
module.exports = tryCatch;
// Invoke `func(value)` and report the outcome as a {status, value} record
// ('success' with the return value, or 'error' with the caught exception)
// instead of letting exceptions propagate.
function tryCatch(func, value) {
  try {
    return { status: 'success', value: func(value) };
  } catch (e) {
    return { status: 'error', value: e };
  }
}
},{}],84:[function(require,module,exports){ | |
'use strict';
var immediate = require('immediate');
var handlers = require('./handlers');
module.exports = unwrap;
// Asynchronously run handler `func` with `value` and settle `promise` with
// the result — or reject it with whatever the handler throws.
function unwrap(promise, func, value) {
  immediate(function () {
    var out;
    try {
      out = func(value);
    } catch (err) {
      // Handler threw: the derived promise rejects with that error.
      handlers.reject(promise, err);
      return;
    }
    if (out === promise) {
      // Promises/A+ 2.3.1: a promise must not be resolved with itself.
      handlers.reject(promise, new TypeError('Cannot resolve promise with itself'));
      return;
    }
    handlers.resolve(promise, out);
  });
}
},{"./handlers":74,"immediate":85}],85:[function(require,module,exports){ | |
'use strict';
// Candidate async schedulers, in order of preference; the first whose test()
// passes in this environment is installed below.
var types = [
  require('./nextTick'),
  require('./mutation.js'),
  require('./messageChannel'),
  require('./stateChange'),
  require('./timeout')
];
var draining;
var queue = [];
// Run every queued task; tasks queued while draining are picked up in the
// same pass via the refreshed `queue`/`len`.
function drainQueue() {
  draining = true;
  var i, oldQueue;
  var len = queue.length;
  while (len) {
    oldQueue = queue;
    queue = [];
    i = -1;
    while (++i < len) {
      oldQueue[i]();
    }
    len = queue.length;
  }
  draining = false;
}
// Pick the first scheduler whose test() succeeds (timeout always does).
var scheduleDrain;
var i = -1;
var len = types.length;
while (++ i < len) {
  if (types[i] && types[i].test && types[i].test()) {
    scheduleDrain = types[i].install(drainQueue);
    break;
  }
}
module.exports = immediate;
// Schedule `task` to run asynchronously; only the first task pushed onto an
// empty queue triggers a drain, so bursts share one scheduled flush.
function immediate(task) {
  if (queue.push(task) === 1 && !draining) {
    scheduleDrain();
  }
}
},{"./messageChannel":86,"./mutation.js":87,"./nextTick":17,"./stateChange":88,"./timeout":89}],86:[function(require,module,exports){ | |
(function (global){ | |
'use strict';
// MessageChannel-based scheduler for browsers without a usable setImmediate.
exports.test = function () {
  if (global.setImmediate) {
    // we can only get here in IE10
    // which doesn't handle postMessage well
    return false;
  }
  return typeof global.MessageChannel !== 'undefined';
};
// Posting on port2 fires port1's onmessage asynchronously, running `func`.
exports.install = function (func) {
  var channel = new global.MessageChannel();
  channel.port1.onmessage = func;
  return function () {
    channel.port2.postMessage(0);
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],87:[function(require,module,exports){ | |
(function (global){ | |
'use strict';
//based off rsvp https://github.com/tildeio/rsvp.js
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
var Mutation = global.MutationObserver || global.WebKitMutationObserver;
// Truthy when a MutationObserver implementation exists; the caller only
// checks the result for truthiness.
exports.test = function () {
  return Mutation;
};
// Observe a detached text node; toggling its data between "0" and "1"
// queues a microtask that invokes `handle`.
exports.install = function (handle) {
  var called = 0;
  var observer = new Mutation(handle);
  var element = global.document.createTextNode('');
  observer.observe(element, {
    characterData: true
  });
  return function () {
    element.data = (called = ++called % 2);
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],88:[function(require,module,exports){ | |
(function (global){ | |
'use strict';
// <script> readystatechange scheduler (fallback for old IE).
exports.test = function () {
  return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
};
exports.install = function (handle) {
  return function () {
    // Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
    // into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
    var scriptEl = global.document.createElement('script');
    scriptEl.onreadystatechange = function () {
      handle();
      scriptEl.onreadystatechange = null;
      scriptEl.parentNode.removeChild(scriptEl);
      scriptEl = null;
    };
    global.document.documentElement.appendChild(scriptEl);
    return handle;
  };
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],89:[function(require,module,exports){ | |
'use strict'; | |
exports.test = function () { | |
return true; | |
}; | |
exports.install = function (t) { | |
return function () { | |
setTimeout(t, 0); | |
}; | |
}; | |
},{}],"1XEYOF":[function(require,module,exports){ | |
(function (process,global,Buffer){ | |
var util = require('util') | |
, AbstractLevelDOWN = require('abstract-leveldown').AbstractLevelDOWN | |
, AbstractIterator = require('abstract-leveldown').AbstractIterator | |
, noop = function () {} | |
, setImmediate = global.setImmediate || process.nextTick | |
// Map an arbitrary key to a property name for the backing store: strings get
// a '$' prefix (avoiding '__proto__'-style collisions), anything else is
// JSON-serialized.
function toKey (key) {
  if (typeof key === 'string') {
    return '$' + key
  }
  return JSON.stringify(key)
}
// Binary search: return the lowest index at which `item` could be inserted
// into sorted array `arr` while keeping it sorted.
function sortedIndexOf (arr, item) {
  var lo = 0
  var hi = arr.length
  while (lo < hi) {
    var mid = (lo + hi) >>> 1
    if (arr[mid] < item) {
      lo = mid + 1
    } else {
      hi = mid
    }
  }
  return lo
}
// Iterator over a snapshot of MemDOWN's sorted key list. Supports reverse
// iteration, limits, and start/end plus gt/gte/lt/lte bounds.
function MemIterator (db, options) {
  AbstractIterator.call(this, db)
  this._reverse = options.reverse
  this._limit = options.limit
  this._count = 0
  this._end = options.end
  this._start = options.start
  this._gt = options.gt
  this._gte = options.gte
  this._lt = options.lt
  this._lte = options.lte
  this._keys = []
  var i
  if (this._start) {
    // Linear scan for the first key >= start (db._keys is kept sorted).
    for (i = 0; i < this.db._keys.length; i++) {
      if (this.db._keys[i] >= this._start) {
        this._pos = i
        if (this.db._keys[i] != this._start) {
          if (this._reverse) {
            // going backwards and key doesn't match, jump back one
            --this._pos
          }
        } else {
          if (options.exclusiveStart) {
            // key matches but it's a gt or lt
            this._pos += (this._reverse ? -1 : 1)
          }
        }
        break
      }
    }
    if (this._pos == null && !this._reverse) // no matching keys, non starter
      this._pos = -1
  }
  if (!options.start || this._pos === undefined)
    this._pos = this._reverse ? this.db._keys.length - 1 : 0
  // copy the keys that we need so that they're not affected by puts/deletes
  if (this._pos >= 0) {
    this._keys = this._reverse ? this.db._keys.slice(0, this._pos + 1) : this.db._keys.slice(this._pos)
    this._pos = this._reverse ? this._keys.length - 1 : 0
  }
}
util.inherits(MemIterator, AbstractIterator)
// Yield the next (key, value) pair, or call back with no arguments once the
// iterator is exhausted or a bound/limit is hit.
MemIterator.prototype._next = function (callback) {
  var self = this
    , key = self._keys[self._pos]
    , value
  if (self._pos >= self._keys.length || self._pos < 0)
    return setImmediate(callback)
  if (!!self._end && (self._reverse ? key < self._end : key > self._end))
    return setImmediate(callback)
  if (!!self._limit && self._limit > 0 && self._count++ >= self._limit)
    return setImmediate(callback)
  // gt/gte/lt/lte bounds end iteration as soon as a key falls outside.
  if ( (this._lt && key >= this._lt)
    || (this._lte && key > this._lte)
    || (this._gt && key <= this._gt)
    || (this._gte && key < this._gte))
    return setImmediate(callback)
  value = self.db._store[toKey(key)]
  self._pos += self._reverse ? -1 : 1
  setImmediate(function () { callback(null, key, value) })
}
// In-memory LevelDOWN backend: keys live sorted in `_keys`, values in
// `_store` under toKey()-namespaced property names.
function MemDOWN (location) {
  if (!(this instanceof MemDOWN)) {
    return new MemDOWN(location)
  }
  var loc = typeof location === 'string' ? location : ''
  AbstractLevelDOWN.call(this, loc)
  this._store = {}
  this._keys = []
}
util.inherits(MemDOWN, AbstractLevelDOWN)
// Nothing to open for an in-memory store; report success on the next tick.
MemDOWN.prototype._open = function (options, callback) {
  var db = this
  setImmediate(function () {
    callback(null, db)
  })
}
// Insert or overwrite `key`: keep `_keys` sorted (inserting only when the key
// is not already present), then store the value under the namespaced name.
MemDOWN.prototype._put = function (key, value, options, callback) {
  var pos = sortedIndexOf(this._keys, key)
  if (this._keys[pos] != key) {
    this._keys.splice(pos, 0, key)
  }
  // safety, to avoid key='__proto__'-type skullduggery
  this._store[toKey(key)] = value
  setImmediate(callback)
}
// Fetch the value for `key`; yields a 'NotFound' error when absent. Unless
// options.asBuffer is explicitly false, the value is returned as a Buffer.
MemDOWN.prototype._get = function (key, options, callback) {
  var value = this._store[toKey(key)]
  if (value === undefined) {
    // 'NotFound' error, consistent with LevelDOWN API
    return setImmediate(function () { callback(new Error('NotFound')) })
  }
  var wantBuffer = options.asBuffer !== false
  if (wantBuffer && !Buffer.isBuffer(value)) {
    value = new Buffer(String(value))
  }
  setImmediate(function () {
    callback(null, value)
  })
}
// Remove `key` from both the sorted key list and the backing store.
// Deleting a missing key is a no-op that still calls back successfully.
MemDOWN.prototype._del = function (key, options, callback) {
  var pos = sortedIndexOf(this._keys, key)
  if (this._keys[pos] == key) {
    this._keys.splice(pos, 1)
  }
  delete this._store[toKey(key)]
  setImmediate(callback)
}
// Apply an array of put/del operations. Each entry is validated before being
// applied; an invalid entry aborts the batch with an error (entries already
// applied are NOT rolled back).
// NOTE(review): deletes are applied with the original `array[i].key`, while
// puts use the stringified `key` — looks inconsistent for non-string,
// non-Buffer keys; confirm against abstract-leveldown expectations.
MemDOWN.prototype._batch = function (array, options, callback) {
  var err
    , i = 0
    , key
    , value
  if (Array.isArray(array)) {
    for (; i < array.length; i++) {
      if (array[i]) {
        key = Buffer.isBuffer(array[i].key) ? array[i].key : String(array[i].key)
        err = this._checkKeyValue(key, 'key')
        if (err) return setImmediate(function () { callback(err) })
        if (array[i].type === 'del') {
          this._del(array[i].key, options, noop)
        } else if (array[i].type === 'put') {
          value = Buffer.isBuffer(array[i].value) ? array[i].value : String(array[i].value)
          err = this._checkKeyValue(value, 'value')
          if (err) return setImmediate(function () { callback(err) })
          this._put(key, value, options, noop)
        }
      }
    }
  }
  setImmediate(callback)
}
// Create a snapshot iterator honouring the supplied range options.
MemDOWN.prototype._iterator = function (options) {
  return new MemIterator(this, options)
}
// Used by abstract-leveldown to detect binary keys/values.
MemDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}
module.exports = MemDOWN
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {},require("buffer").Buffer) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"abstract-leveldown":94,"buffer":20,"util":32}],"leveldown":[function(require,module,exports){ | |
module.exports=require('1XEYOF'); | |
},{}],92:[function(require,module,exports){ | |
module.exports=require(55) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],93:[function(require,module,exports){ | |
module.exports=require(56) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],94:[function(require,module,exports){ | |
module.exports=require(57) | |
},{"./abstract-chained-batch":92,"./abstract-iterator":93,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"xtend":95}],95:[function(require,module,exports){ | |
module.exports=require(70) | |
},{}],96:[function(require,module,exports){ | |
"use strict"; | |
// Extends method | |
// (taken from http://code.jquery.com/jquery-1.9.0.js) | |
// Populate the class2type map | |
// Lookup table mapping Object.prototype.toString tags ("[object Array]")
// to lowercase type names, following jQuery 1.9's $.type technique.
var class2type = {};
var types = [
  "Boolean", "Number", "String", "Function", "Array",
  "Date", "RegExp", "Object", "Error"
];
for (var i = 0; i < types.length; i++) {
  class2type["[object " + types[i] + "]"] = types[i].toLowerCase();
}
// Both are inherited from Object.prototype via the plain object above.
var core_toString = class2type.toString;
var core_hasOwn = class2type.hasOwnProperty;
/**
 * Returns a lowercase type name for any value.
 * null -> "null"; known object tags map via class2type; any other
 * object/function falls back to "object"; primitives use typeof.
 */
function type(obj) {
  if (obj === null) {
    return "null";
  }
  var objectLike = typeof obj === "object" || typeof obj === "function";
  if (!objectLike) {
    return typeof obj;
  }
  return class2type[core_toString.call(obj)] || "object";
}
// A window object is identified by being its own `window` property.
// Note: like the original, this throws if obj is undefined.
function isWindow(obj) {
  if (obj === null) {
    return false;
  }
  return obj === obj.window;
}
// True only for "plain" objects (created via {} or new Object), per
// jQuery 1.9's $.isPlainObject. Used by extendInner to decide whether
// to deep-merge a value.
function isPlainObject(obj) {
  // Must be an Object.
  // Because of IE, we also have to check the presence of
  // the constructor property.
  // Make sure that DOM nodes and window objects don't pass through, as well
  if (!obj || type(obj) !== "object" || obj.nodeType || isWindow(obj)) {
    return false;
  }
  try {
    // Not own constructor property must be Object
    if (obj.constructor &&
      !core_hasOwn.call(obj, "constructor") &&
      !core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf")) {
      return false;
    }
  } catch ( e ) {
    // IE8,9 Will throw exceptions on certain host objects #9897
    return false;
  }
  // Own properties are enumerated firstly, so to speed up,
  // if last one is own, then all properties are own.
  // (Relies on for-in enumerating own keys before inherited ones.)
  var key;
  for (key in obj) {}
  return key === undefined || core_hasOwn.call(obj, key);
}
// A value is a function iff its normalized type name says so.
function isFunction(obj) {
  return "function" === type(obj);
}
// Prefer the native ES5 check; fall back to a toString-based test on
// engines without Array.isArray.
var isArray = Array.isArray || function (obj) {
  return "array" === type(obj);
};
// Public entry point: behaves like jQuery.extend(...) but drives the
// merge with an explicit work stack instead of recursion.
function extend() {
  // originally extend() was recursive, but this ended up giving us
  // "call stack exceeded", so it's been unrolled to use a literal stack
  // (see https://github.com/pouchdb/pouchdb/issues/2543)
  var stack = [];
  var i = -1;
  var len = arguments.length;
  var args = new Array(len);
  while (++i < len) {
    args[i] = arguments[i];
  }
  // extendInner "returns" by assigning into result.container[result.key],
  // so seed the stack with a one-slot container that will receive the
  // final merged value.
  var container = {};
  stack.push({args: args, result: {container: container, key: 'key'}});
  var next;
  while ((next = stack.pop())) {
    extendInner(stack, next.args, next.result);
  }
  return container.key;
}
// One jQuery.extend-style merge step. Instead of returning the merged
// target, it writes it into result.container[result.key]; nested
// deep-merges are deferred by pushing work items onto `stack` rather
// than recursing (avoids call-stack overflow on deep inputs).
function extendInner(stack, args, result) {
  var options, name, src, copy, copyIsArray, clone,
    target = args[0] || {},
    i = 1,
    length = args.length,
    deep = false,
    numericStringRegex = /\d+/,
    optionsIsArray;
  // Handle a deep copy situation
  if (typeof target === "boolean") {
    deep = target;
    target = args[1] || {};
    // skip the boolean and the target
    i = 2;
  }
  // Handle case when target is a string or something (possible in deep copy)
  if (typeof target !== "object" && !isFunction(target)) {
    target = {};
  }
  // extend jQuery itself if only one argument is passed
  if (length === i) {
    /* jshint validthis: true */
    target = this;
    --i;
  }
  for (; i < length; i++) {
    // Only deal with non-null/undefined values
    if ((options = args[i]) != null) {
      optionsIsArray = isArray(options);
      // Extend the base object
      for (name in options) {
        //if (options.hasOwnProperty(name)) {
        // Skip anything that shadows Object.prototype members.
        if (!(name in Object.prototype)) {
          // For array sources, only numeric-string keys are copied.
          if (optionsIsArray && !numericStringRegex.test(name)) {
            continue;
          }
          src = target[name];
          copy = options[name];
          // Prevent never-ending loop
          if (target === copy) {
            continue;
          }
          // Recurse if we're merging plain objects or arrays
          if (deep && copy && (isPlainObject(copy) ||
              (copyIsArray = isArray(copy)))) {
            if (copyIsArray) {
              copyIsArray = false;
              clone = src && isArray(src) ? src : [];
            } else {
              clone = src && isPlainObject(src) ? src : {};
            }
            // Never move original objects, clone them
            // (queued for the driver loop instead of recursing here)
            stack.push({
              args: [deep, clone, copy],
              result: {
                container: target,
                key: name
              }
            });
            // Don't bring in undefined values
          } else if (copy !== undefined) {
            if (!(isArray(options) && isFunction(copy))) {
              target[name] = copy;
            }
          }
        }
      }
    }
  }
  // "Return" the modified object by setting the key
  // on the given container
  result.container[result.key] = target;
}
module.exports = extend; | |
},{}],97:[function(require,module,exports){ | |
/*jshint bitwise:false*/ | |
/*global unescape*/ | |
(function (factory) { | |
if (typeof exports === 'object') { | |
// Node/CommonJS | |
module.exports = factory(); | |
} else if (typeof define === 'function' && define.amd) { | |
// AMD | |
define(factory); | |
} else { | |
// Browser globals (with support for web workers) | |
var glob; | |
try { | |
glob = window; | |
} catch (e) { | |
glob = self; | |
} | |
glob.SparkMD5 = factory(); | |
} | |
}(function (undefined) { | |
'use strict'; | |
//////////////////////////////////////////////////////////////////////////// | |
/* | |
* Fastest md5 implementation around (JKM md5) | |
* Credits: Joseph Myers | |
* | |
* @see http://www.myersdaily.org/joseph/javascript/md5-text.html | |
* @see http://jsperf.com/md5-shootout/7 | |
*/ | |
/* this function is much faster, | |
so if possible we use it. Some IEs | |
are the only ones I know of that | |
need the idiotic second function, | |
generated by an if clause. */ | |
// ---- MD5 core (Joseph Myers' JKM implementation) ----
// One chained var statement: arithmetic helpers, the four MD5 round
// functions, the 64-byte block transform, one-shot drivers for strings
// and typed arrays, hex encoding, and the SparkMD5 constructor.
var add32 = function (a, b) {
    // Fast path: relies on standard 32-bit wrap-around; replaced below
    // by a carry-propagating version if the runtime self-test fails.
    return (a + b) & 0xFFFFFFFF;
  },
  // Shared core of the four round ops: add, rotate left by s, add b.
  cmn = function (q, a, b, x, s, t) {
    a = add32(add32(a, q), add32(x, t));
    return add32((a << s) | (a >>> (32 - s)), b);
  },
  // Round functions F, G, H, I (per the MD5 specification).
  ff = function (a, b, c, d, x, s, t) {
    return cmn((b & c) | ((~b) & d), a, b, x, s, t);
  },
  gg = function (a, b, c, d, x, s, t) {
    return cmn((b & d) | (c & (~d)), a, b, x, s, t);
  },
  hh = function (a, b, c, d, x, s, t) {
    return cmn(b ^ c ^ d, a, b, x, s, t);
  },
  ii = function (a, b, c, d, x, s, t) {
    return cmn(c ^ (b | (~d)), a, b, x, s, t);
  },
  // Transforms state x (4 words) with one 16-word (64-byte) block k.
  // The shift amounts and additive constants are fixed by the algorithm.
  md5cycle = function (x, k) {
    var a = x[0],
      b = x[1],
      c = x[2],
      d = x[3];
    a = ff(a, b, c, d, k[0], 7, -680876936);
    d = ff(d, a, b, c, k[1], 12, -389564586);
    c = ff(c, d, a, b, k[2], 17, 606105819);
    b = ff(b, c, d, a, k[3], 22, -1044525330);
    a = ff(a, b, c, d, k[4], 7, -176418897);
    d = ff(d, a, b, c, k[5], 12, 1200080426);
    c = ff(c, d, a, b, k[6], 17, -1473231341);
    b = ff(b, c, d, a, k[7], 22, -45705983);
    a = ff(a, b, c, d, k[8], 7, 1770035416);
    d = ff(d, a, b, c, k[9], 12, -1958414417);
    c = ff(c, d, a, b, k[10], 17, -42063);
    b = ff(b, c, d, a, k[11], 22, -1990404162);
    a = ff(a, b, c, d, k[12], 7, 1804603682);
    d = ff(d, a, b, c, k[13], 12, -40341101);
    c = ff(c, d, a, b, k[14], 17, -1502002290);
    b = ff(b, c, d, a, k[15], 22, 1236535329);
    a = gg(a, b, c, d, k[1], 5, -165796510);
    d = gg(d, a, b, c, k[6], 9, -1069501632);
    c = gg(c, d, a, b, k[11], 14, 643717713);
    b = gg(b, c, d, a, k[0], 20, -373897302);
    a = gg(a, b, c, d, k[5], 5, -701558691);
    d = gg(d, a, b, c, k[10], 9, 38016083);
    c = gg(c, d, a, b, k[15], 14, -660478335);
    b = gg(b, c, d, a, k[4], 20, -405537848);
    a = gg(a, b, c, d, k[9], 5, 568446438);
    d = gg(d, a, b, c, k[14], 9, -1019803690);
    c = gg(c, d, a, b, k[3], 14, -187363961);
    b = gg(b, c, d, a, k[8], 20, 1163531501);
    a = gg(a, b, c, d, k[13], 5, -1444681467);
    d = gg(d, a, b, c, k[2], 9, -51403784);
    c = gg(c, d, a, b, k[7], 14, 1735328473);
    b = gg(b, c, d, a, k[12], 20, -1926607734);
    a = hh(a, b, c, d, k[5], 4, -378558);
    d = hh(d, a, b, c, k[8], 11, -2022574463);
    c = hh(c, d, a, b, k[11], 16, 1839030562);
    b = hh(b, c, d, a, k[14], 23, -35309556);
    a = hh(a, b, c, d, k[1], 4, -1530992060);
    d = hh(d, a, b, c, k[4], 11, 1272893353);
    c = hh(c, d, a, b, k[7], 16, -155497632);
    b = hh(b, c, d, a, k[10], 23, -1094730640);
    a = hh(a, b, c, d, k[13], 4, 681279174);
    d = hh(d, a, b, c, k[0], 11, -358537222);
    c = hh(c, d, a, b, k[3], 16, -722521979);
    b = hh(b, c, d, a, k[6], 23, 76029189);
    a = hh(a, b, c, d, k[9], 4, -640364487);
    d = hh(d, a, b, c, k[12], 11, -421815835);
    c = hh(c, d, a, b, k[15], 16, 530742520);
    b = hh(b, c, d, a, k[2], 23, -995338651);
    a = ii(a, b, c, d, k[0], 6, -198630844);
    d = ii(d, a, b, c, k[7], 10, 1126891415);
    c = ii(c, d, a, b, k[14], 15, -1416354905);
    b = ii(b, c, d, a, k[5], 21, -57434055);
    a = ii(a, b, c, d, k[12], 6, 1700485571);
    d = ii(d, a, b, c, k[3], 10, -1894986606);
    c = ii(c, d, a, b, k[10], 15, -1051523);
    b = ii(b, c, d, a, k[1], 21, -2054922799);
    a = ii(a, b, c, d, k[8], 6, 1873313359);
    d = ii(d, a, b, c, k[15], 10, -30611744);
    c = ii(c, d, a, b, k[6], 15, -1560198380);
    b = ii(b, c, d, a, k[13], 21, 1309151649);
    a = ii(a, b, c, d, k[4], 6, -145523070);
    d = ii(d, a, b, c, k[11], 10, -1120210379);
    c = ii(c, d, a, b, k[2], 15, 718787259);
    b = ii(b, c, d, a, k[9], 21, -343485551);
    x[0] = add32(a, x[0]);
    x[1] = add32(b, x[1]);
    x[2] = add32(c, x[2]);
    x[3] = add32(d, x[3]);
  },
  /* there needs to be support for Unicode here,
   * unless we pretend that we can redefine the MD-5
   * algorithm for multi-byte characters (perhaps
   * by adding every four 16-bit characters and
   * shortening the sum to 32 bits). Otherwise
   * I suggest performing MD-5 as if every character
   * was two bytes--e.g., 0040 0025 = @%--but then
   * how will an ordinary MD-5 sum be matched?
   * There is no way to standardize text to something
   * like UTF-8 before transformation; speed cost is
   * utterly prohibitive. The JavaScript standard
   * itself needs to look at this: it should start
   * providing access to strings as preformed UTF-8
   * 8-bit unsigned value arrays.
   */
  // Packs 64 chars of a binary string into 16 little-endian 32-bit words.
  md5blk = function (s) {
    var md5blks = [],
      i; /* Andy King said do it this way. */
    for (i = 0; i < 64; i += 4) {
      md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
    }
    return md5blks;
  },
  // Same packing, but for a 64-byte typed-array slice.
  md5blk_array = function (a) {
    var md5blks = [],
      i; /* Andy King said do it this way. */
    for (i = 0; i < 64; i += 4) {
      md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
    }
    return md5blks;
  },
  // One-shot MD5 over a binary string; returns the raw 4-word state.
  md51 = function (s) {
    var n = s.length,
      state = [1732584193, -271733879, -1732584194, 271733878],
      i,
      length,
      tail,
      tmp,
      lo,
      hi;
    for (i = 64; i <= n; i += 64) {
      md5cycle(state, md5blk(s.substring(i - 64, i)));
    }
    // Pack the leftover (< 64) bytes into the tail block.
    s = s.substring(i - 64);
    length = s.length;
    tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    for (i = 0; i < length; i += 1) {
      tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
    }
    // Append the 0x80 padding bit; if the 64-bit length no longer fits,
    // flush this block and start a fresh all-zero one.
    tail[i >> 2] |= 0x80 << ((i % 4) << 3);
    if (i > 55) {
      md5cycle(state, tail);
      for (i = 0; i < 16; i += 1) {
        tail[i] = 0;
      }
    }
    // Beware that the final length might not fit in 32 bits so we take care of that
    tmp = n * 8;
    tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
    lo = parseInt(tmp[2], 16);
    hi = parseInt(tmp[1], 16) || 0;
    tail[14] = lo;
    tail[15] = hi;
    md5cycle(state, tail);
    return state;
  },
  // One-shot MD5 over a Uint8Array; returns the raw 4-word state.
  md51_array = function (a) {
    var n = a.length,
      state = [1732584193, -271733879, -1732584194, 271733878],
      i,
      length,
      tail,
      tmp,
      lo,
      hi;
    for (i = 64; i <= n; i += 64) {
      md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
    }
    // Not sure if it is a bug, however IE10 will always produce a sub array of length 1
    // containing the last element of the parent array if the sub array specified starts
    // beyond the length of the parent array - weird.
    // https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
    a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);
    length = a.length;
    tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
    for (i = 0; i < length; i += 1) {
      tail[i >> 2] |= a[i] << ((i % 4) << 3);
    }
    tail[i >> 2] |= 0x80 << ((i % 4) << 3);
    if (i > 55) {
      md5cycle(state, tail);
      for (i = 0; i < 16; i += 1) {
        tail[i] = 0;
      }
    }
    // Beware that the final length might not fit in 32 bits so we take care of that
    tmp = n * 8;
    tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
    lo = parseInt(tmp[2], 16);
    hi = parseInt(tmp[1], 16) || 0;
    tail[14] = lo;
    tail[15] = hi;
    md5cycle(state, tail);
    return state;
  },
  hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'],
  // Hex-encodes one 32-bit word, least-significant byte first.
  rhex = function (n) {
    var s = '',
      j;
    for (j = 0; j < 4; j += 1) {
      s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
    }
    return s;
  },
  // Hex-encodes the 4-word state (mutates x!) and joins the pieces.
  hex = function (x) {
    var i;
    for (i = 0; i < x.length; i += 1) {
      x[i] = rhex(x[i]);
    }
    return x.join('');
  },
  // Convenience: hex MD5 of a binary string.
  md5 = function (s) {
    return hex(md51(s));
  },
  ////////////////////////////////////////////////////////////////////////////
  /**
   * SparkMD5 OOP implementation.
   *
   * Use this class to perform an incremental md5, otherwise use the
   * static methods instead.
   */
  SparkMD5 = function () {
    // call reset to init the instance
    this.reset();
  };
// In some cases the fast add32 function cannot be used..
// Self-test against the known digest of 'hello'; engines with
// non-standard 32-bit overflow behaviour get an explicit
// carry-propagating replacement instead.
if (md5('hello') !== '5d41402abc4b2a76b9719d911017c592') {
  add32 = function (x, y) {
    var lsw = (x & 0xFFFF) + (y & 0xFFFF),
      msw = (x >> 16) + (y >> 16) + (lsw >> 16);
    return (msw << 16) | (lsw & 0xFFFF);
  };
}
/**
 * Appends a string.
 * A conversion will be applied if an utf8 string is detected.
 *
 * @param {String} str The string to be appended
 *
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.append = function (str) {
  // Re-encode to a single-byte "binary" string when any non-ASCII
  // UTF-16 code unit is present, then hand off to appendBinary.
  var needsEncoding = /[\u0080-\uFFFF]/.test(str);
  this.appendBinary(needsEncoding ? unescape(encodeURIComponent(str)) : str);
  return this;
};
/**
 * Appends a binary string.
 *
 * @param {String} contents The binary string to be appended
 *
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.appendBinary = function (contents) {
  this._buff += contents;
  this._length += contents.length;
  var length = this._buff.length,
    i;
  // Hash every complete 64-byte block in the buffer.
  for (i = 64; i <= length; i += 64) {
    md5cycle(this._state, md5blk(this._buff.substring(i - 64, i)));
  }
  // After the loop, i - 64 is the start of the unprocessed remainder;
  // keep only that remainder buffered for the next append/end call.
  this._buff = this._buff.substr(i - 64);
  return this;
};
/**
 * Finishes the incremental computation, reseting the internal state and
 * returning the result.
 * Use the raw parameter to obtain the raw result instead of the hex one.
 *
 * @param {Boolean} raw True to get the raw result, false to get the hex result
 *
 * @return {String|Array} The result
 */
SparkMD5.prototype.end = function (raw) {
  var buff = this._buff,
    length = buff.length,
    i,
    tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    ret;
  // Pack the (at most 63) leftover bytes into 16 little-endian words.
  for (i = 0; i < length; i += 1) {
    tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
  }
  this._finish(tail, length);
  ret = !!raw ? this._state : hex(this._state);
  // Leave the instance ready for a fresh message.
  this.reset();
  return ret;
};
/**
 * Finish the final calculation based on the tail.
 *
 * @param {Array} tail The tail (will be modified)
 * @param {Number} length The length of the remaining buffer
 */
SparkMD5.prototype._finish = function (tail, length) {
  var i = length,
    tmp,
    lo,
    hi;
  // Append the mandatory 0x80 padding bit right after the message.
  tail[i >> 2] |= 0x80 << ((i % 4) << 3);
  // If there is no room left for the 64-bit length (bytes 56..63),
  // flush this block and start a zeroed one.
  if (i > 55) {
    md5cycle(this._state, tail);
    for (i = 0; i < 16; i += 1) {
      tail[i] = 0;
    }
  }
  // Do the final computation based on the tail and length
  // Beware that the final length may not fit in 32 bits so we take care of that
  // (split the hex representation of length*8 into low/high 32-bit halves).
  tmp = this._length * 8;
  tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
  lo = parseInt(tmp[2], 16);
  hi = parseInt(tmp[1], 16) || 0;
  tail[14] = lo;
  tail[15] = hi;
  md5cycle(this._state, tail);
};
/**
 * Resets the internal state of the computation.
 *
 * @return {SparkMD5} The instance itself
 */
SparkMD5.prototype.reset = function () {
  this._length = 0;
  this._buff = "";
  // Standard MD5 initialization vector (A, B, C, D).
  this._state = [1732584193, -271733879, -1732584194, 271733878];
  return this;
};
/**
 * Releases memory used by the incremental buffer and other additional
 * resources. If you plan to use the instance again, use reset instead.
 * After calling this, the instance is unusable until reset().
 */
SparkMD5.prototype.destroy = function () {
  delete this._state;
  delete this._buff;
  delete this._length;
};
/**
 * Performs the md5 hash on a string.
 * A conversion will be applied if utf8 string is detected.
 *
 * @param {String} str The string
 * @param {Boolean} raw True to get the raw result, false to get the hex result
 *
 * @return {String|Array} The result
 */
SparkMD5.hash = function (str, raw) {
  // UTF-8 encode first when non-ASCII code units are present.
  var input = /[\u0080-\uFFFF]/.test(str) ? unescape(encodeURIComponent(str)) : str;
  var state = md51(input);
  return raw ? state : hex(state);
};
/**
 * Performs the md5 hash on a binary string (no utf8 conversion).
 *
 * @param {String} content The binary string
 * @param {Boolean} raw True to get the raw result, false to get the hex result
 *
 * @return {String|Array} The result
 */
SparkMD5.hashBinary = function (content, raw) {
  var state = md51(content);
  return raw ? state : hex(state);
};
/**
 * SparkMD5 OOP implementation for array buffers.
 *
 * Use this class to perform an incremental md5 ONLY for array buffers.
 */
SparkMD5.ArrayBuffer = function () {
  // call reset to init the instance (sets _buff, _length, _state)
  this.reset();
};
//////////////////////////////////////////////////////////////////////////// | |
/**
 * Appends an array buffer.
 *
 * @param {ArrayBuffer} arr The array to be appended
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.append = function (arr) {
  // TODO: we could avoid the concatenation here but the algorithm would be more complex
  // if you find yourself needing extra performance, please make a PR.
  var buff = this._concatArrayBuffer(this._buff, arr),
    length = buff.length,
    i;
  this._length += arr.byteLength;
  // Consume every complete 64-byte block.
  for (i = 64; i <= length; i += 64) {
    md5cycle(this._state, md5blk_array(buff.subarray(i - 64, i)));
  }
  // Avoids IE10 weirdness (documented above): calling subarray past the
  // end would yield a bogus one-element view, so keep an empty array then.
  this._buff = (i - 64) < length ? buff.subarray(i - 64) : new Uint8Array(0);
  return this;
};
/**
 * Finishes the incremental computation, reseting the internal state and
 * returning the result.
 * Use the raw parameter to obtain the raw result instead of the hex one.
 *
 * @param {Boolean} raw True to get the raw result, false to get the hex result
 *
 * @return {String|Array} The result
 */
SparkMD5.ArrayBuffer.prototype.end = function (raw) {
  var buff = this._buff,
    length = buff.length,
    tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    i,
    ret;
  // Pack the leftover bytes into 16 little-endian 32-bit words.
  for (i = 0; i < length; i += 1) {
    tail[i >> 2] |= buff[i] << ((i % 4) << 3);
  }
  this._finish(tail, length);
  ret = !!raw ? this._state : hex(this._state);
  this.reset();
  return ret;
};
SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish; | |
/**
 * Resets the internal state of the computation.
 *
 * @return {SparkMD5.ArrayBuffer} The instance itself
 */
SparkMD5.ArrayBuffer.prototype.reset = function () {
  this._length = 0;
  this._buff = new Uint8Array(0);
  // Standard MD5 initialization vector (A, B, C, D).
  this._state = [1732584193, -271733879, -1732584194, 271733878];
  return this;
};
/** | |
* Releases memory used by the incremental buffer and other aditional | |
* resources. If you plan to use the instance again, use reset instead. | |
*/ | |
SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy; | |
/**
 * Concats the buffered remainder with a newly appended buffer,
 * returning a new Uint8Array.
 *
 * @param {Uint8Array} first The buffered remainder (see reset/append)
 * @param {ArrayBuffer} second The newly appended buffer
 *
 * @return {Uint8Array} The combined bytes
 */
SparkMD5.ArrayBuffer.prototype._concatArrayBuffer = function (first, second) {
  var offset = first.length;
  var joined = new Uint8Array(offset + second.byteLength);
  joined.set(first);
  joined.set(new Uint8Array(second), offset);
  return joined;
};
/**
 * Performs the md5 hash on an array buffer.
 *
 * @param {ArrayBuffer} arr The array buffer
 * @param {Boolean} raw True to get the raw result, false to get the hex result
 *
 * @return {String|Array} The result
 */
SparkMD5.ArrayBuffer.hash = function (arr, raw) {
  var state = md51_array(new Uint8Array(arr));
  return raw ? state : hex(state);
};
return SparkMD5; | |
})); | |
},{}],98:[function(require,module,exports){ | |
arguments[4][61][0].apply(exports,arguments) | |
},{"./_stream_readable":99,"./_stream_writable":101,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"core-util-is":102,"inherits":33}],99:[function(require,module,exports){ | |
module.exports=require(63) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"core-util-is":102,"events":18,"inherits":33,"isarray":103,"stream":24,"string_decoder/":104}],100:[function(require,module,exports){ | |
arguments[4][64][0].apply(exports,arguments) | |
},{"./_stream_duplex":98,"core-util-is":102,"inherits":33}],101:[function(require,module,exports){ | |
arguments[4][65][0].apply(exports,arguments) | |
},{"./_stream_duplex":98,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"core-util-is":102,"inherits":33,"stream":24}],102:[function(require,module,exports){ | |
module.exports=require(66) | |
},{"buffer":20}],103:[function(require,module,exports){ | |
module.exports=require(67) | |
},{}],104:[function(require,module,exports){ | |
module.exports=require(68) | |
},{"buffer":20}],105:[function(require,module,exports){ | |
module.exports = require("./lib/_stream_transform.js") | |
},{"./lib/_stream_transform.js":100}],106:[function(require,module,exports){ | |
module.exports=require(40) | |
},{}],107:[function(require,module,exports){ | |
arguments[4][41][0].apply(exports,arguments) | |
},{"./has-keys":106,"object-keys":109}],108:[function(require,module,exports){ | |
var hasOwn = Object.prototype.hasOwnProperty; | |
var toString = Object.prototype.toString; | |
var isFunction = function (fn) { | |
var isFunc = (typeof fn === 'function' && !(fn instanceof RegExp)) || toString.call(fn) === '[object Function]'; | |
if (!isFunc && typeof window !== 'undefined') { | |
isFunc = fn === window.setTimeout || fn === window.alert || fn === window.confirm || fn === window.prompt; | |
} | |
return isFunc; | |
}; | |
module.exports = function forEach(obj, fn) { | |
if (!isFunction(fn)) { | |
throw new TypeError('iterator must be a function'); | |
} | |
var i, k, | |
isString = typeof obj === 'string', | |
l = obj.length, | |
context = arguments.length > 2 ? arguments[2] : null; | |
if (l === +l) { | |
for (i = 0; i < l; i++) { | |
if (context === null) { | |
fn(isString ? obj.charAt(i) : obj[i], i, obj); | |
} else { | |
fn.call(context, isString ? obj.charAt(i) : obj[i], i, obj); | |
} | |
} | |
} else { | |
for (k in obj) { | |
if (hasOwn.call(obj, k)) { | |
if (context === null) { | |
fn(obj[k], k, obj); | |
} else { | |
fn.call(context, obj[k], k, obj); | |
} | |
} | |
} | |
} | |
}; | |
},{}],109:[function(require,module,exports){ | |
arguments[4][42][0].apply(exports,arguments) | |
},{"./shim":111}],110:[function(require,module,exports){ | |
var toString = Object.prototype.toString; | |
module.exports = function isArguments(value) { | |
var str = toString.call(value); | |
var isArguments = str === '[object Arguments]'; | |
if (!isArguments) { | |
isArguments = str !== '[object Array]' | |
&& value !== null | |
&& typeof value === 'object' | |
&& typeof value.length === 'number' | |
&& value.length >= 0 | |
&& toString.call(value.callee) === '[object Function]'; | |
} | |
return isArguments; | |
}; | |
},{}],111:[function(require,module,exports){ | |
(function () {
  "use strict";
  // modified from https://github.com/kriskowal/es5-shim
  var has = Object.prototype.hasOwnProperty,
    toString = Object.prototype.toString,
    forEach = require('./foreach'),
    isArgs = require('./isArguments'),
    // IE < 9 hides own properties that shadow dontEnum built-ins from for-in.
    hasDontEnumBug = !({'toString': null}).propertyIsEnumerable('toString'),
    // Some engines wrongly enumerate a function's `prototype` property.
    hasProtoEnumBug = (function () {}).propertyIsEnumerable('prototype'),
    dontEnums = [
      "toString",
      "toLocaleString",
      "valueOf",
      "hasOwnProperty",
      "isPrototypeOf",
      "propertyIsEnumerable",
      "constructor"
    ],
    keysShim;
  // Object.keys replacement for pre-ES5 engines.
  keysShim = function keys(object) {
    var isObject = object !== null && typeof object === 'object',
      isFunction = toString.call(object) === '[object Function]',
      isArguments = isArgs(object),
      theKeys = [];
    if (!isObject && !isFunction && !isArguments) {
      throw new TypeError("Object.keys called on a non-object");
    }
    if (isArguments) {
      // NOTE(review): this pushes the *values* of the arguments object,
      // not its indices — quirk inherited from upstream; verify intent.
      forEach(object, function (value) {
        theKeys.push(value);
      });
    } else {
      var name,
        skipProto = hasProtoEnumBug && isFunction;
      for (name in object) {
        if (!(skipProto && name === 'prototype') && has.call(object, name)) {
          theKeys.push(name);
        }
      }
    }
    if (hasDontEnumBug) {
      // Re-add own properties that shadow the dontEnum built-ins (IE < 9),
      // except `constructor` on prototype objects themselves.
      var ctor = object.constructor,
        skipConstructor = ctor && ctor.prototype === object;
      forEach(dontEnums, function (dontEnum) {
        if (!(skipConstructor && dontEnum === 'constructor') && has.call(object, dontEnum)) {
          theKeys.push(dontEnum);
        }
      });
    }
    return theKeys;
  };
  module.exports = keysShim;
}());
},{"./foreach":108,"./isArguments":110}],112:[function(require,module,exports){ | |
var Transform = require('readable-stream/transform') | |
, inherits = require('util').inherits | |
, xtend = require('xtend') | |
// a noop _transform function: passes every chunk through unchanged
function noop (chunk, _enc, cb) {
  cb(null, chunk)
}
// create a new export function, used by both the main export and
// the .ctor export; normalises the optional (options, transform, flush)
// argument soup before handing off to `construct`.
function through2 (construct) {
  return function (options, transform, flush) {
    // options may be omitted entirely
    if (typeof options === 'function') {
      flush = transform
      transform = options
      options = {}
    }
    transform = typeof transform === 'function' ? transform : noop
    flush = typeof flush === 'function' ? flush : null
    return construct(options, transform, flush)
  }
}
// main export, just make me a transform stream!
module.exports = through2(function (options, transform, flush) {
  var t2 = new Transform(options)
  t2._transform = transform
  if (flush)
    t2._flush = flush
  return t2
})
// make me a reusable prototype that I can `new`, or implicitly `new`
// with a constructor call
module.exports.ctor = through2(function (options, transform, flush) {
  function Through2 (override) {
    if (!(this instanceof Through2))
      return new Through2(override)
    // per-instance options override the ones baked into the constructor
    this.options = xtend(options, override)
    Transform.call(this, this.options)
  }
  inherits(Through2, Transform)
  Through2.prototype._transform = transform
  if (flush)
    Through2.prototype._flush = flush
  return Through2
})
// like the main export, but defaults the stream to objectMode: true
module.exports.obj = through2(function (options, transform, flush) {
  var t2 = new Transform(xtend({ objectMode: true }, options))
  t2._transform = transform
  if (flush)
    t2._flush = flush
  return t2
})
},{"readable-stream/transform":105,"util":32,"xtend":107}],113:[function(require,module,exports){ | |
'use strict'; | |
/**
 * Stringify/parse functions that don't operate
 * recursively, so they avoid call stack exceeded
 * errors.
 */
exports.stringify = function stringify(input) {
  // Work queue of {obj, prefix, val} frames processed LIFO.
  // `val` frames emit literal text (brackets); `obj` frames emit a value
  // preceded by its `prefix` (comma and/or "key":).
  var queue = [];
  queue.push({obj: input});
  var res = '';
  var next, obj, prefix, val, i, arrayPrefix, keys, k, key, value, objPrefix;
  while ((next = queue.pop())) {
    obj = next.obj;
    prefix = next.prefix || '';
    val = next.val || '';
    res += prefix;
    if (val) {
      res += val;
    } else if (typeof obj !== 'object') {
      // Primitives are JSON-encoded; undefined becomes the text "null"
      // (via string concatenation of the null value).
      res += typeof obj === 'undefined' ? null : JSON.stringify(obj);
    } else if (obj === null) {
      res += 'null';
    } else if (Array.isArray(obj)) {
      // Push closing bracket first and elements in reverse so they pop
      // off in source order.
      queue.push({val: ']'});
      for (i = obj.length - 1; i >= 0; i--) {
        arrayPrefix = i === 0 ? '' : ',';
        queue.push({obj: obj[i], prefix: arrayPrefix});
      }
      queue.push({val: '['});
    } else { // object
      keys = [];
      for (k in obj) {
        if (obj.hasOwnProperty(k)) {
          keys.push(k);
        }
      }
      queue.push({val: '}'});
      for (i = keys.length - 1; i >= 0; i--) {
        key = keys[i];
        value = obj[key];
        objPrefix = (i > 0 ? ',' : '');
        objPrefix += JSON.stringify(key) + ':';
        queue.push({obj: value, prefix: objPrefix});
      }
      queue.push({val: '{'});
    }
  }
  return res;
};
// Convenience function for the parse function, copied in spirit from
// pouchCollate.parseIndexableString: attaches a just-completed value to
// the innermost open array/object, or leaves it on the stack as a
// pending object key.
function pop(obj, stack, metaStack) {
  var top = metaStack[metaStack.length - 1];
  if (top.element === obj) {
    // popping a meta-element, e.g. an object whose value is another object
    metaStack.pop();
    top = metaStack[metaStack.length - 1];
  }
  var element = top.element;
  if (Array.isArray(element)) {
    element.push(obj);
  } else if (top.index === stack.length - 2) {
    // a key is already pending on the stack: complete the key/value pair
    element[stack.pop()] = obj;
  } else {
    // first half of a pair: obj is the key, park it on the stack
    stack.push(obj);
  }
}
// Non-recursive JSON parser: scans character by character, keeping open
// arrays/objects on `metaStack` and completed values / pending keys on
// `stack` (see pop() above for how values are attached).
exports.parse = function (str) {
  var stack = [];
  var metaStack = []; // stack for arrays and objects
  var i = 0;
  var collationIndex,parsedNum,numChar;
  var parsedString,lastCh,numConsecutiveSlashes,ch;
  var arrayElement, objElement;
  while (true) {
    collationIndex = str[i++];
    // Container close (or end of input): finalize the top of the stack.
    if (collationIndex === '}' ||
        collationIndex === ']' ||
        typeof collationIndex === 'undefined') {
      if (stack.length === 1) {
        return stack.pop();
      } else {
        pop(stack.pop(), stack, metaStack);
        continue;
      }
    }
    switch (collationIndex) {
      case ' ':
      case '\t':
      case '\n':
      case ':':
      case ',':
        // Structural whitespace/punctuation carries no information here.
        break;
      case 'n':
        i += 3; // 'ull'
        pop(null, stack, metaStack);
        break;
      case 't':
        i += 3; // 'rue'
        pop(true, stack, metaStack);
        break;
      case 'f':
        i += 4; // 'alse'
        pop(false, stack, metaStack);
        break;
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
      case '-':
        // Number: back up one character, then consume every numeric
        // character (digits, sign, dot, exponent).
        parsedNum = '';
        i--;
        while (true) {
          numChar = str[i++];
          if (/[\d\.\-e\+]/.test(numChar)) {
            parsedNum += numChar;
          } else {
            i--;
            break;
          }
        }
        pop(parseFloat(parsedNum), stack, metaStack);
        break;
      case '"':
        // String: scan to the closing quote. A quote preceded by an odd
        // number of consecutive backslashes is escaped and kept.
        parsedString = '';
        lastCh = void 0;
        numConsecutiveSlashes = 0;
        while (true) {
          ch = str[i++];
          if (ch !== '"' || (lastCh === '\\' &&
              numConsecutiveSlashes % 2 === 1)) {
            parsedString += ch;
            lastCh = ch;
            if (lastCh === '\\') {
              numConsecutiveSlashes++;
            } else {
              numConsecutiveSlashes = 0;
            }
          } else {
            break;
          }
        }
        // Delegate escape-sequence decoding to the native JSON parser.
        pop(JSON.parse('"' + parsedString + '"'), stack, metaStack);
        break;
      case '[':
        // `index` records where this container sits on the value stack;
        // pop() uses it to tell keys from values.
        arrayElement = { element: [], index: stack.length };
        stack.push(arrayElement.element);
        metaStack.push(arrayElement);
        break;
      case '{':
        objElement = { element: {}, index: stack.length };
        stack.push(objElement.element);
        metaStack.push(objElement);
        break;
      default:
        throw new Error(
          'unexpectedly reached end of input: ' + collationIndex);
    }
  }
};
},{}]},{},[11]) |
This file has been truncated, but you can view the full file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
require=(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ | |
(function (process,Buffer){ | |
'use strict'; | |
var levelup = require('levelup'); | |
var originalLeveldown = require('leveldown'); | |
var sublevel = require('level-sublevel'); | |
var through = require('through2').obj; | |
var errors = require('../deps/errors'); | |
var merge = require('../merge'); | |
var utils = require('../utils'); | |
var migrate = require('../deps/migrate'); | |
var vuvuzela = require('vuvuzela'); | |
var DOC_STORE = 'document-store'; | |
var BY_SEQ_STORE = 'by-sequence'; | |
var ATTACHMENT_STORE = 'attach-store'; | |
var BINARY_STORE = 'attach-binary-store'; | |
var LOCAL_STORE = 'local-store'; | |
var META_STORE = 'meta-store'; | |
var BATCH_SIZE = 50; | |
// leveldb barks if we try to open a db multiple times | |
// so we cache opened connections here for initstore() | |
var dbStores = new utils.Map(); | |
// store the value of update_seq in the by-sequence store the key name will | |
// never conflict, since the keys in the by-sequence store are integers | |
var UPDATE_SEQ_KEY = '_local_last_update_seq'; | |
var DOC_COUNT_KEY = '_local_doc_count'; | |
var UUID_KEY = '_local_uuid'; | |
var MD5_PREFIX = 'md5-'; | |
var vuvuEncoding = { | |
encode: vuvuzela.stringify, | |
decode: vuvuzela.parse, | |
buffer: false, | |
type: 'cheap-json' | |
}; | |
function LevelPouch(opts, callback) { | |
opts = utils.clone(opts); | |
var api = this; | |
var instanceId; | |
var stores = {}; | |
var db; | |
var name = opts.name; | |
if (typeof opts.createIfMissing === 'undefined') { | |
opts.createIfMissing = true; | |
} | |
var leveldown = opts.db || originalLeveldown; | |
if (typeof leveldown.destroy !== 'function') { | |
leveldown.destroy = function (name, cb) { cb(); }; | |
} | |
var dbStore; | |
if (dbStores.has(leveldown.name)) { | |
dbStore = dbStores.get(leveldown.name); | |
} else { | |
dbStore = new utils.Map(); | |
dbStores.set(leveldown.name, dbStore); | |
} | |
if (dbStore.has(name)) { | |
db = dbStore.get(name); | |
afterDBCreated(); | |
} else { | |
dbStore.set(name, sublevel(levelup(name, opts, function (err) { | |
if (err) { | |
dbStore["delete"](name); | |
return callback(err); | |
} | |
db = dbStore.get(name); | |
db._locks = db._locks || new utils.Set(); | |
db._docCountQueue = { | |
queue : [], | |
running : false, | |
docCount : -1 | |
}; | |
if (opts.db || opts.noMigrate) { | |
afterDBCreated(); | |
} else { | |
migrate.toSublevel(name, db, afterDBCreated); | |
} | |
}))); | |
} | |
function afterDBCreated() { | |
stores.docStore = db.sublevel(DOC_STORE, {valueEncoding: vuvuEncoding}); | |
stores.bySeqStore = db.sublevel(BY_SEQ_STORE, {valueEncoding: 'json'}); | |
stores.attachmentStore = | |
db.sublevel(ATTACHMENT_STORE, {valueEncoding: 'json'}); | |
stores.binaryStore = db.sublevel(BINARY_STORE, {valueEncoding: 'binary'}); | |
stores.localStore = db.sublevel(LOCAL_STORE, {valueEncoding: 'json'}); | |
stores.metaStore = db.sublevel(META_STORE, {valueEncoding: 'json'}); | |
migrate.localAndMetaStores(db, stores, function () { | |
stores.metaStore.get(UPDATE_SEQ_KEY, function (err, value) { | |
if (typeof db._updateSeq === 'undefined') { | |
db._updateSeq = value || 0; | |
} | |
stores.metaStore.get(DOC_COUNT_KEY, function (err, value) { | |
db._docCountQueue.docCount = !err ? value : 0; | |
countDocs(function (err) { // notify queue that the docCount is ready | |
if (err) { | |
api.emit('error', err); | |
} | |
stores.metaStore.get(UUID_KEY, function (err, value) { | |
instanceId = !err ? value : utils.uuid(); | |
stores.metaStore.put(UUID_KEY, instanceId, function (err, value) { | |
process.nextTick(function () { | |
callback(null, api); | |
}); | |
}); | |
}); | |
}); | |
}); | |
}); | |
}); | |
} | |
// Report the current document count via `callback(err, count)`.
// Fix: the cached-value branch used to `return` the raw number
// synchronously without ever invoking `callback`, so any caller that hit
// that path (all call sites here are callback-style, e.g. _info and
// _allDocs) would hang forever. The cached count is now delivered
// through the callback like every other result.
function countDocs(callback) {
  if (db._docCountQueue.running || !db._docCountQueue.queue.length ||
      db._docCountQueue.docCount === -1) {
    return incrementDocCount(0, callback); // wait for fresh data
  }
  return callback(null, db._docCountQueue.docCount); // use cached value
}
// Serially drains db._docCountQueue, applying one pending doc-count
// delta at a time to the persisted DOC_COUNT_KEY. No-op while a write is
// already in flight, when nothing is queued, or before the initial count
// has been loaded (the -1 sentinel set at open time).
function applyNextDocCountDelta() {
  if (db._docCountQueue.running || !db._docCountQueue.queue.length ||
      db._docCountQueue.docCount === -1) {
    return;
  }
  db._docCountQueue.running = true;
  var item = db._docCountQueue.queue.shift();
  if (db.isClosed()) {
    return item.callback(new Error('database is closed'));
  }
  stores.metaStore.get(DOC_COUNT_KEY, function (err, docCount) {
    // A missing key simply means nothing has been counted yet.
    docCount = !err ? docCount : 0;
    // Fix: cache and report the count *after* the delta is applied.
    // Previously the pre-delta value was cached, so the in-memory count
    // stayed stale by `delta` relative to what was just persisted.
    var newCount = docCount + item.delta;
    function complete(err) {
      db._docCountQueue.docCount = newCount;
      item.callback(err, newCount);
      db._docCountQueue.running = false;
      applyNextDocCountDelta(); // pick up deltas queued meanwhile
    }
    if (item.delta === 0) {
      // Zero-delta items are pure asynchronous reads of the count.
      complete();
    } else {
      stores.metaStore.put(DOC_COUNT_KEY, newCount, complete);
    }
  });
}
// Queue a change of `delta` to the persisted document count; `callback`
// fires with (err, docCount) once the delta has been applied. A delta of
// 0 acts as an asynchronous read of the current count.
function incrementDocCount(delta, callback) {
  db._docCountQueue.queue.push({delta : delta, callback : callback});
  applyNextDocCountDelta();
}
api.type = function () { | |
return 'leveldb'; | |
}; | |
api._id = function (callback) { | |
callback(null, instanceId); | |
}; | |
// Standard adapter info: the live (non-deleted) document count plus the
// latest update sequence persisted in the meta store.
api._info = function (callback) {
  countDocs(function (err, docCount) {
    if (err) {
      return callback(err);
    }
    stores.metaStore.get(UPDATE_SEQ_KEY, function (err, otherUpdateSeq) {
      if (err) {
        // key never written yet — fall back to the in-memory counter
        otherUpdateSeq = db._updateSeq;
      }
      return callback(null, {
        doc_count: docCount,
        update_seq: otherUpdateSeq
      });
    });
  });
};
// Zero-pad a sequence number to 16 characters so lexicographic key order
// in the by-sequence store matches numeric order.
function formatSeq(n) {
  var padded = '0000000000000000' + n;
  return padded.substring(padded.length - 16);
}
// Inverse of formatSeq: turn a zero-padded decimal key back into an int.
function parseSeq(s) {
  var seq = parseInt(s, 10);
  return seq;
}
// Stamp the id/rev from the metadata onto the raw document body and hand
// back a {doc, metadata} pair via `callback`. Errors out when a stored
// rev disagrees with the metadata's rev.
function makeDoc(rawDoc, callback) {
  var metadata = rawDoc.metadata;
  var body = rawDoc.data;
  body._id = metadata.id;
  if (!('_rev' in body)) {
    // older versions did not persist the rev on the doc body
    body._rev = metadata.rev;
  } else if (body._rev !== metadata.rev) {
    return callback(new Error('wrong doc returned'));
  }
  callback(null, {doc: body, metadata: metadata});
}
// Fetch a single document: read its metadata from the doc store, resolve
// the requested (or winning) rev to a sequence number, then read the doc
// body from the by-sequence store. Event listeners guard against writes
// that land between the two reads.
api._get = function (id, opts, callback) {
  opts = utils.clone(opts);
  // Collect writes to this doc id that race with the metadata read.
  var docChanged = [];
  function didDocChange(doc) {
    docChanged.push(doc);
  }
  db.on('pouchdb-id-' + id, didDocChange);
  stores.docStore.get(id, function (err, metadata) {
    db.removeListener('pouchdb-id-' + id, didDocChange);
    if (err || !metadata) {
      return callback(errors.MISSING_DOC);
    }
    if (utils.isDeleted(metadata) && !opts.rev) {
      return callback(errors.error(errors.MISSING_DOC, "deleted"));
    }
    var updated;
    function ifUpdate(doc) {
      updated = doc;
    }
    // Use the explicitly requested rev when given, else the winner.
    var rev = merge.winningRev(metadata);
    rev = opts.rev ? opts.rev : rev;
    var seq = metadata.rev_map[rev];
    // A racing write may already carry the body for this seq — use it.
    var anyChanged = docChanged.filter(function (doc) {
      return doc.metadata.seq === seq;
    });
    if (anyChanged.length) {
      return makeDoc(anyChanged.pop(), callback);
    }
    // NOTE(review): writeDoc emits 'pouchdb-' + formatSeq(seq) while this
    // listens on the unformatted seq — the two names never match, so
    // ifUpdate looks dead. Verify against the writer before relying on it.
    db.on('pouchdb-' + seq, ifUpdate);
    stores.bySeqStore.get(formatSeq(seq), function (err, doc) {
      db.removeListener('pouchdb-' + seq, ifUpdate);
      if (updated) {
        return makeDoc(updated, callback);
      }
      if (!doc) {
        return callback(errors.MISSING_DOC);
      }
      if ('_id' in doc && doc._id !== metadata.id) {
        // this failing implies something very wrong
        return callback(new Error('wrong doc returned'));
      }
      doc._id = metadata.id;
      if ('_rev' in doc) {
        if (doc._rev !== rev) {
          // this failing implies something very wrong
          return callback(new Error('wrong doc returned'));
        }
      } else {
        // we didn't always store this
        doc._rev = rev;
      }
      return callback(null, {doc: doc, metadata: metadata});
    });
  });
};
// not technically part of the spec, but if putAttachment has its own
// method...
// Read an attachment body by digest from the binary store. Returns a
// base64 string when opts.encode is set; otherwise a Blob in the browser
// or a Buffer in node.
api._getAttachment = function (attachment, opts, callback) {
  var digest = attachment.digest;
  stores.binaryStore.get(digest, function (err, attach) {
    var data;
    if (err && err.name === 'NotFoundError') {
      // Empty attachment
      // (new Buffer('') is the deprecated constructor form — equivalent
      // to an empty buffer; kept for the old node versions this targets)
      data = opts.encode ? '' : process.browser ?
        utils.createBlob([''], {type: attachment.content_type}) :
        new Buffer('');
      return callback(null, data);
    }
    if (err) {
      return callback(err);
    }
    if (process.browser) {
      if (opts.encode) {
        data = utils.btoa(attach);
      } else {
        data = utils.createBlob([utils.fixBinary(attach)],
          {type: attachment.content_type});
      }
    } else {
      data = opts.encode ? utils.btoa(attach) : attach;
    }
    callback(null, data);
  });
};
// Take an in-process write lock on a document id; returns false when
// somebody else already holds it, true on success.
api.lock = function (id) {
  if (db._locks.has(id)) {
    return false;
  }
  db._locks.add(id);
  return true;
};
// Release a lock previously taken via api.lock; returns whether the
// lock was actually held.
api.unlock = function (id) {
  var wasHeld = db._locks.has(id);
  if (wasHeld) {
    db._locks["delete"](id);
  }
  return wasHeld;
};
api._bulkDocs = function (req, opts, callback) { | |
var newEdits = opts.new_edits; | |
var results = new Array(req.docs.length); | |
// parse the docs and give each a sequence number | |
var userDocs = req.docs; | |
var info = userDocs.map(function (doc, i) { | |
if (doc._id && utils.isLocalId(doc._id)) { | |
return doc; | |
} | |
var newDoc = utils.parseDoc(doc, newEdits); | |
newDoc._bulk_seq = i; | |
if (newDoc.metadata && !newDoc.metadata.rev_map) { | |
newDoc.metadata.rev_map = {}; | |
} | |
return newDoc; | |
}); | |
var current = 0; | |
var infoErrors = info.filter(function (doc) { | |
return doc.error; | |
}); | |
if (infoErrors.length) { | |
return callback(infoErrors[0]); | |
} | |
var inProgress = 0; | |
function processDocs() { | |
var index = current; | |
if (inProgress > BATCH_SIZE) { | |
return; | |
} | |
if (index >= info.length) { | |
if (inProgress === 0) { | |
return complete(); | |
} else { | |
return; | |
} | |
} | |
var currentDoc = info[index]; | |
current++; | |
inProgress++; | |
if (currentDoc._id && utils.isLocalId(currentDoc._id)) { | |
api[currentDoc._deleted ? '_removeLocal' : '_putLocal']( | |
currentDoc, function (err, resp) { | |
if (err) { | |
results[index] = err; | |
} else { | |
results[index] = {}; | |
} | |
inProgress--; | |
processDocs(); | |
}); | |
return; | |
} | |
if (!api.lock(currentDoc.metadata.id)) { | |
results[index] = makeErr(errors.REV_CONFLICT, | |
'someobody else is accessing this'); | |
inProgress--; | |
return processDocs(); | |
} | |
stores.docStore.get(currentDoc.metadata.id, function (err, oldDoc) { | |
if (err) { | |
if (err.name === 'NotFoundError') { | |
insertDoc(currentDoc, index, function () { | |
api.unlock(currentDoc.metadata.id); | |
inProgress--; | |
processDocs(); | |
}); | |
} else { | |
err.error = true; | |
results[index] = err; | |
api.unlock(currentDoc.metadata.id); | |
inProgress--; | |
processDocs(); | |
} | |
} else { | |
updateDoc(oldDoc, currentDoc, index, function () { | |
api.unlock(currentDoc.metadata.id); | |
inProgress--; | |
processDocs(); | |
}); | |
} | |
}); | |
if (newEdits) { | |
processDocs(); | |
} | |
} | |
// Write a brand-new document (no prior metadata in the doc store) and
// bump the live-doc count on success.
function insertDoc(doc, index, callback) {
  // Can't insert new deleted documents
  if ('was_delete' in opts && utils.isDeleted(doc.metadata)) {
    results[index] = makeErr(errors.MISSING_DOC, doc._bulk_seq);
    return callback();
  }
  writeDoc(doc, index, function (err) {
    if (err) {
      return callback(err);
    }
    if (utils.isDeleted(doc.metadata)) {
      // deleted docs never count toward doc_count
      return callback();
    }
    incrementDocCount(1, callback);
  });
}
// Merge an incoming revision into an existing doc's rev tree, detect
// conflicts, adjust the live-doc count, then persist the result.
function updateDoc(oldDoc, docInfo, index, callback) {
  var merged =
    merge.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000);
  // With new_edits, a write conflicts when it re-deletes an already
  // deleted doc, or when it does not extend a current leaf of the tree.
  var conflict = (utils.isDeleted(oldDoc) &&
                  utils.isDeleted(docInfo.metadata) &&
                  newEdits) ||
    (!utils.isDeleted(oldDoc) &&
     newEdits && merged.conflicts !== 'new_leaf');
  if (conflict) {
    results[index] = makeErr(errors.REV_CONFLICT, docInfo._bulk_seq);
    return callback();
  }
  docInfo.metadata.rev_tree = merged.tree;
  // Carry over the rev -> seq map so older revisions stay resolvable.
  docInfo.metadata.rev_map = oldDoc.rev_map;
  var delta = 0;
  var oldDeleted = utils.isDeleted(oldDoc);
  var newDeleted = utils.isDeleted(docInfo.metadata);
  // +1 when a deleted doc is revived, -1 when a live doc is deleted,
  // 0 when deleted-ness is unchanged.
  delta = (oldDeleted === newDeleted) ? 0 :
    oldDeleted < newDeleted ? -1 : 1;
  incrementDocCount(delta, function (err) {
    if (err) {
      return callback(err);
    }
    writeDoc(docInfo, index, callback);
  });
}
function writeDoc(doc, index, callback2) { | |
var err = null; | |
var recv = 0; | |
doc.data._id = doc.metadata.id; | |
doc.data._rev = doc.metadata.rev; | |
if (utils.isDeleted(doc.metadata)) { | |
doc.data._deleted = true; | |
} | |
var attachments = doc.data._attachments ? | |
Object.keys(doc.data._attachments) : | |
[]; | |
function collectResults(attachmentErr) { | |
if (!err) { | |
if (attachmentErr) { | |
err = attachmentErr; | |
callback2(err); | |
} else if (recv === attachments.length) { | |
finish(); | |
} | |
} | |
} | |
function attachmentSaved(err) { | |
recv++; | |
collectResults(err); | |
} | |
function onMD5Load(doc, key, data, attachmentSaved) { | |
return function (result) { | |
saveAttachment(doc, MD5_PREFIX + result, key, data, attachmentSaved); | |
}; | |
} | |
function onLoadEnd(doc, key, attachmentSaved) { | |
return function (e) { | |
var data = utils.arrayBufferToBinaryString(e.target.result); | |
utils.MD5(data).then( | |
onMD5Load(doc, key, data, attachmentSaved) | |
); | |
}; | |
} | |
for (var i = 0; i < attachments.length; i++) { | |
var key = attachments[i]; | |
if (doc.data._attachments[key].stub) { | |
recv++; | |
collectResults(); | |
continue; | |
} | |
var att = doc.data._attachments[key]; | |
var data; | |
if (typeof att.data === 'string') { | |
try { | |
data = utils.atob(att.data); | |
} catch (e) { | |
callback(utils.extend({}, errors.BAD_ARG, | |
{reason: "Attachments need to be base64 encoded"})); | |
return; | |
} | |
} else if (!process.browser) { | |
data = att.data; | |
} else { // browser | |
var reader = new FileReader(); | |
reader.onloadend = onLoadEnd(doc, key, attachmentSaved); | |
reader.readAsArrayBuffer(att.data); | |
continue; | |
} | |
utils.MD5(data).then( | |
onMD5Load(doc, key, data, attachmentSaved) | |
); | |
} | |
function finish() { | |
var seq = doc.metadata.rev_map[doc.metadata.rev]; | |
if (!seq) { | |
// check that there aren't any existing revisions with the same | |
// reivision id, else we shouldn't increment updateSeq | |
seq = ++db._updateSeq; | |
} | |
doc.metadata.rev_map[doc.metadata.rev] = doc.metadata.seq = seq; | |
var seqKey = formatSeq(seq); | |
db.emit('pouchdb-id-' + doc.metadata.id, doc); | |
db.emit('pouchdb-' + seqKey, doc); | |
db.batch([{ | |
key: seqKey, | |
value: doc.data, | |
prefix: stores.bySeqStore, | |
type: 'put', | |
valueEncoding: 'json' | |
}, { | |
key: doc.metadata.id, | |
value: doc.metadata, | |
prefix: stores.docStore, | |
type: 'put', | |
valueEncoding: vuvuEncoding | |
}], function (err) { | |
if (!err) { | |
db.emit('pouchdb-id-' + doc.metadata.id, doc); | |
db.emit('pouchdb-' + seqKey, doc); | |
} | |
return stores.metaStore.put(UPDATE_SEQ_KEY, db._updateSeq, | |
function (err) { | |
if (err) { | |
results[index] = err; | |
} else { | |
results[index] = doc; | |
} | |
return callback2(); | |
}); | |
}); | |
} | |
if (!attachments.length) { | |
finish(); | |
} | |
} | |
function saveAttachment(docInfo, digest, key, data, callback) { | |
delete docInfo.data._attachments[key].data; | |
docInfo.data._attachments[key].digest = digest; | |
stores.attachmentStore.get(digest, function (err, oldAtt) { | |
if (err && err.name !== 'NotFoundError') { | |
return callback(err); | |
} | |
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@'); | |
var newAtt = {}; | |
if (oldAtt) { | |
if (oldAtt.refs) { | |
// only update references if this attachment already has them | |
// since we cannot migrate old style attachments here without | |
// doing a full db scan for references | |
newAtt.refs = oldAtt.refs; | |
newAtt.refs[ref] = true; | |
} | |
} else { | |
newAtt.refs = {}; | |
newAtt.refs[ref] = true; | |
} | |
stores.attachmentStore.put(digest, newAtt, function (err) { | |
// do not try to store empty attachments | |
if (data.length === 0) { | |
return callback(err); | |
} | |
// doing this in batch causes a test to fail, wtf? | |
stores.binaryStore.put(digest, data, function (err) { | |
callback(err); | |
}); | |
}); | |
}); | |
} | |
// Called once every doc in the batch has been processed: restores the
// caller's ordering, maps internal result records into the public
// bulkDocs response shape, and notifies change listeners.
function complete() {
  results.sort(function (a, b) { return a._bulk_seq - b._bulk_seq; });
  var aresults = results.map(function (result) {
    if (result._bulk_seq) {
      // strip the internal ordering tag before returning
      delete result._bulk_seq;
    } else if (!Object.keys(result).length) {
      // _putLocal/_removeLocal successes were recorded as empty objects
      return {
        ok: true
      };
    }
    if (result.error) {
      return result;
    }
    var metadata = result.metadata;
    var rev = merge.winningRev(metadata);
    return {
      ok: true,
      id: metadata.id,
      rev: rev
    };
  });
  LevelPouch.Changes.notify(name);
  process.nextTick(function () { callback(null, aresults); });
}
// Tag an error object with the originating doc's position in the request
// so complete() can sort results back into request order.
function makeErr(err, seq) {
  err._bulk_seq = seq;
  return err;
}
processDocs(); | |
}; | |
api._allDocs = function (opts, callback) { | |
opts = utils.clone(opts); | |
countDocs(function (err, docCount) { | |
if (err) { | |
return callback(err); | |
} | |
var readstreamOpts = {}; | |
var skip = opts.skip || 0; | |
if (opts.startkey) { | |
readstreamOpts.start = opts.startkey; | |
} | |
if (opts.endkey) { | |
readstreamOpts.end = opts.endkey; | |
} | |
if (opts.key) { | |
readstreamOpts.start = readstreamOpts.end = opts.key; | |
} | |
if (opts.descending) { | |
readstreamOpts.reverse = true; | |
// switch start and ends | |
var tmp = readstreamOpts.start; | |
readstreamOpts.start = readstreamOpts.end; | |
readstreamOpts.end = tmp; | |
} | |
var limit; | |
if (typeof opts.limit === 'number') { | |
limit = opts.limit; | |
} else { | |
limit = -1; | |
} | |
if (limit === 0 || | |
('start' in readstreamOpts && 'end' in readstreamOpts && | |
readstreamOpts.start > readstreamOpts.end)) { | |
// should return 0 results when start is greater than end. | |
// normally level would "fix" this for us by reversing the order, | |
// so short-circuit instead | |
return callback(null, { | |
total_rows: docCount, | |
offset: opts.skip, | |
rows: [] | |
}); | |
} | |
var results = []; | |
var docstream = stores.docStore.readStream(readstreamOpts); | |
var throughStream = through(function (entry, _, next) { | |
if (!utils.isDeleted(entry.value)) { | |
if (skip-- > 0) { | |
next(); | |
return; | |
} else if (limit-- === 0) { | |
docstream.unpipe(); | |
docstream.destroy(); | |
next(); | |
return; | |
} | |
} else if (opts.deleted !== 'ok') { | |
next(); | |
return; | |
} | |
function allDocsInner(metadata, data) { | |
var doc = { | |
id: metadata.id, | |
key: metadata.id, | |
value: { | |
rev: merge.winningRev(metadata) | |
} | |
}; | |
if (opts.include_docs) { | |
doc.doc = data; | |
doc.doc._rev = doc.value.rev; | |
if (opts.conflicts) { | |
doc.doc._conflicts = merge.collectConflicts(metadata); | |
} | |
for (var att in doc.doc._attachments) { | |
if (doc.doc._attachments.hasOwnProperty(att)) { | |
doc.doc._attachments[att].stub = true; | |
} | |
} | |
} | |
if (opts.inclusive_end === false && metadata.id === opts.endkey) { | |
return next(); | |
} else if (utils.isDeleted(metadata)) { | |
if (opts.deleted === 'ok') { | |
doc.value.deleted = true; | |
doc.doc = null; | |
} else { | |
return next(); | |
} | |
} | |
results.push(doc); | |
next(); | |
} | |
var metadata = entry.value; | |
if (opts.include_docs) { | |
var seq = metadata.rev_map[merge.winningRev(metadata)]; | |
stores.bySeqStore.get(formatSeq(seq), function (err, data) { | |
allDocsInner(metadata, data); | |
}); | |
} | |
else { | |
allDocsInner(metadata); | |
} | |
}, function (next) { | |
callback(null, { | |
total_rows: docCount, | |
offset: opts.skip, | |
rows: results | |
}); | |
next(); | |
}).on('unpipe', function () { | |
throughStream.end(); | |
}); | |
docstream.on('error', callback); | |
docstream.pipe(throughStream); | |
}); | |
}; | |
api._changes = function (opts) { | |
opts = utils.clone(opts); | |
if (opts.continuous) { | |
var id = name + ':' + utils.uuid(); | |
LevelPouch.Changes.addListener(name, id, api, opts); | |
LevelPouch.Changes.notify(name); | |
return { | |
cancel: function () { | |
LevelPouch.Changes.removeListener(name, id); | |
} | |
}; | |
} | |
var descending = opts.descending; | |
var results = []; | |
var last_seq = 0; | |
var called = 0; | |
var streamOpts = { | |
reverse: descending | |
}; | |
var limit; | |
if ('limit' in opts && opts.limit > 0) { | |
limit = opts.limit; | |
} | |
if (!streamOpts.reverse) { | |
streamOpts.start = formatSeq(opts.since ? opts.since + 1 : 0); | |
} | |
var filter = utils.filterChange(opts); | |
var returnDocs; | |
if ('returnDocs' in opts) { | |
returnDocs = opts.returnDocs; | |
} else { | |
returnDocs = true; | |
} | |
function complete() { | |
opts.done = true; | |
if (returnDocs && opts.limit) { | |
if (opts.limit < results.length) { | |
results.length = opts.limit; | |
} | |
} | |
changeStream.unpipe(throughStream); | |
changeStream.destroy(); | |
if (!opts.continuous && !opts.cancelled) { | |
opts.complete(null, {results: results, last_seq: last_seq}); | |
} | |
} | |
var changeStream = stores.bySeqStore.readStream(streamOpts); | |
var throughStream = through(function (data, _, next) { | |
if (limit && called >= limit) { | |
complete(); | |
return next(); | |
} | |
if (opts.cancelled || opts.done) { | |
return next(); | |
} | |
stores.docStore.get(data.value._id, function (err, metadata) { | |
if (opts.cancelled || opts.done || db.isClosed() || | |
utils.isLocalId(metadata.id)) { | |
return next(); | |
} | |
var doc = data.value; | |
doc._rev = merge.winningRev(metadata); | |
var change = opts.processChange(doc, metadata, opts); | |
change.seq = metadata.seq; | |
if (last_seq < metadata.seq) { | |
last_seq = metadata.seq; | |
} | |
// Ensure duplicated dont overwrite winning rev | |
if (parseSeq(data.key) === metadata.rev_map[change.doc._rev] && | |
filter(change)) { | |
called++; | |
utils.call(opts.onChange, change); | |
if (returnDocs) { | |
results.push(change); | |
} | |
} | |
next(); | |
}); | |
}, function (next) { | |
if (opts.cancelled) { | |
return next(); | |
} | |
if (returnDocs && opts.limit) { | |
if (opts.limit < results.length) { | |
results.length = opts.limit; | |
} | |
} | |
next(); | |
}).on('unpipe', function () { | |
throughStream.end(); | |
complete(); | |
}); | |
changeStream.pipe(throughStream); | |
return { | |
cancel: function () { | |
opts.cancelled = true; | |
complete(); | |
} | |
}; | |
}; | |
// Close the underlying level instance and drop it from the connection
// cache so a later open creates a fresh handle.
api._close = function (callback) {
  if (db.isClosed()) {
    return callback(errors.NOT_OPEN);
  }
  db.close(function (err) {
    if (err) {
      return callback(err);
    }
    dbStore["delete"](name);
    callback();
  });
};
// Return the stored revision tree for a document, or MISSING_DOC when
// the metadata cannot be read.
api._getRevisionTree = function (docId, callback) {
  stores.docStore.get(docId, function (err, metadata) {
    if (!err) {
      return callback(null, metadata.rev_tree);
    }
    callback(errors.MISSING_DOC);
  });
};
api._doCompaction = function (docId, rev_tree, revs, callback) { | |
stores.docStore.get(docId, function (err, metadata) { | |
if (err) { | |
return callback(err); | |
} | |
var seqs = metadata.rev_map; // map from rev to seq | |
metadata.rev_tree = rev_tree; | |
if (!revs.length) { | |
return callback(); | |
} | |
var batch = []; | |
batch.push({ | |
key: metadata.id, | |
value: metadata, | |
type: 'put', | |
valueEncoding: vuvuEncoding, | |
prefix: stores.docStore | |
}); | |
revs.forEach(function (rev) { | |
var seq = seqs[rev]; | |
if (!seq) { | |
return; | |
} | |
batch.push({ | |
key: formatSeq(seq), | |
type: 'del', | |
prefix: stores.bySeqStore | |
}); | |
}); | |
db.batch(batch, callback); | |
}); | |
}; | |
// Fetch a _local (non-replicated) document; any read failure is
// reported as MISSING_DOC.
api._getLocal = function (id, callback) {
  stores.localStore.get(id, function (err, doc) {
    if (!err) {
      return callback(null, doc);
    }
    callback(errors.MISSING_DOC);
  });
};
// Write a _local (non-replicated) document. Local docs use a simple
// "0-N" rev counter instead of the full revision tree.
api._putLocal = function (doc, callback) {
  delete doc._revisions; // ignore this, trust the rev
  var oldRev = doc._rev;
  var id = doc._id;
  stores.localStore.get(id, function (err, resp) {
    if (err) {
      // doc not stored, but caller claims a rev exists -> conflict.
      // NOTE(review): a non-NotFound read error with no oldRev falls
      // through and is effectively swallowed — verify that's intended.
      if (oldRev) {
        return callback(errors.REV_CONFLICT);
      }
    }
    if (resp && resp._rev !== oldRev) {
      return callback(errors.REV_CONFLICT);
    }
    if (!oldRev) {
      doc._rev = '0-1';
    } else {
      // bump the numeric half of the "0-N" rev
      doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
    }
    stores.localStore.put(id, doc, function (err) {
      if (err) {
        return callback(err);
      }
      var ret = {ok: true, id: doc._id, rev: doc._rev};
      callback(null, ret);
    });
  });
};
// Delete a _local document; requires the caller's rev to match the
// stored one, mirroring normal CouchDB delete semantics.
api._removeLocal = function (doc, callback) {
  stores.localStore.get(doc._id, function (err, resp) {
    if (err) {
      return callback(err);
    }
    if (resp._rev !== doc._rev) {
      return callback(errors.REV_CONFLICT);
    }
    stores.localStore.del(doc._id, function (err) {
      if (err) {
        return callback(err);
      }
      // '0-0' marks a deleted local doc
      var ret = {ok: true, id: doc._id, rev: '0-0'};
      callback(null, ret);
    });
  });
};
} | |
LevelPouch.valid = function () { | |
return process && !process.browser; | |
}; | |
// Close any cached open connection for `name`, then destroy the
// underlying leveldb store. Exposed as a promise via utils.toPromise.
LevelPouch.destroy = utils.toPromise(function (name, opts, callback) {
  opts = utils.clone(opts);
  var leveldown = opts.db || originalLeveldown;
  function callDestroy(name, cb) {
    if (typeof leveldown.destroy === 'function') {
      leveldown.destroy(name, cb);
    } else {
      // Fix: this branch used to invoke the outer `callback` instead of
      // the `cb` argument, silently ignoring it. Behavior is unchanged
      // today (every call site passes `callback` as `cb`) but the old
      // form was a trap for any future caller.
      process.nextTick(cb);
    }
  }
  var dbStore;
  if (dbStores.has(leveldown.name)) {
    dbStore = dbStores.get(leveldown.name);
  } else {
    // no connection cache for this backend: nothing is open, destroy away
    return callDestroy(name, callback);
  }
  if (dbStore.has(name)) {
    // a live handle exists; detach change listeners and close it first,
    // since leveldb refuses to open/destroy a store that is still open
    LevelPouch.Changes.removeAllListeners(name);
    dbStore.get(name).close(function () {
      dbStore["delete"](name);
      callDestroy(name, callback);
    });
  } else {
    callDestroy(name, callback);
  }
});
LevelPouch.use_prefix = false; | |
LevelPouch.Changes = new utils.Changes(); | |
module.exports = LevelPouch; | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"../deps/errors":5,"../deps/migrate":"6ZjBAW","../merge":8,"../utils":15,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"level-sublevel":35,"leveldown":"1XEYOF","levelup":49,"through2":112,"vuvuzela":113}],2:[function(require,module,exports){ | |
"use strict"; | |
var createBlob = require('./blob.js'); | |
var errors = require('./errors'); | |
var utils = require("../utils"); | |
var hasUpload; | |
function ajax(options, adapterCallback) { | |
var requestCompleted = false; | |
var callback = utils.getArguments(function (args) { | |
if (requestCompleted) { | |
return; | |
} | |
adapterCallback.apply(this, args); | |
requestCompleted = true; | |
}); | |
if (typeof options === "function") { | |
callback = options; | |
options = {}; | |
} | |
options = utils.clone(options); | |
var defaultOptions = { | |
method : "GET", | |
headers: {}, | |
json: true, | |
processData: true, | |
timeout: 10000, | |
cache: false | |
}; | |
options = utils.extend(true, defaultOptions, options); | |
// cache-buster, specifically designed to work around IE's aggressive caching | |
// see http://www.dashbay.com/2011/05/internet-explorer-caches-ajax/ | |
if (options.method === 'GET' && !options.cache) { | |
var hasArgs = options.url.indexOf('?') !== -1; | |
options.url += (hasArgs ? '&' : '?') + '_nonce=' + utils.uuid(16); | |
} | |
function onSuccess(obj, resp, cb) { | |
if (!options.binary && !options.json && options.processData && | |
typeof obj !== 'string') { | |
obj = JSON.stringify(obj); | |
} else if (!options.binary && options.json && typeof obj === 'string') { | |
try { | |
obj = JSON.parse(obj); | |
} catch (e) { | |
// Probably a malformed JSON from server | |
return cb(e); | |
} | |
} | |
if (Array.isArray(obj)) { | |
obj = obj.map(function (v) { | |
var obj; | |
if (v.ok) { | |
return v; | |
} else if (v.error && v.error === 'conflict') { | |
obj = errors.REV_CONFLICT; | |
obj.id = v.id; | |
return obj; | |
} else if (v.error && v.error === 'forbidden') { | |
obj = errors.FORBIDDEN; | |
obj.id = v.id; | |
obj.reason = v.reason; | |
return obj; | |
} else if (v.missing) { | |
obj = errors.MISSING_DOC; | |
obj.missing = v.missing; | |
return obj; | |
} else { | |
return v; | |
} | |
}); | |
} | |
cb(null, obj, resp); | |
} | |
// Translate an XHR failure into one of the canonical pouchdb error
// objects: prefer matching the parsed JSON body's `error` name, fall
// back to matching on HTTP status when the body is not JSON.
function onError(err, cb) {
  var errParsed, errObj, errType, key;
  try {
    errParsed = JSON.parse(err.responseText);
    //would prefer not to have a try/catch clause
    for (key in errors) {
      if (errors.hasOwnProperty(key) &&
          errors[key].name === errParsed.error) {
        errType = errors[key];
        break;
      }
    }
    if (!errType) {
      errType = errors.UNKNOWN_ERROR;
      if (err.status) {
        // NOTE(review): this mutates the shared errors.UNKNOWN_ERROR
        // constant, so the status can leak into later unrelated failures
        errType.status = err.status;
      }
      if (err.statusText) {
        err.name = err.statusText;
      }
    }
    errObj = errors.error(errType, errParsed.reason);
  } catch (e) {
    // body was not JSON — match purely on the HTTP status code
    for (var key in errors) {
      if (errors.hasOwnProperty(key) && errors[key].status === err.status) {
        errType = errors[key];
        break;
      }
    }
    if (!errType) {
      errType = errors.UNKNOWN_ERROR;
      if (err.status) {
        errType.status = err.status;
      }
      if (err.statusText) {
        err.name = err.statusText;
      }
    }
    errObj = errors.error(errType);
  }
  if (err.withCredentials && err.status === 0) {
    // apparently this is what we get when the method
    // is reported as not allowed by CORS. so fudge it
    errObj.status = 405;
    errObj.statusText = "Method Not Allowed";
  }
  cb(errObj);
}
var timer; | |
var xhr; | |
if (options.xhr) { | |
xhr = new options.xhr(); | |
} else { | |
xhr = new XMLHttpRequest(); | |
} | |
xhr.open(options.method, options.url); | |
xhr.withCredentials = true; | |
if (options.json) { | |
options.headers.Accept = 'application/json'; | |
options.headers['Content-Type'] = options.headers['Content-Type'] || | |
'application/json'; | |
if (options.body && | |
options.processData && | |
typeof options.body !== "string") { | |
options.body = JSON.stringify(options.body); | |
} | |
} | |
if (options.binary) { | |
xhr.responseType = 'arraybuffer'; | |
} | |
var createCookie = function (name, value, days) { | |
var expires = ""; | |
if (days) { | |
var date = new Date(); | |
date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000)); | |
expires = "; expires=" + date.toGMTString(); | |
} | |
document.cookie = name + "=" + value + expires + "; path=/"; | |
}; | |
for (var key in options.headers) { | |
if (key === 'Cookie') { | |
var cookie = options.headers[key].split('='); | |
createCookie(cookie[0], cookie[1], 10); | |
} else { | |
xhr.setRequestHeader(key, options.headers[key]); | |
} | |
} | |
if (!("body" in options)) { | |
options.body = null; | |
} | |
var abortReq = function () { | |
if (requestCompleted) { | |
return; | |
} | |
xhr.abort(); | |
onError(xhr, callback); | |
}; | |
xhr.onreadystatechange = function () { | |
if (xhr.readyState !== 4 || requestCompleted) { | |
return; | |
} | |
clearTimeout(timer); | |
if (xhr.status >= 200 && xhr.status < 300) { | |
var data; | |
if (options.binary) { | |
data = createBlob([xhr.response || ''], { | |
type: xhr.getResponseHeader('Content-Type') | |
}); | |
} else { | |
data = xhr.responseText; | |
} | |
onSuccess(data, xhr, callback); | |
} else { | |
onError(xhr, callback); | |
} | |
}; | |
if (options.timeout > 0) { | |
timer = setTimeout(abortReq, options.timeout); | |
xhr.onprogress = function () { | |
clearTimeout(timer); | |
timer = setTimeout(abortReq, options.timeout); | |
}; | |
if (typeof hasUpload === 'undefined') { | |
// IE throws an error if you try to access it directly | |
hasUpload = Object.keys(xhr).indexOf('upload') !== -1; | |
} | |
if (hasUpload) { // does not exist in ie9 | |
xhr.upload.onprogress = xhr.onprogress; | |
} | |
} | |
if (options.body && (options.body instanceof Blob)) { | |
var reader = new FileReader(); | |
reader.onloadend = function (e) { | |
var binary = ""; | |
var bytes = new Uint8Array(this.result); | |
var length = bytes.byteLength; | |
for (var i = 0; i < length; i++) { | |
binary += String.fromCharCode(bytes[i]); | |
} | |
binary = utils.fixBinary(binary); | |
xhr.send(binary); | |
}; | |
reader.readAsArrayBuffer(options.body); | |
} else { | |
xhr.send(options.body); | |
} | |
return {abort: abortReq}; | |
} | |
module.exports = ajax; | |
},{"../utils":15,"./blob.js":3,"./errors":5}],3:[function(require,module,exports){ | |
(function (global){ | |
"use strict"; | |
//Abstracts constructing a Blob object, so it also works in older | |
//browsers that don't support the native Blob constructor. (i.e. | |
//old QtWebKit versions, at least). | |
// Build a Blob from `parts`, falling back to the deprecated vendor-prefixed
// BlobBuilder API on engines whose Blob constructor is unusable (e.g. old
// QtWebKit). Behaves exactly like `new Blob(parts, properties)` elsewhere.
function createBlob(parts, properties) {
  var pieces = parts || [];
  var props = properties || {};
  try {
    return new Blob(pieces, props);
  } catch (err) {
    // Only a TypeError signals "constructor unsupported"; anything else is
    // a genuine failure and must propagate.
    if (err.name !== "TypeError") {
      throw err;
    }
    var Builder = global.BlobBuilder ||
      global.MSBlobBuilder ||
      global.MozBlobBuilder ||
      global.WebKitBlobBuilder;
    var builder = new Builder();
    pieces.forEach(function (piece) {
      builder.append(piece);
    });
    return builder.getBlob(props.type);
  }
}
module.exports = createBlob; | |
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],4:[function(require,module,exports){ | |
'use strict'; | |
exports.Map = LazyMap; // TODO: use ES6 map | |
exports.Set = LazySet; // TODO: use ES6 set | |
// based on https://github.com/montagejs/collections | |
// Minimal Map/Set stand-ins for pre-ES6 environments, backed by a plain
// object whose keys are prefixed ("mangled") with '$' so user keys can
// never collide with Object.prototype properties.
function LazyMap() {
  this.store = {};
}
LazyMap.prototype.mangle = function (key) {
  if (typeof key !== "string") {
    throw new TypeError("key must be a string but Got " + key);
  }
  return '$' + key;
};
LazyMap.prototype.unmangle = function (key) {
  return key.substring(1);
};
LazyMap.prototype.get = function (key) {
  var mangled = this.mangle(key);
  return (mangled in this.store) ? this.store[mangled] : void 0;
};
LazyMap.prototype.set = function (key, value) {
  this.store[this.mangle(key)] = value;
  return true;
};
LazyMap.prototype.has = function (key) {
  return this.mangle(key) in this.store;
};
LazyMap.prototype["delete"] = function (key) {
  var mangled = this.mangle(key);
  var present = mangled in this.store;
  if (present) {
    delete this.store[mangled];
  }
  return present;
};
LazyMap.prototype.forEach = function (cb) {
  var store = this.store;
  var self = this;
  // Callback receives (value, unmangledKey), mirroring this module's
  // historical argument order (not the ES6 Map order).
  Object.keys(store).forEach(function (mangled) {
    cb(store[mangled], self.unmangle(mangled));
  });
};
// A set is just a map from key -> true.
function LazySet() {
  this.store = new LazyMap();
}
LazySet.prototype.add = function (key) {
  return this.store.set(key, true);
};
LazySet.prototype.has = function (key) {
  return this.store.has(key);
};
LazySet.prototype["delete"] = function (key) {
  return this.store["delete"](key);
};
},{}],5:[function(require,module,exports){ | |
"use strict"; | |
// Error subclass used for all canonical PouchDB error objects.
// `opts` supplies the HTTP-style status code (`status`), the short error
// name (`error`) and a human-readable reason (`reason`).
function PouchError(opts) {
  this.status = opts.status;
  this.name = opts.error;
  this.message = opts.reason;
  this.error = true;
}
// BUG FIX: the original read `PouchError.prototype__proto__ = ...` (missing
// dot), which merely created a property named "prototype__proto__" on the
// constructor and left instances NOT inheriting from Error. Wire up the
// prototype chain properly so `err instanceof Error` holds.
PouchError.prototype = Object.create(Error.prototype);
PouchError.prototype.constructor = PouchError;
// Serialize the interesting fields; Error's default toString would lose
// `status`.
PouchError.prototype.toString = function () {
  return JSON.stringify({
    status: this.status,
    name: this.name,
    message: this.message
  });
};
// Canonical PouchDB error instances, exported by symbolic name.
// Built from a table so every entry has the same {status, error, reason}
// shape and the list stays easy to scan.
var errorDefinitions = {
  UNAUTHORIZED: {status: 401, error: 'unauthorized',
    reason: "Name or password is incorrect."},
  MISSING_BULK_DOCS: {status: 400, error: 'bad_request',
    reason: "Missing JSON list of 'docs'"},
  MISSING_DOC: {status: 404, error: 'not_found',
    reason: 'missing'},
  REV_CONFLICT: {status: 409, error: 'conflict',
    reason: 'Document update conflict'},
  INVALID_ID: {status: 400, error: 'invalid_id',
    reason: '_id field must contain a string'},
  MISSING_ID: {status: 412, error: 'missing_id',
    reason: '_id is required for puts'},
  RESERVED_ID: {status: 400, error: 'bad_request',
    reason: 'Only reserved document ids may start with underscore.'},
  NOT_OPEN: {status: 412, error: 'precondition_failed',
    reason: 'Database not open'},
  UNKNOWN_ERROR: {status: 500, error: 'unknown_error',
    reason: 'Database encountered an unknown error'},
  BAD_ARG: {status: 500, error: 'badarg',
    reason: 'Some query argument is invalid'},
  INVALID_REQUEST: {status: 400, error: 'invalid_request',
    reason: 'Request was invalid'},
  QUERY_PARSE_ERROR: {status: 400, error: 'query_parse_error',
    reason: 'Some query parameter is invalid'},
  DOC_VALIDATION: {status: 500, error: 'doc_validation',
    reason: 'Bad special document member'},
  BAD_REQUEST: {status: 400, error: 'bad_request',
    reason: 'Something wrong with the request'},
  NOT_AN_OBJECT: {status: 400, error: 'bad_request',
    reason: 'Document must be a JSON object'},
  DB_MISSING: {status: 404, error: 'not_found',
    reason: 'Database not found'},
  IDB_ERROR: {status: 500, error: 'indexed_db_went_bad',
    reason: 'unknown'},
  WSQ_ERROR: {status: 500, error: 'web_sql_went_bad',
    reason: 'unknown'},
  LDB_ERROR: {status: 500, error: 'levelDB_went_went_bad',
    reason: 'unknown'},
  FORBIDDEN: {status: 403, error: 'forbidden',
    reason: 'Forbidden by design doc validate_doc_update function'}
};
Object.keys(errorDefinitions).forEach(function (name) {
  exports[name] = new PouchError(errorDefinitions[name]);
});
// Derive a one-off error object from one of the canonical PouchError
// instances above: `error` becomes the prototype (so status/name/error are
// inherited), `reason` overrides the message, and `name` (optional)
// overrides the error name.
exports.error = function (error, reason, name) {
  function CustomPouchError(msg) {
    // NOTE(review): `msg` is ignored and `reason` from the closure is
    // assigned instead, so calling with only `error` yields an own
    // `message` of undefined (shadowing the prototype's message) —
    // confirm callers always pass a reason.
    this.message = reason;
    if (name) {
      this.name = name;
    }
  }
  // The prototype is the shared canonical instance, not a fresh copy.
  CustomPouchError.prototype = error;
  return new CustomPouchError(reason);
};
},{}],6:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict'; | |
var crypto = require('crypto'); | |
var Md5 = require('spark-md5'); | |
var setImmediateShim = global.setImmediate || global.setTimeout; | |
// ArrayBuffer.prototype.slice polyfill wrapper: use the native method when
// present, otherwise copy the requested byte range by hand (IE lacks
// slice; approach from http://stackoverflow.com/a/21440217).
function sliceShim(arrayBuffer, begin, end) {
  if (typeof arrayBuffer.slice === 'function') {
    // Forward only the arguments that were actually given, mirroring how
    // the native method treats missing parameters.
    if (!begin) {
      return arrayBuffer.slice();
    }
    if (!end) {
      return arrayBuffer.slice(begin);
    }
    return arrayBuffer.slice(begin, end);
  }
  // Chrome floors fractional offsets and treats missing ones as 0; match it.
  var from = Math.floor(begin || 0);
  var to = Math.floor(end || 0);
  var total = arrayBuffer.byteLength;
  // Negative offsets count from the end; clamp both into [0, total].
  from = from < 0 ? Math.max(from + total, 0) : Math.min(total, from);
  to = to < 0 ? Math.max(to + total, 0) : Math.min(total, to);
  var span = to - from;
  // A non-positive span yields an empty buffer rather than an error.
  if (span <= 0) {
    return new ArrayBuffer(0);
  }
  var copy = new ArrayBuffer(span);
  new Uint8Array(copy).set(new Uint8Array(arrayBuffer, from, span));
  return copy;
}
// Serialize a 32-bit integer into four binary characters, least
// significant byte first.
function intToString(int) {
  var out = '';
  for (var shift = 0; shift < 32; shift += 8) {
    out += String.fromCharCode((int >>> shift) & 0xff);
  }
  return out;
}
// Concatenate an array of 32-bit ints (spark-md5's raw digest output) into
// a binary string and base64-encode it.
function rawToBase64(raw) {
  var binary = raw.map(intToString).join('');
  return global.btoa(binary);
}
module.exports = function (data, callback) { | |
if (!process.browser) { | |
var base64 = crypto.createHash('md5').update(data).digest('base64'); | |
callback(null, base64); | |
return; | |
} | |
var inputIsString = typeof data === 'string'; | |
var len = inputIsString ? data.length : data.byteLength; | |
var chunkSize = Math.min(524288, len); | |
var chunks = Math.ceil(len / chunkSize); | |
var currentChunk = 0; | |
var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer(); | |
function append(buffer, data, start, end) { | |
if (inputIsString) { | |
buffer.appendBinary(data.substring(start, end)); | |
} else { | |
buffer.append(sliceShim(data, start, end)); | |
} | |
} | |
function loadNextChunk() { | |
var start = currentChunk * chunkSize; | |
var end = start + chunkSize; | |
if ((start + chunkSize) >= data.size) { | |
end = data.size; | |
} | |
currentChunk++; | |
if (currentChunk < chunks) { | |
append(buffer, data, start, end); | |
setImmediateShim(loadNextChunk); | |
} else { | |
append(buffer, data, start, end); | |
var raw = buffer.end(true); | |
var base64 = rawToBase64(raw); | |
callback(null, base64); | |
buffer.destroy(); | |
} | |
} | |
loadNextChunk(); | |
}; | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"crypto":17,"spark-md5":97}],7:[function(require,module,exports){ | |
"use strict"; | |
// BEGIN Math.uuid.js
/*!
Math.uuid.js (v1.4)
http://www.broofa.com
mailto:[email protected]
Copyright (c) 2010 Robert Kieffer
Dual licensed under the MIT and GPL licenses.
*/
/*
 * Generate a random uuid.
 *
 * USAGE: Math.uuid(length, radix)
 *   length - the desired number of characters
 *   radix  - the number of allowable values for each character.
 *
 * EXAMPLES:
 *   // No arguments  - returns RFC4122, version 4 ID
 *   >>> Math.uuid()
 *   "92329D39-6F5C-4520-ABFC-AAB64544E172"
 *
 *   // One argument - returns ID of the specified length
 *   >>> Math.uuid(15)     // 15 character ID (default base=62)
 *   "VcydxgltxrVZSTV"
 *
 *   // Two arguments - returns ID of the specified length, and radix.
 *   // (Radix must be <= 62)
 *   >>> Math.uuid(8, 2)  // 8 character ID (base=2)
 *   "01001010"
 *   >>> Math.uuid(8, 10) // 8 character ID (base=10)
 *   "47473046"
 *   >>> Math.uuid(8, 16) // 8 character ID (base=16)
 *   "098F4D35"
 */
var chars = (
  '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
  'abcdefghijklmnopqrstuvwxyz'
).split('');
// Uniform random integer in [0, radix).
function getValue(radix) {
  return 0 | Math.random() * radix;
}
function uuid(len, radix) {
  radix = radix || chars.length;
  var out = '';
  var i = -1;
  if (len) {
    // Compact form: `len` random characters drawn from the first `radix`
    // entries of the alphabet.
    while (++i < len) {
      out += chars[getValue(radix)];
    }
    return out;
  }
  // rfc4122, version 4 form. Fill in random hex data; at i==19 set the
  // high bits of the clock sequence as per rfc4122, sec. 4.1.5.
  while (++i < 36) {
    switch (i) {
      case 8:
      case 13:
      case 18:
      case 23:
        out += '-';
        break;
      case 14:
        // BUG FIX: RFC 4122 requires the literal version digit '4' here;
        // the original emitted a random hex digit, so the claimed
        // "version 4" IDs were not actually spec-compliant.
        out += '4';
        break;
      case 19:
        out += chars[(getValue(16) & 0x3) | 0x8];
        break;
      default:
        out += chars[getValue(16)];
    }
  }
  return out;
}
module.exports = uuid; | |
},{}],8:[function(require,module,exports){ | |
'use strict'; | |
var extend = require('pouchdb-extend'); | |
// for a better overview of what this is doing, read: | |
// https://github.com/apache/couchdb/blob/master/src/couchdb/couch_key_tree.erl | |
// | |
// But for a quick intro, CouchDB uses a revision tree to store a documents | |
// history, A -> B -> C, when a document has conflicts, that is a branch in the | |
// tree, A -> (B1 | B2 -> C), We store these as a nested array in the format | |
// | |
// KeyTree = [Path ... ] | |
// Path = {pos: position_from_root, ids: Tree} | |
// Tree = [Key, Opts, [Tree, ...]], in particular single node: [Key, []] | |
// Turn a root-to-leaf path (a flat array of {id, opts} entries) into a
// single-branch tree in the [id, opts, [children]] format.
// Note: consumes `path` destructively via shift(), like the original.
function pathToTree(path) {
  var first = path.shift();
  var root = [first.id, first.opts, []];
  var tail = root;
  while (path.length > 0) {
    var entry = path.shift();
    var node = [entry.id, entry.opts, []];
    tail[2].push(node);
    tail = node;
  }
  return root;
}
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
// Mutates in_tree1 in place and returns {conflicts, tree}, where
// `conflicts` is false, 'new_leaf' (a branch was extended) or
// 'new_branch' (a genuine fork was introduced).
function mergeTree(in_tree1, in_tree2) {
  // Iterative traversal: each queue entry pairs up two subtrees whose
  // roots are the same revision.
  var queue = [{tree1: in_tree1, tree2: in_tree2}];
  var conflicts = false;
  while (queue.length > 0) {
    var item = queue.pop();
    var tree1 = item.tree1;
    var tree2 = item.tree2;
    // A revision's body is 'available' if either side has it.
    if (tree1[1].status || tree2[1].status) {
      tree1[1].status =
        (tree1[1].status === 'available' ||
        tree2[1].status === 'available') ? 'available' : 'missing';
    }
    for (var i = 0; i < tree2[2].length; i++) {
      if (!tree1[2][0]) {
        // tree1 has no children at this node: adopt tree2's child,
        // extending the branch by a leaf.
        conflicts = 'new_leaf';
        tree1[2][0] = tree2[2][i];
        continue;
      }
      var merged = false;
      for (var j = 0; j < tree1[2].length; j++) {
        if (tree1[2][j][0] === tree2[2][i][0]) {
          // Same child revision on both sides: descend and merge.
          queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
          merged = true;
        }
      }
      if (!merged) {
        // Child exists only in tree2: attach it as a sibling (conflict
        // branch) and keep children deterministically ordered.
        conflicts = 'new_branch';
        tree1[2].push(tree2[2][i]);
        tree1[2].sort();
      }
    }
  }
  return {conflicts: conflicts, tree: in_tree1};
}
// Merge `path` (a {pos, ids} branch) into the forest `tree`.
// Returns {tree, conflicts}. With dontExpand === true (used while
// stemming) branches rooted at different depths are never joined.
function doMerge(tree, path, dontExpand) {
  var restree = [];
  var conflicts = false;
  var merged = false;
  var res;
  if (!tree.length) {
    // Empty forest: the path becomes its first branch.
    return {tree: [path], conflicts: 'new_leaf'};
  }
  tree.forEach(function (branch) {
    if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // merged
      res = mergeTree(branch.ids, path.ids);
      restree.push({pos: branch.pos, ids: res.tree});
      conflicts = conflicts || res.conflicts;
      merged = true;
    } else if (dontExpand !== true) {
      // The paths are rooted at different depths. Take the shallower one
      // and walk down until we are level with the deeper one's root; if
      // the revision ids match there, merge the deeper path into that
      // subtree. (After stemming we don't want to expand trees this way.)
      var t1 = branch.pos < path.pos ? branch : path;
      var t2 = branch.pos < path.pos ? path : branch;
      var diff = t2.pos - t1.pos;
      var candidateParents = [];
      var trees = [];
      trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
      while (trees.length > 0) {
        var item = trees.pop();
        if (item.diff === 0) {
          // We are level with t2's root: record a match on equal ids.
          if (item.ids[0] === t2.ids[0]) {
            candidateParents.push(item);
          }
          continue;
        }
        if (!item.ids) {
          continue;
        }
        /*jshint loopfunc:true */
        item.ids[2].forEach(function (el, idx) {
          trees.push(
            {ids: el, diff: item.diff - 1, parent: item.ids, parentIdx: idx});
        });
      }
      var el = candidateParents[0];
      if (!el) {
        // No join point found: keep the branch unchanged.
        restree.push(branch);
      } else {
        // Merge t2 into the subtree at the join point, splicing the merged
        // subtree back into its parent.
        res = mergeTree(el.ids, t2.ids);
        el.parent[2][el.parentIdx] = res.tree;
        restree.push({pos: t1.pos, ids: t1.ids});
        conflicts = conflicts || res.conflicts;
        merged = true;
      }
    } else {
      restree.push(branch);
    }
  });
  // The path matched no existing branch: it becomes a brand-new one.
  if (!merged) {
    restree.push(path);
  }
  restree.sort(function (a, b) {
    return a.pos - b.pos;
  });
  return {
    tree: restree,
    conflicts: conflicts || 'internal_node'
  };
}
// To ensure we dont grow the revision tree infinitely, we stem old revisions
// Keeps at most `depth` revisions on each root-to-leaf path, then
// re-merges the truncated paths (without expanding across depths).
function stem(tree, depth) {
  // First we break out the tree into a complete list of root to leaf paths,
  // we cut off the start of the path and generate a new set of flat trees
  var stemmedPaths = PouchMerge.rootToLeaf(tree).map(function (path) {
    var stemmed = path.ids.slice(-depth);
    return {
      // Shift the root position forward by however many revisions we cut.
      pos: path.pos + (path.ids.length - stemmed.length),
      ids: pathToTree(stemmed)
    };
  });
  // Then we remerge all those flat trees together, ensuring that we dont
  // connect trees that would go beyond the depth limit
  // (shift() removes the seed path so it isn't merged into itself).
  return stemmedPaths.reduce(function (prev, current, i, arr) {
    return doMerge(prev, current, true).tree;
  }, [stemmedPaths.shift()]);
}
var PouchMerge = {};
// Public entry point: merge `path` into `tree`, then stem the result so no
// branch keeps more than `depth` revisions of history.
PouchMerge.merge = function (tree, path, depth) {
  // Deep-copy both inputs so callers' structures are never mutated.
  var treeCopy = extend(true, [], tree);
  var pathCopy = extend(true, {}, path);
  var merged = doMerge(treeCopy, pathCopy);
  return {
    tree: stem(merged.tree, depth),
    conflicts: merged.conflicts
  };
};
// We fetch all leafs of the revision tree, and sort them based on tree length
// and whether they were deleted, undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
PouchMerge.winningRev = function (metadata) {
  var candidates = [];
  PouchMerge.traverseRevTree(metadata.rev_tree,
      function (isLeaf, pos, id, something, opts) {
    if (!isLeaf) {
      return;
    }
    candidates.push({pos: pos, id: id, deleted: !!opts.deleted});
  });
  // Order: non-deleted before deleted, then deepest first, then higher
  // revision hash first; the winner ends up at index 0.
  candidates.sort(function (a, b) {
    if (a.deleted !== b.deleted) {
      return a.deleted ? 1 : -1;
    }
    if (a.pos !== b.pos) {
      return b.pos - a.pos;
    }
    return a.id < b.id ? 1 : -1;
  });
  var winner = candidates[0];
  return winner.pos + '-' + winner.id;
};
// Pretty much all below can be combined into a higher order function to
// traverse revisions
// Depth-first walk over every node of every branch in `revs`. The callback
// receives (isLeaf, pos, id, ctx, opts); whatever it returns becomes the
// ctx passed to that node's children.
PouchMerge.traverseRevTree = function (revs, callback) {
  var stack = revs.slice();
  while (stack.length) {
    var current = stack.pop();
    var pos = current.pos;
    var node = current.ids;
    var children = node[2];
    var ctx =
      callback(children.length === 0, pos, node[0], current.ctx, node[1]);
    /*jshint loopfunc:true */
    children.forEach(function (child) {
      stack.push({pos: pos + 1, ids: child, ctx: ctx});
    });
  }
};
// Collect every leaf revision in `revs` as {rev: "pos-id", opts} records,
// ordered deepest-first.
PouchMerge.collectLeaves = function (revs) {
  var leaves = [];
  PouchMerge.traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
    if (isLeaf) {
      leaves.unshift({rev: pos + "-" + id, pos: pos, opts: opts});
    }
  });
  leaves.sort(function (a, b) {
    return b.pos - a.pos;
  });
  // `pos` was only needed for sorting; strip it from the result.
  // (Idiom fix: the original used .map() purely for this side effect and
  // discarded the mapped array — forEach states the intent.)
  leaves.forEach(function (leaf) { delete leaf.pos; });
  return leaves;
};
// returns revs of all conflicts that is leaves such that
// 1. are not deleted and
// 2. are different than winning revision
PouchMerge.collectConflicts = function (metadata) {
  var winner = PouchMerge.winningRev(metadata);
  return PouchMerge.collectLeaves(metadata.rev_tree)
    .filter(function (leaf) {
      return leaf.rev !== winner && !leaf.opts.deleted;
    })
    .map(function (leaf) {
      return leaf.rev;
    });
};
// Enumerate every root-to-leaf path in `tree` as {pos, ids} records, where
// `ids` is the flat [{id, opts}, ...] history from root to leaf.
PouchMerge.rootToLeaf = function (tree) {
  var paths = [];
  PouchMerge.traverseRevTree(tree, function (isLeaf, pos, id, history, opts) {
    // `history` is the path accumulated down to the parent; extend a copy
    // so sibling branches don't share state.
    var chain = (history || []).slice();
    chain.push({id: id, opts: opts});
    if (isLeaf) {
      paths.unshift({pos: pos + 1 - chain.length, ids: chain});
    }
    return chain;
  });
  return paths;
};
module.exports = PouchMerge; | |
},{"pouchdb-extend":96}],"adapter-config":[function(require,module,exports){ | |
module.exports=require('GfW1tc'); | |
},{}],"GfW1tc":[function(require,module,exports){ | |
'use strict'; | |
// Static description of the in-memory adapter: always valid (no feature
// detection needed) and no prefix on database names.
module.exports = {
  name: 'memory',
  valid: function () {
    return true;
  },
  use_prefix: false
};
},{}],11:[function(require,module,exports){ | |
"use strict"; | |
// Register the bundled adapter on the global PouchDB constructor and make
// it the preferred choice when several adapters are available.
var adapterConfig = require('adapter-config');
var adapterName = adapterConfig.name;
var adapter = require('./levelalt');
window.PouchDB.adapter(adapterName, adapter);
window.PouchDB.preferredAdapters.push(adapterName);
},{"./levelalt":12,"adapter-config":"GfW1tc"}],12:[function(require,module,exports){ | |
'use strict'; | |
var LevelPouch = require('../adapters/leveldb'); | |
var leveldown = require('leveldown'); | |
var adapterConfig = require('adapter-config'); | |
var utils = require('../utils'); | |
// Adapter constructor: a LevelPouch whose backing store defaults to the
// `leveldown` substitute bundled with this build.
function LevelPouchAlt(opts, callback) {
  var _opts = utils.extend({
    db: leveldown
  }, opts);
  LevelPouch.call(this, _opts, callback);
}
// overrides for normal LevelDB behavior on Node
LevelPouchAlt.valid = function () {
  return adapterConfig.valid();
};
LevelPouchAlt.use_prefix = adapterConfig.use_prefix;
// Destroy a database by name. `opts` is optional; when omitted the second
// argument is treated as the callback.
LevelPouchAlt.destroy = utils.toPromise(function (name, opts, callback) {
  if (typeof opts === 'function') {
    callback = opts;
    opts = {};
  }
  var _opts = utils.extend({
    db: leveldown
  }, opts);
  return LevelPouch.destroy(name, _opts, callback);
});
module.exports = LevelPouchAlt;
},{"../adapters/leveldb":1,"../utils":15,"adapter-config":"GfW1tc","leveldown":"1XEYOF"}],"../adapters/../deps/migrate":[function(require,module,exports){ | |
module.exports=require('6ZjBAW'); | |
},{}],"6ZjBAW":[function(require,module,exports){ | |
(function (process){ | |
'use strict'; | |
// LevelAlt doesn't need the pre-2.2.0 LevelDB-specific migrations | |
exports.toSublevel = function (name, db, callback) { | |
process.nextTick(function () { | |
callback(); | |
}); | |
}; | |
exports.localAndMetaStores = function (db, stores, callback) { | |
process.nextTick(function () { | |
callback(); | |
}); | |
}; | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],15:[function(require,module,exports){ | |
(function (process,global){ | |
/*jshint strict: false */ | |
/*global chrome */ | |
var merge = require('./merge'); | |
exports.extend = require('pouchdb-extend'); | |
exports.ajax = require('./deps/ajax'); | |
exports.createBlob = require('./deps/blob'); | |
exports.uuid = require('./deps/uuid'); | |
exports.getArguments = require('argsarray'); | |
var buffer = require('./deps/buffer'); | |
var errors = require('./deps/errors'); | |
var EventEmitter = require('events').EventEmitter; | |
var collections = require('./deps/collections'); | |
exports.Map = collections.Map; | |
exports.Set = collections.Set; | |
// Prefer the platform's native Promise; fall back to bluebird otherwise.
if (typeof global.Promise === 'function') {
  exports.Promise = global.Promise;
} else {
  exports.Promise = require('bluebird');
}
var Promise = exports.Promise;
// Build a truthy-lookup table from a list of strings:
// ['a', 'b'] -> {a: true, b: true}.
function toObject(array) {
  return array.reduce(function (acc, item) {
    acc[item] = true;
    return acc;
  }, {});
}
// List of top level reserved words for doc
// (underscore-prefixed fields PouchDB itself understands; any other field
// starting with '_' is rejected during parsing).
var reservedWords = toObject([
  '_id',
  '_rev',
  '_attachments',
  '_deleted',
  '_revisions',
  '_revs_info',
  '_conflicts',
  '_deleted_conflicts',
  '_local_seq',
  '_rev_tree',
  //replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
// List of reserved words that should end up the document
// (i.e. stored in the document body rather than in its metadata).
var dataWords = toObject([
  '_attachments',
  //replication documents
  '_replication_id',
  '_replication_state',
  '_replication_state_time',
  '_replication_state_reason',
  '_replication_stats'
]);
// Deep-copy a plain object via a deep extend onto a fresh object.
exports.clone = function (obj) {
  return exports.extend(true, {}, obj);
};
exports.inherits = require('inherits'); | |
// Determine id an ID is valid | |
// - invalid IDs begin with an underescore that does not begin '_design' or | |
// '_local' | |
// - any other string value is a valid id | |
// Returns the specific error object for each case | |
exports.invalidIdError = function (id) { | |
var err; | |
if (!id) { | |
err = new TypeError(errors.MISSING_ID.message); | |
err.status = 412; | |
} else if (typeof id !== 'string') { | |
err = new TypeError(errors.INVALID_ID.message); | |
err.status = 400; | |
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { | |
err = new TypeError(errors.RESERVED_ID.message); | |
err.status = 400; | |
} | |
if (err) { | |
throw err; | |
} | |
}; | |
// True when running inside a Chrome packaged app, detected by the presence
// of the chrome.storage.local API.
function isChromeApp() {
  if (typeof chrome === "undefined") {
    return false;
  }
  return typeof chrome.storage !== "undefined" &&
    typeof chrome.storage.local !== "undefined";
}
// Pretty dumb name for a function, just wraps callback calls so we dont
// to if (callback) callback() everywhere
// Invokes its first argument with the remaining arguments when it is a
// function; silently does nothing otherwise.
exports.call = exports.getArguments(function (args) {
  if (!args.length) {
    return;
  }
  var fun = args.shift();
  if (typeof fun === 'function') {
    fun.apply(this, args);
  }
});
// True for ids in the _local namespace (non-replicating documents).
exports.isLocalId = function (id) {
  return (/^_local/).test(id);
};
// check if a specific revision of a doc has been deleted | |
// - metadata: the metadata object from the doc store | |
// - rev: (optional) the revision to check. defaults to winning revision | |
exports.isDeleted = function (metadata, rev) { | |
if (!rev) { | |
rev = merge.winningRev(metadata); | |
} | |
var dashIndex = rev.indexOf('-'); | |
if (dashIndex !== -1) { | |
rev = rev.substring(dashIndex + 1); | |
} | |
var deleted = false; | |
merge.traverseRevTree(metadata.rev_tree, | |
function (isLeaf, pos, id, acc, opts) { | |
if (id === rev) { | |
deleted = !!opts.deleted; | |
} | |
}); | |
return deleted; | |
}; | |
exports.filterChange = function (opts) { | |
return function (change) { | |
var req = {}; | |
var hasFilter = opts.filter && typeof opts.filter === 'function'; | |
req.query = opts.query_params; | |
if (opts.filter && hasFilter && !opts.filter.call(this, change.doc, req)) { | |
return false; | |
} | |
if (opts.doc_ids && opts.doc_ids.indexOf(change.id) === -1) { | |
return false; | |
} | |
if (!opts.include_docs) { | |
delete change.doc; | |
} else { | |
for (var att in change.doc._attachments) { | |
if (change.doc._attachments.hasOwnProperty(att)) { | |
change.doc._attachments[att].stub = true; | |
} | |
} | |
} | |
return true; | |
}; | |
}; | |
// Preprocess documents, parse their revisions, assign an id and a | |
// revision for new writes that are missing them, etc | |
exports.parseDoc = function (doc, newEdits) { | |
var nRevNum; | |
var newRevId; | |
var revInfo; | |
var error; | |
var opts = {status: 'available'}; | |
if (doc._deleted) { | |
opts.deleted = true; | |
} | |
if (newEdits) { | |
if (!doc._id) { | |
doc._id = exports.uuid(); | |
} | |
newRevId = exports.uuid(32, 16).toLowerCase(); | |
if (doc._rev) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
if (!revInfo) { | |
var err = new TypeError("invalid value for property '_rev'"); | |
err.status = 400; | |
} | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], {status: 'missing'}, [[newRevId, opts, []]]] | |
}]; | |
nRevNum = parseInt(revInfo[1], 10) + 1; | |
} else { | |
doc._rev_tree = [{ | |
pos: 1, | |
ids : [newRevId, opts, []] | |
}]; | |
nRevNum = 1; | |
} | |
} else { | |
if (doc._revisions) { | |
doc._rev_tree = [{ | |
pos: doc._revisions.start - doc._revisions.ids.length + 1, | |
ids: doc._revisions.ids.reduce(function (acc, x) { | |
if (acc === null) { | |
return [x, opts, []]; | |
} else { | |
return [x, {status: 'missing'}, [acc]]; | |
} | |
}, null) | |
}]; | |
nRevNum = doc._revisions.start; | |
newRevId = doc._revisions.ids[0]; | |
} | |
if (!doc._rev_tree) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
if (!revInfo) { | |
error = new TypeError(errors.BAD_ARG.message); | |
error.status = errors.BAD_ARG.status; | |
throw error; | |
} | |
nRevNum = parseInt(revInfo[1], 10); | |
newRevId = revInfo[2]; | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], opts, []] | |
}]; | |
} | |
} | |
exports.invalidIdError(doc._id); | |
doc._rev = [nRevNum, newRevId].join('-'); | |
var result = {metadata : {}, data : {}}; | |
for (var key in doc) { | |
if (doc.hasOwnProperty(key)) { | |
var specialKey = key[0] === '_'; | |
if (specialKey && !reservedWords[key]) { | |
error = new Error(errors.DOC_VALIDATION.message + ': ' + key); | |
error.status = errors.DOC_VALIDATION.status; | |
throw error; | |
} else if (specialKey && !dataWords[key]) { | |
result.metadata[key.slice(1)] = doc[key]; | |
} else { | |
result.data[key] = doc[key]; | |
} | |
} | |
} | |
return result; | |
}; | |
// True when running under Cordova/PhoneGap (any of the globals the various
// versions expose).
exports.isCordova = function () {
  return (typeof cordova !== "undefined" ||
    typeof PhoneGap !== "undefined" ||
    typeof phonegap !== "undefined");
};
// Feature-detect usable localStorage; Chrome packaged apps disallow it.
// NOTE(review): on success this returns the localStorage object itself
// (truthy) rather than a strict boolean — callers appear to use it only in
// boolean context.
exports.hasLocalStorage = function () {
  if (isChromeApp()) {
    return false;
  }
  try {
    // Merely touching localStorage can throw (e.g. cookies disabled).
    return global.localStorage;
  } catch (e) {
    return false;
  }
};
exports.Changes = Changes;
exports.inherits(Changes, EventEmitter);
// Cross-window change notifier. Emits an event named after a database
// whenever some window calls notify(); windows communicate through
// chrome.storage in packaged apps or the localStorage "storage" event in
// ordinary browsers.
function Changes() {
  if (!(this instanceof Changes)) {
    return new Changes();
  }
  var self = this;
  EventEmitter.call(this);
  this.isChrome = isChromeApp();
  this.listeners = {};
  this.hasLocal = false;
  if (!this.isChrome) {
    this.hasLocal = exports.hasLocalStorage();
  }
  if (this.isChrome) {
    chrome.storage.onChanged.addListener(function (e) {
      // make sure it's event addressed to us
      // BUG FIX: the guard used to test `e.db_name`, a key that is never
      // written (notifyLocalWindows sets the literal key "dbName"), so
      // the branch either never fired or crashed dereferencing `e.dbName`
      // below. Check the same key we read.
      if (e.dbName != null) {
        //object only has oldValue, newValue members
        self.emit(e.dbName.newValue);
      }
    });
  } else if (this.hasLocal) {
    if (global.addEventListener) {
      global.addEventListener("storage", function (e) {
        self.emit(e.key);
      });
    } else {
      // IE8-style event registration.
      global.attachEvent("storage", function (e) {
        self.emit(e.key);
      });
    }
  }
}
// Register a per-database listener. `id` dedupes registrations; the
// listener re-queries db.changes() from opts.since every time the db's
// event fires, forwarding newly-seen changes to opts.onChange.
Changes.prototype.addListener = function (dbName, id, db, opts) {
  if (this.listeners[id]) {
    return;
  }
  function eventFunction() {
    db.changes({
      include_docs: opts.include_docs,
      conflicts: opts.conflicts,
      continuous: false,
      descending: false,
      filter: opts.filter,
      view: opts.view,
      since: opts.since,
      query_params: opts.query_params,
      onChange: function (c) {
        // Only forward changes we haven't already reported, and only
        // while the subscription is still active.
        if (c.seq > opts.since && !opts.cancelled) {
          opts.since = c.seq;
          exports.call(opts.onChange, c);
        }
      }
    });
  }
  this.listeners[id] = eventFunction;
  this.on(dbName, eventFunction);
};
// Detach the listener registered under `id` for `dbName`.
// NOTE(review): the entry is left in this.listeners, so a subsequent
// addListener with the same id becomes a no-op — confirm this is intended.
Changes.prototype.removeListener = function (dbName, id) {
  if (!(id in this.listeners)) {
    return;
  }
  EventEmitter.prototype.removeListener.call(this, dbName,
    this.listeners[id]);
};
// Perform a no-op write on shared storage purely so that other windows'
// storage/onChanged listeners fire for this database name.
Changes.prototype.notifyLocalWindows = function (dbName) {
  if (this.isChrome) {
    chrome.storage.local.set({dbName: dbName});
    return;
  }
  if (this.hasLocal) {
    // flip between "a" and "b" so the stored value always actually changes
    var flipped = (localStorage[dbName] === "a") ? "b" : "a";
    localStorage[dbName] = flipped;
  }
};
// Broadcast a change for dbName to this window (via the emitter) and to
// sibling windows (via shared storage).
Changes.prototype.notify = function (dbName) {
  this.emit(dbName);
  this.notifyLocalWindows(dbName);
};
if (!process.browser || !('atob' in global)) { | |
exports.atob = function (str) { | |
var base64 = new buffer(str, 'base64'); | |
// Node.js will just skip the characters it can't encode instead of | |
// throwing and exception | |
if (base64.toString('base64') !== str) { | |
throw ("Cannot base64 encode full string"); | |
} | |
return base64.toString('binary'); | |
}; | |
} else { | |
exports.atob = function (str) { | |
return atob(str); | |
}; | |
} | |
if (!process.browser || !('btoa' in global)) { | |
exports.btoa = function (str) { | |
return new buffer(str, 'binary').toString('base64'); | |
}; | |
} else { | |
exports.btoa = function (str) { | |
return btoa(str); | |
}; | |
} | |
// From http://stackoverflow.com/questions/14967647/ (continues on next line) | |
// encode-decode-image-with-base64-breaks-image (2013-04-21) | |
exports.fixBinary = function (bin) { | |
if (!process.browser) { | |
// don't need to do this in Node | |
return bin; | |
} | |
var length = bin.length; | |
var buf = new ArrayBuffer(length); | |
var arr = new Uint8Array(buf); | |
for (var i = 0; i < length; i++) { | |
arr[i] = bin.charCodeAt(i); | |
} | |
return buf; | |
}; | |
exports.once = function (fun) { | |
var called = false; | |
return exports.getArguments(function (args) { | |
if (called) { | |
if (typeof console.trace === 'function') { | |
console.trace(); | |
} | |
throw new Error('once called more than once'); | |
} else { | |
called = true; | |
fun.apply(this, args); | |
} | |
}); | |
}; | |
/**
 * Wrap a callback-style function so it returns a Promise instead.
 * The wrapped function still accepts an optional trailing node-style
 * callback (err, result); when supplied it is invoked asynchronously in
 * addition to the promise settling.
 */
exports.toPromise = function (func) {
  //create the function we will be returning
  return exports.getArguments(function (args) {
    var self = this;
    var tempCB =
      (typeof args[args.length - 1] === 'function') ? args.pop() : false;
    // if the last argument is a function, assume its a callback
    var usedCB;
    if (tempCB) {
      // if it was a callback, create a new callback which calls it,
      // but do so async so we don't trap any errors
      usedCB = function (err, resp) {
        process.nextTick(function () {
          tempCB(err, resp);
        });
      };
    }
    var promise = new Promise(function (fulfill, reject) {
      var resp;
      try {
        // exports.once guards against func invoking the callback twice
        var callback = exports.once(function (err, mesg) {
          if (err) {
            reject(err);
          } else {
            fulfill(mesg);
          }
        });
        // create a callback for this invocation
        // apply the function in the orig context
        args.push(callback);
        resp = func.apply(self, args);
        // if func already returned a thenable, adopt it directly
        if (resp && typeof resp.then === 'function') {
          fulfill(resp);
        }
      } catch (e) {
        reject(e);
      }
    });
    // if there is a callback, call it back
    if (usedCB) {
      promise.then(function (result) {
        usedCB(null, result);
      }, usedCB);
    }
    // cancel is a no-op here; cancellable operations override it
    promise.cancel = function () {
      return this;
    };
    return promise;
  });
};
/**
 * Wrap an adapter method: returns a promisified function that (a) rejects
 * once the database has been closed and (b) defers execution onto the task
 * queue until the adapter has finished initializing.
 */
exports.adapterFun = function (name, callback) {
  return exports.toPromise(exports.getArguments(function (args) {
    if (this._closed) {
      return Promise.reject(new Error('database is closed'));
    }
    var self = this;
    if (!this.taskqueue.isReady) {
      // adapter still starting up: queue the call and re-dispatch through
      // self[name] once the task queue is ready
      return new exports.Promise(function (fulfill, reject) {
        self.taskqueue.addTask(function (failed) {
          if (failed) {
            reject(failed);
          } else {
            fulfill(self[name].apply(self, args));
          }
        });
      });
    }
    return callback.apply(this, args);
  }));
};
//Can't find original post, but this is close | |
//http://stackoverflow.com/questions/6965107/ (continues on next line) | |
//converting-between-strings-and-arraybuffers | |
exports.arrayBufferToBinaryString = function (buffer) { | |
var binary = ""; | |
var bytes = new Uint8Array(buffer); | |
var length = bytes.byteLength; | |
for (var i = 0; i < length; i++) { | |
binary += String.fromCharCode(bytes[i]); | |
} | |
return binary; | |
}; | |
/**
 * Run a long-lived operation (e.g. changes feeds) returning a promise
 * augmented with EventEmitter methods and a cancel() function.
 * `fun(self, opts, promise)` performs the real work and reports through
 * opts.onChange / opts.complete.
 */
exports.cancellableFun = function (fun, self, opts) {
  opts = opts ? exports.clone(true, {}, opts) : {};
  var emitter = new EventEmitter();
  var oldComplete = opts.complete || function () { };
  // `complete` forwards the final outcome to the caller-supplied callback
  // and the 'end' event, then drops all listeners
  var complete = opts.complete = exports.once(function (err, resp) {
    if (err) {
      oldComplete(err);
    } else {
      emitter.emit('end', resp);
      oldComplete(null, resp);
    }
    emitter.removeAllListeners();
  });
  var oldOnChange = opts.onChange || function () {};
  var lastChange = 0;
  self.on('destroyed', function () {
    emitter.removeAllListeners();
  });
  opts.onChange = function (change) {
    oldOnChange(change);
    // skip changes already reported (de-dup by sequence number)
    if (change.seq <= lastChange) {
      return;
    }
    lastChange = change.seq;
    emitter.emit('change', change);
    if (change.deleted) {
      emitter.emit('delete', change);
    } else if (change.changes.length === 1 &&
      change.changes[0].rev.slice(0, 1) === '1-') {
      // a single first-generation revision means a document creation
      emitter.emit('create', change);
    } else {
      emitter.emit('update', change);
    }
  };
  // NOTE: this deliberately replaces the `complete` handler installed above
  // with the promise resolver; the original handler still runs through the
  // promise.then(...) just below
  var promise = new Promise(function (fulfill, reject) {
    opts.complete = function (err, res) {
      if (err) {
        reject(err);
      } else {
        fulfill(res);
      }
    };
  });
  promise.then(function (result) {
    complete(null, result);
  }, complete);
  // this needs to be overridden by caller, don't fire complete until
  // the task is ready
  promise.cancel = function () {
    promise.isCancelled = true;
    if (self.taskqueue.isReady) {
      opts.complete(null, {status: 'cancelled'});
    }
  };
  if (!self.taskqueue.isReady) {
    // defer the work until the adapter is initialized
    self.taskqueue.addTask(function () {
      if (promise.isCancelled) {
        opts.complete(null, {status: 'cancelled'});
      } else {
        fun(self, opts, promise);
      }
    });
  } else {
    fun(self, opts, promise);
  }
  // expose the emitter API on the returned promise
  promise.on = emitter.on.bind(emitter);
  promise.once = emitter.once.bind(emitter);
  promise.addListener = emitter.addListener.bind(emitter);
  promise.removeListener = emitter.removeListener.bind(emitter);
  promise.removeAllListeners = emitter.removeAllListeners.bind(emitter);
  promise.setMaxListeners = emitter.setMaxListeners.bind(emitter);
  promise.listeners = emitter.listeners.bind(emitter);
  promise.emit = emitter.emit.bind(emitter);
  return promise;
};
// Promise-returning MD5 helper (implementation lives in ./deps/md5).
exports.MD5 = exports.toPromise(require('./deps/md5'));
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./deps/ajax":2,"./deps/blob":3,"./deps/buffer":17,"./deps/collections":4,"./deps/errors":5,"./deps/md5":6,"./deps/uuid":7,"./merge":8,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"argsarray":16,"bluebird":75,"events":18,"inherits":33,"pouchdb-extend":96}],16:[function(require,module,exports){ | |
'use strict';
// argsarray: pass a call's arguments to the wrapped function as one Array.
module.exports = argsArray;
/**
 * Wrap `fun` so it receives its call's arguments as a genuine Array
 * (copied index-by-index rather than leaking the `arguments` object).
 * `this` is forwarded unchanged.
 */
function argsArray(fun) {
  return function () {
    var count = arguments.length;
    var args = [];
    for (var i = 0; i < count; i++) {
      args.push(arguments[i]);
    }
    return fun.call(this, args);
  };
}
},{}],17:[function(require,module,exports){ | |
},{}],18:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// Initialize per-instance listener storage, preserving anything a subclass
// constructor may have already set up.
function EventEmitter() {
  if (!this._events) {
    this._events = {};
  }
  if (!this._maxListeners) {
    this._maxListeners = undefined;
  }
}
module.exports = EventEmitter;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;
// prototype defaults; real storage is created lazily per instance
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Raise or lower this emitter's leak-warning threshold (0 = unlimited).
EventEmitter.prototype.setMaxListeners = function (n) {
  var invalid = !isNumber(n) || n < 0 || isNaN(n);
  if (invalid) {
    throw TypeError('n must be a positive number');
  }
  this._maxListeners = n;
  return this;
};
/**
 * Synchronously invoke every listener registered for `type`, passing any
 * extra arguments through. Returns true if there was at least one listener.
 * An 'error' event with no listener throws.
 */
EventEmitter.prototype.emit = function(type) {
  var er, handler, len, args, i, listeners;
  if (!this._events)
    this._events = {};
  // If there is no 'error' event listener then throw.
  if (type === 'error') {
    if (!this._events.error ||
        (isObject(this._events.error) && !this._events.error.length)) {
      er = arguments[1];
      if (er instanceof Error) {
        throw er; // Unhandled 'error' event
      }
      throw TypeError('Uncaught, unspecified "error" event.');
    }
  }
  handler = this._events[type];
  if (isUndefined(handler))
    return false;
  // single listener is stored as the bare function; multiple as an array
  if (isFunction(handler)) {
    switch (arguments.length) {
      // fast cases
      case 1:
        handler.call(this);
        break;
      case 2:
        handler.call(this, arguments[1]);
        break;
      case 3:
        handler.call(this, arguments[1], arguments[2]);
        break;
      // slower
      default:
        len = arguments.length;
        args = new Array(len - 1);
        for (i = 1; i < len; i++)
          args[i - 1] = arguments[i];
        handler.apply(this, args);
    }
  } else if (isObject(handler)) {
    len = arguments.length;
    args = new Array(len - 1);
    for (i = 1; i < len; i++)
      args[i - 1] = arguments[i];
    // copy the listener array so handlers that mutate it mid-emit are safe
    listeners = handler.slice();
    len = listeners.length;
    for (i = 0; i < len; i++)
      listeners[i].apply(this, args);
  }
  return true;
};
// Append `listener` for `type`, emitting "newListener" first and warning
// when more than _maxListeners handlers accumulate (possible leak).
// Fix: the outer duplicate `var m;` declaration has been removed — it was
// dead, shadowed by the identical declaration inside the leak check below.
EventEmitter.prototype.addListener = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  if (!this._events)
    this._events = {};
  // To avoid recursion in the case that type === "newListener"! Before
  // adding it to the listeners, first emit "newListener".
  if (this._events.newListener)
    this.emit('newListener', type,
              isFunction(listener.listener) ?
              listener.listener : listener);
  if (!this._events[type])
    // Optimize the case of one listener. Don't need the extra array object.
    this._events[type] = listener;
  else if (isObject(this._events[type]))
    // If we've already got an array, just append.
    this._events[type].push(listener);
  else
    // Adding the second element, need to change to array.
    this._events[type] = [this._events[type], listener];
  // Check for listener leak
  if (isObject(this._events[type]) && !this._events[type].warned) {
    var m;
    if (!isUndefined(this._maxListeners)) {
      m = this._maxListeners;
    } else {
      m = EventEmitter.defaultMaxListeners;
    }
    if (m && m > 0 && this._events[type].length > m) {
      this._events[type].warned = true;
      console.error('(node) warning: possible EventEmitter memory ' +
                    'leak detected. %d listeners added. ' +
                    'Use emitter.setMaxListeners() to increase limit.',
                    this._events[type].length);
      if (typeof console.trace === 'function') {
        // not supported in IE 10
        console.trace();
      }
    }
  }
  return this;
};
// `on` is an alias of addListener.
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
// Register a listener that detaches itself after its first invocation.
EventEmitter.prototype.once = function (type, listener) {
  if (!isFunction(listener)) {
    throw TypeError('listener must be a function');
  }
  var fired = false;
  function wrapper() {
    this.removeListener(type, wrapper);
    if (fired) {
      return;
    }
    fired = true;
    listener.apply(this, arguments);
  }
  // expose the original so removeListener(type, listener) still matches
  wrapper.listener = listener;
  this.on(type, wrapper);
  return this;
};
// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  if (!this._events || !this._events[type])
    return this;
  list = this._events[type];
  length = list.length;
  position = -1;
  // single-listener case: _events[type] holds the function itself;
  // list.listener matches wrappers installed by once()
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  } else if (isObject(list)) {
    // array case: scan backwards for the listener (or its once() wrapper)
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }
    if (position < 0)
      return this;
    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }
  return this;
};
// Remove every listener for `type`, or for all types when called with no
// arguments; fires 'removeListener' per handler if anyone listens for it.
EventEmitter.prototype.removeAllListeners = function(type) {
  var key, listeners;
  if (!this._events)
    return this;
  // not listening for removeListener, no need to emit
  if (!this._events.removeListener) {
    if (arguments.length === 0)
      this._events = {};
    else if (this._events[type])
      delete this._events[type];
    return this;
  }
  // emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    for (key in this._events) {
      if (key === 'removeListener') continue;
      this.removeAllListeners(key);
    }
    // clear 'removeListener' handlers last so they observe the others going
    this.removeAllListeners('removeListener');
    this._events = {};
    return this;
  }
  listeners = this._events[type];
  if (isFunction(listeners)) {
    this.removeListener(type, listeners);
  } else {
    // LIFO order
    while (listeners.length)
      this.removeListener(type, listeners[listeners.length - 1]);
  }
  delete this._events[type];
  return this;
};
// Return a copy of the handler list for `type` (possibly empty).
EventEmitter.prototype.listeners = function (type) {
  var handlers = this._events && this._events[type];
  if (!handlers) {
    return [];
  }
  if (isFunction(handlers)) {
    return [handlers];
  }
  return handlers.slice();
};
// Count listeners for `type` on `emitter` without copying the list.
EventEmitter.listenerCount = function (emitter, type) {
  var handlers = emitter._events && emitter._events[type];
  if (!handlers) {
    return 0;
  }
  return isFunction(handlers) ? 1 : handlers.length;
};
// Minimal typeof helpers used throughout this module.
function isFunction(arg) {
  return typeof arg === 'function';
}
function isNumber(arg) {
  return typeof arg === 'number';
}
function isObject(arg) {
  // note: typeof null === 'object', hence the explicit null check
  return arg !== null && typeof arg === 'object';
}
function isUndefined(arg) {
  return arg === undefined;
}
},{}],19:[function(require,module,exports){ | |
// shim for using process in browser
var process = module.exports = {};
// nextTick backed by (in preference order): window.setImmediate,
// window.postMessage, setTimeout(fn, 0).
process.nextTick = (function () {
  var canSetImmediate = typeof window !== 'undefined'
  && window.setImmediate;
  var canPost = typeof window !== 'undefined'
  && window.postMessage && window.addEventListener
  ;
  if (canSetImmediate) {
    return function (f) { return window.setImmediate(f) };
  }
  if (canPost) {
    // postMessage-based queue: a self-addressed 'process-tick' message
    // drains one queued callback per delivery
    var queue = [];
    window.addEventListener('message', function (ev) {
      var source = ev.source;
      if ((source === window || source === null) && ev.data === 'process-tick') {
        ev.stopPropagation();
        if (queue.length > 0) {
          var fn = queue.shift();
          fn();
        }
      }
    }, true);
    return function nextTick(fn) {
      queue.push(fn);
      window.postMessage('process-tick', '*');
    };
  }
  // last resort: timer-based deferral
  return function nextTick(fn) {
    setTimeout(fn, 0);
  };
})();
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
// Node-only APIs stubbed to throw (or return fixed values) in the browser.
process.binding = function (name) {
  throw new Error('process.binding is not supported');
}
// TODO(shtylman)
process.cwd = function () { return '/' };
process.chdir = function (dir) {
  throw new Error('process.chdir is not supported');
};
},{}],20:[function(require,module,exports){ | |
// browser Buffer shim built on base64-js and ieee754
var base64 = require('base64-js')
var ieee754 = require('ieee754')
exports.Buffer = Buffer
exports.SlowBuffer = Buffer
exports.INSPECT_MAX_BYTES = 50
Buffer.poolSize = 8192
/** | |
* If `Buffer._useTypedArrays`: | |
* === true Use Uint8Array implementation (fastest) | |
* === false Use Object implementation (compatible down to IE6) | |
*/ | |
// Feature-detect whether Uint8Array instances can be augmented with extra
// properties; decides between the typed-array and object-backed paths.
Buffer._useTypedArrays = (function () {
  // Detect if browser supports Typed Arrays. Supported browsers are IE 10+,
  // Firefox 4+, Chrome 7+, Safari 5.1+, Opera 11.6+, iOS 4.2+.
  if (typeof Uint8Array === 'undefined' || typeof ArrayBuffer === 'undefined')
    return false
  // Does the browser support adding properties to `Uint8Array` instances? If
  // not, then that's the same as no `Uint8Array` support. We need to be able to
  // add all the node Buffer API methods.
  // Relevant Firefox bug: https://bugzilla.mozilla.org/show_bug.cgi?id=695438
  try {
    var arr = new Uint8Array(0)
    arr.foo = function () { return 42 }
    return 42 === arr.foo() &&
        typeof arr.subarray === 'function' // Chrome 9-10 lack `subarray`
  } catch (e) {
    return false
  }
})()
/** | |
* Class: Buffer | |
* ============= | |
* | |
* The Buffer constructor returns instances of `Uint8Array` that are augmented | |
* with function properties for all the node `Buffer` API functions. We use | |
* `Uint8Array` so that square bracket notation works as expected -- it returns | |
* a single octet. | |
* | |
* By augmenting the instances, we can avoid modifying the `Uint8Array` | |
* prototype. | |
*/ | |
/**
 * Construct a Buffer from a number (length), a string (with encoding), or an
 * array-ish object. Returns an augmented Uint8Array when typed arrays are
 * usable, otherwise an object-backed Buffer instance.
 */
function Buffer (subject, encoding, noZero) {
  if (!(this instanceof Buffer))
    return new Buffer(subject, encoding, noZero)
  var type = typeof subject
  // Workaround: node's base64 implementation allows for non-padded strings
  // while base64-js does not.
  if (encoding === 'base64' && type === 'string') {
    subject = stringtrim(subject)
    while (subject.length % 4 !== 0) {
      subject = subject + '='
    }
  }
  // Find the length
  var length
  if (type === 'number')
    length = coerce(subject)
  else if (type === 'string')
    length = Buffer.byteLength(subject, encoding)
  else if (type === 'object')
    length = coerce(subject.length) // Assume object is an array
  else
    throw new Error('First argument needs to be a number, array or string.')
  var buf
  if (Buffer._useTypedArrays) {
    // Preferred: Return an augmented `Uint8Array` instance for best performance
    buf = augment(new Uint8Array(length))
  } else {
    // Fallback: Return THIS instance of Buffer (created by `new`)
    buf = this
    buf.length = length
    buf._isBuffer = true
  }
  var i
  if (Buffer._useTypedArrays && typeof Uint8Array === 'function' &&
      subject instanceof Uint8Array) {
    // Speed optimization -- use set if we're copying from a Uint8Array
    buf._set(subject)
  } else if (isArrayish(subject)) {
    // Treat array-ish objects as a byte array
    for (i = 0; i < length; i++) {
      if (Buffer.isBuffer(subject))
        buf[i] = subject.readUInt8(i)
      else
        buf[i] = subject[i]
    }
  } else if (type === 'string') {
    buf.write(subject, 0, encoding)
  } else if (type === 'number' && !Buffer._useTypedArrays && !noZero) {
    // zero-fill object-backed buffers unless the caller opted out
    for (i = 0; i < length; i++) {
      buf[i] = 0
    }
  }
  return buf
}
// STATIC METHODS | |
// ============== | |
// True for encoding names this Buffer implementation understands
// (case-insensitive).
Buffer.isEncoding = function (encoding) {
  var known = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'raw',
               'ucs2', 'ucs-2', 'utf16le', 'utf-16le']
  return known.indexOf(String(encoding).toLowerCase()) !== -1
}
// A Buffer is anything non-null flagged with the internal _isBuffer marker.
Buffer.isBuffer = function (b) {
  return Boolean(b != null && b._isBuffer)
}
// Number of bytes `str` occupies when encoded with `encoding` (default utf8).
Buffer.byteLength = function (str, encoding) {
  var ret
  str = str + ''
  switch (encoding || 'utf8') {
    case 'hex':
      // two hex digits per byte
      ret = str.length / 2
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8ToBytes(str).length
      break
    case 'ascii':
    case 'binary':
    case 'raw':
      // one byte per character
      ret = str.length
      break
    case 'base64':
      ret = base64ToBytes(str).length
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      // two bytes per UTF-16 code unit
      ret = str.length * 2
      break
    default:
      throw new Error('Unknown encoding')
  }
  return ret
}
// Concatenate a list of Buffers. NOTE: with exactly one element the element
// itself is returned (no copy made).
Buffer.concat = function (list, totalLength) {
  assert(isArray(list), 'Usage: Buffer.concat(list, [totalLength])\n' +
      'list should be an Array.')
  if (list.length === 0) {
    return new Buffer(0)
  } else if (list.length === 1) {
    return list[0]
  }
  var i
  // sum the lengths when the caller did not supply totalLength
  if (typeof totalLength !== 'number') {
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }
  var buf = new Buffer(totalLength)
  var pos = 0
  for (i = 0; i < list.length; i++) {
    var item = list[i]
    item.copy(buf, pos)
    pos += item.length
  }
  return buf
}
// BUFFER INSTANCE METHODS | |
// ======================= | |
// Write hex-encoded `string` into buf[offset .. offset+length); returns the
// number of bytes written and records the characters consumed in
// Buffer._charsWritten.
function _hexWrite (buf, string, offset, length) {
  offset = Number(offset) || 0
  var remaining = buf.length - offset
  // clamp length to the space remaining after offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  // must be an even number of digits
  var strLen = string.length
  assert(strLen % 2 === 0, 'Invalid hex string')
  if (length > strLen / 2) {
    length = strLen / 2
  }
  for (var i = 0; i < length; i++) {
    var byte = parseInt(string.substr(i * 2, 2), 16)
    assert(!isNaN(byte), 'Invalid hex string')
    buf[offset + i] = byte
  }
  Buffer._charsWritten = i * 2
  return i
}
// Encoding-specific writers: convert the string to bytes and blit them into
// `buf`, recording how many characters were consumed in Buffer._charsWritten.
function _utf8Write (buf, string, offset, length) {
  var written = blitBuffer(utf8ToBytes(string), buf, offset, length)
  Buffer._charsWritten = written
  return written
}
function _asciiWrite (buf, string, offset, length) {
  var written = blitBuffer(asciiToBytes(string), buf, offset, length)
  Buffer._charsWritten = written
  return written
}
function _binaryWrite (buf, string, offset, length) {
  // binary writes are byte-for-byte identical to ascii writes here
  return _asciiWrite(buf, string, offset, length)
}
function _base64Write (buf, string, offset, length) {
  var written = blitBuffer(base64ToBytes(string), buf, offset, length)
  Buffer._charsWritten = written
  return written
}
// Write `string` into the buffer, dispatching on encoding.
Buffer.prototype.write = function (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      encoding = length
      length = undefined
    }
  } else { // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }
  offset = Number(offset) || 0
  // clamp length to the space remaining after offset
  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()
  switch (encoding) {
    case 'hex':
      return _hexWrite(this, string, offset, length)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Write(this, string, offset, length)
    case 'ascii':
      return _asciiWrite(this, string, offset, length)
    case 'binary':
      return _binaryWrite(this, string, offset, length)
    case 'base64':
      return _base64Write(this, string, offset, length)
    default:
      throw new Error('Unknown encoding')
  }
}
// Decode the byte range [start, end) of this buffer into a string.
Buffer.prototype.toString = function (encoding, start, end) {
  var self = this
  encoding = String(encoding || 'utf8').toLowerCase()
  start = Number(start) || 0
  // default end to the buffer length; the previous code used a redundant
  // nested assignment (`: end = self.length`) inside the ternary
  end = (end !== undefined) ? Number(end) : self.length
  // Fastpath empty strings
  if (end === start)
    return ''
  switch (encoding) {
    case 'hex':
      return _hexSlice(self, start, end)
    case 'utf8':
    case 'utf-8':
    case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      return _utf8Slice(self, start, end)
    case 'ascii':
      return _asciiSlice(self, start, end)
    case 'binary':
      return _binarySlice(self, start, end)
    case 'base64':
      return _base64Slice(self, start, end)
    default:
      throw new Error('Unknown encoding')
  }
}
// Serialize as the canonical node {type: 'Buffer', data: [...]} shape.
Buffer.prototype.toJSON = function () {
  var bytes = Array.prototype.slice.call(this._arr || this, 0)
  return {
    type: 'Buffer',
    data: bytes
  }
}
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
Buffer.prototype.copy = function (target, target_start, start, end) {
  var source = this
  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (!target_start) target_start = 0
  // Copy 0 bytes; we're done
  if (end === start) return
  if (target.length === 0 || source.length === 0) return
  // Fatal error conditions
  assert(end >= start, 'sourceEnd < sourceStart')
  assert(target_start >= 0 && target_start < target.length,
      'targetStart out of bounds')
  assert(start >= 0 && start < source.length, 'sourceStart out of bounds')
  assert(end >= 0 && end <= source.length, 'sourceEnd out of bounds')
  // Are we oob?
  if (end > this.length)
    end = this.length
  // truncate the copy if the target cannot hold the full range
  if (target.length - target_start < end - start)
    end = target.length - target_start + start
  // copy!
  for (var i = 0; i < end - start; i++)
    target[i + target_start] = this[i + start]
}
// Base64-encode a byte range, skipping the slice when it spans the whole buf.
function _base64Slice (buf, start, end) {
  var whole = (start === 0 && end === buf.length)
  return base64.fromByteArray(whole ? buf : buf.slice(start, end))
}
// Decode UTF-8 bytes to a string: ASCII bytes flush the pending run of
// %xx escapes through decodeUtf8Char (defined elsewhere in this module);
// bytes > 0x7F accumulate as %xx escapes in `tmp`.
function _utf8Slice (buf, start, end) {
  var res = ''
  var tmp = ''
  end = Math.min(buf.length, end)
  for (var i = start; i < end; i++) {
    if (buf[i] <= 0x7F) {
      res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
      tmp = ''
    } else {
      tmp += '%' + buf[i].toString(16)
    }
  }
  // flush any trailing multi-byte sequence
  return res + decodeUtf8Char(tmp)
}
// Decode a byte range to a string, one character per byte via fromCharCode;
// `end` is clamped to the buffer length.
function _asciiSlice (buf, start, end) {
  var stop = Math.min(buf.length, end)
  var parts = []
  for (var i = start; i < stop; i++) {
    parts.push(String.fromCharCode(buf[i]))
  }
  return parts.join('')
}
// binary decoding is byte-for-byte identical to ascii decoding here
function _binarySlice (buf, start, end) {
  return _asciiSlice(buf, start, end)
}
// Hex-encode the byte range [start, end), clamping both bounds to the buffer.
function _hexSlice (buf, start, end) {
  var len = buf.length
  start = (!start || start < 0) ? 0 : start
  if (!end || end < 0 || end > len) {
    end = len
  }
  var out = []
  for (var i = start; i < end; i++) {
    out.push(toHex(buf[i]))
  }
  return out.join('')
}
// http://nodejs.org/api/buffer.html#buffer_buf_slice_start_end
// Typed-array path returns a view (subarray); object fallback returns a copy.
Buffer.prototype.slice = function (start, end) {
  var len = this.length
  start = clamp(start, len, 0)
  end = clamp(end, len, len)
  if (Buffer._useTypedArrays) {
    return augment(this.subarray(start, end))
  } else {
    var sliceLen = end - start
    // noZero=true: the bytes are overwritten immediately below
    var newBuf = new Buffer(sliceLen, undefined, true)
    for (var i = 0; i < sliceLen; i++) {
      newBuf[i] = this[i + start]
    }
    return newBuf
  }
}
// Deprecated single-byte accessors kept for API compatibility.
// `get` will be removed in Node 0.13+
Buffer.prototype.get = function (offset) {
  console.log('.get() is deprecated. Access using array indexes instead.')
  return this.readUInt8(offset)
}
// `set` will be removed in Node 0.13+
Buffer.prototype.set = function (v, offset) {
  console.log('.set() is deprecated. Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}
// Read one unsigned byte; with asserts enabled, missing/out-of-bounds
// offsets throw, otherwise an out-of-bounds read yields undefined.
Buffer.prototype.readUInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }
  return (offset >= this.length) ? undefined : this[offset]
}
// Read a 16-bit unsigned int at `offset` in the given endianness. With
// asserts disabled, reads that straddle the end of the buffer return a
// partial value (missing high bytes are simply not OR-ed in).
function _readUInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }
  var len = buf.length
  if (offset >= len)
    return
  var val
  if (littleEndian) {
    val = buf[offset]
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
  } else {
    val = buf[offset] << 8
    if (offset + 1 < len)
      val |= buf[offset + 1]
  }
  return val
}
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
  return _readUInt16(this, offset, true, noAssert)
}
Buffer.prototype.readUInt16BE = function (offset, noAssert) {
  return _readUInt16(this, offset, false, noAssert)
}
// Read a 32-bit unsigned int at `offset`. The high byte is added via
// `<< 24 >>> 0` (plain addition, not |=) so the result stays unsigned.
// With asserts disabled, partial reads at the end return partial values.
function _readUInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }
  var len = buf.length
  if (offset >= len)
    return
  var val
  if (littleEndian) {
    if (offset + 2 < len)
      val = buf[offset + 2] << 16
    if (offset + 1 < len)
      val |= buf[offset + 1] << 8
    val |= buf[offset]
    if (offset + 3 < len)
      val = val + (buf[offset + 3] << 24 >>> 0)
  } else {
    if (offset + 1 < len)
      val = buf[offset + 1] << 16
    if (offset + 2 < len)
      val |= buf[offset + 2] << 8
    if (offset + 3 < len)
      val |= buf[offset + 3]
    val = val + (buf[offset] << 24 >>> 0)
  }
  return val
}
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
  return _readUInt32(this, offset, true, noAssert)
}
Buffer.prototype.readUInt32BE = function (offset, noAssert) {
  return _readUInt32(this, offset, false, noAssert)
}
// Read one signed byte (two's complement: high bit set means negative).
Buffer.prototype.readInt8 = function (offset, noAssert) {
  if (!noAssert) {
    assert(offset !== undefined && offset !== null,
        'missing offset')
    assert(offset < this.length, 'Trying to read beyond buffer length')
  }
  if (offset >= this.length) {
    return
  }
  var byte = this[offset]
  return (byte & 0x80) ? -(0xff - byte + 1) : byte
}
/**
 * Read a signed 16-bit integer from `buf` at `offset`.
 * Delegates to _readUInt16 and applies two's-complement sign conversion.
 * Returns undefined when `offset` is at/past the end of the buffer.
 */
function _readInt16 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
  }

  if (offset >= buf.length)
    return

  var raw = _readUInt16(buf, offset, littleEndian, true)
  // a set high bit means a two's-complement negative value
  return (raw & 0x8000) ? -(0xffff - raw + 1) : raw
}
// Read a signed 16-bit little-endian integer at `offset`.
Buffer.prototype.readInt16LE = function (offset, noAssert) {
  return _readInt16(this, offset, true, noAssert)
}

// Read a signed 16-bit big-endian integer at `offset`.
Buffer.prototype.readInt16BE = function (offset, noAssert) {
  return _readInt16(this, offset, false, noAssert)
}
/**
 * Read a signed 32-bit integer from `buf` at `offset`.
 * Delegates to _readUInt32 and applies two's-complement sign conversion.
 * Returns undefined when `offset` is at/past the end of the buffer.
 */
function _readInt32 (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  if (offset >= buf.length)
    return

  var raw = _readUInt32(buf, offset, littleEndian, true)
  // a set high bit means a two's-complement negative value
  return (raw & 0x80000000) ? -(0xffffffff - raw + 1) : raw
}
// Read a signed 32-bit little-endian integer at `offset`.
Buffer.prototype.readInt32LE = function (offset, noAssert) {
  return _readInt32(this, offset, true, noAssert)
}

// Read a signed 32-bit big-endian integer at `offset`.
Buffer.prototype.readInt32BE = function (offset, noAssert) {
  return _readInt32(this, offset, false, noAssert)
}
/**
 * Read a 32-bit IEEE754 float from `buf` at `offset` via ieee754.read.
 * NOTE(review): unlike the integer readers there is no offset guard here;
 * with `noAssert` an out-of-range read falls straight through to ieee754.
 */
function _readFloat (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 23, 4)
}
// Read a 32-bit little-endian float at `offset`.
Buffer.prototype.readFloatLE = function (offset, noAssert) {
  return _readFloat(this, offset, true, noAssert)
}

// Read a 32-bit big-endian float at `offset`.
Buffer.prototype.readFloatBE = function (offset, noAssert) {
  return _readFloat(this, offset, false, noAssert)
}
/**
 * Read a 64-bit IEEE754 double from `buf` at `offset` via ieee754.read.
 * NOTE(review): no offset guard here; with `noAssert` an out-of-range read
 * falls straight through to ieee754.
 */
function _readDouble (buf, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset + 7 < buf.length, 'Trying to read beyond buffer length')
  }

  return ieee754.read(buf, offset, littleEndian, 52, 8)
}
// Read a 64-bit little-endian double at `offset`.
Buffer.prototype.readDoubleLE = function (offset, noAssert) {
  return _readDouble(this, offset, true, noAssert)
}

// Read a 64-bit big-endian double at `offset`.
Buffer.prototype.readDoubleBE = function (offset, noAssert) {
  return _readDouble(this, offset, false, noAssert)
}
/**
 * Write an unsigned 8-bit integer `value` at `offset`.
 * With `noAssert`, an out-of-range offset is silently ignored.
 */
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'trying to write beyond buffer length')
    verifuint(value, 0xff)
  }

  // writes past the end are dropped rather than throwing
  if (offset < this.length)
    this[offset] = value
}
/**
 * Write an unsigned 16-bit integer `value` into `buf` at `offset`.
 * With `noAssert`, writes that straddle the end of the buffer are
 * truncated, and an out-of-range offset is ignored entirely.
 */
function _writeUInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffff)
  }

  var len = buf.length
  if (offset >= len)
    return

  // emit up to two bytes, least-significant first for LE
  var count = Math.min(len - offset, 2)
  for (var i = 0; i < count; i++) {
    var shift = (littleEndian ? i : 1 - i) * 8
    buf[offset + i] = (value >>> shift) & 0xff
  }
}
// Write an unsigned 16-bit little-endian integer at `offset`.
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) {
  _writeUInt16(this, value, offset, true, noAssert)
}

// Write an unsigned 16-bit big-endian integer at `offset`.
Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) {
  _writeUInt16(this, value, offset, false, noAssert)
}
/**
 * Write an unsigned 32-bit integer `value` into `buf` at `offset`.
 * With `noAssert`, writes that straddle the end of the buffer are
 * truncated, and an out-of-range offset is ignored entirely.
 */
function _writeUInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'trying to write beyond buffer length')
    verifuint(value, 0xffffffff)
  }

  var len = buf.length
  if (offset >= len)
    return

  // emit up to four bytes, least-significant first for LE
  var count = Math.min(len - offset, 4)
  for (var i = 0; i < count; i++) {
    var shift = (littleEndian ? i : 3 - i) * 8
    buf[offset + i] = (value >>> shift) & 0xff
  }
}
// Write an unsigned 32-bit little-endian integer at `offset`.
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) {
  _writeUInt32(this, value, offset, true, noAssert)
}

// Write an unsigned 32-bit big-endian integer at `offset`.
Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) {
  _writeUInt32(this, value, offset, false, noAssert)
}
/**
 * Write a signed 8-bit integer `value` at `offset`.
 * Negative values are stored as their two's complement (0x100 + value).
 */
Buffer.prototype.writeInt8 = function (value, offset, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset < this.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7f, -0x80)
  }

  if (offset >= this.length)
    return

  var byte = value >= 0 ? value : 0xff + value + 1
  this.writeUInt8(byte, offset, noAssert)
}
/**
 * Write a signed 16-bit integer `value` into `buf` at `offset`.
 * Negative values are converted to their two's complement before being
 * written through the unsigned helper.
 */
function _writeInt16 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 1 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fff, -0x8000)
  }

  if (offset >= buf.length)
    return

  var unsigned = value >= 0 ? value : 0xffff + value + 1
  _writeUInt16(buf, unsigned, offset, littleEndian, noAssert)
}
// Write a signed 16-bit little-endian integer at `offset`.
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) {
  _writeInt16(this, value, offset, true, noAssert)
}

// Write a signed 16-bit big-endian integer at `offset`.
Buffer.prototype.writeInt16BE = function (value, offset, noAssert) {
  _writeInt16(this, value, offset, false, noAssert)
}
/**
 * Write a signed 32-bit integer `value` into `buf` at `offset`.
 * Negative values are converted to their two's complement before being
 * written through the unsigned helper.
 */
function _writeInt32 (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifsint(value, 0x7fffffff, -0x80000000)
  }

  if (offset >= buf.length)
    return

  var unsigned = value >= 0 ? value : 0xffffffff + value + 1
  _writeUInt32(buf, unsigned, offset, littleEndian, noAssert)
}
// Write a signed 32-bit little-endian integer at `offset`.
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, true, noAssert)
}

// Write a signed 32-bit big-endian integer at `offset`.
Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
  _writeInt32(this, value, offset, false, noAssert)
}
/**
 * Write a 32-bit IEEE754 float `value` into `buf` at `offset` via
 * ieee754.write. An out-of-range offset is silently ignored.
 */
function _writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
    verifIEEE754(value, 3.4028234663852886e+38, -3.4028234663852886e+38)
  }

  var len = buf.length
  if (offset >= len)
    return

  ieee754.write(buf, value, offset, littleEndian, 23, 4)
}
// Write a 32-bit little-endian float at `offset`.
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, true, noAssert)
}

// Write a 32-bit big-endian float at `offset`.
Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
  _writeFloat(this, value, offset, false, noAssert)
}
/**
 * Write a 64-bit IEEE754 double `value` into `buf` at `offset` via
 * ieee754.write. An out-of-range offset is silently ignored.
 */
function _writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert) {
    assert(value !== undefined && value !== null, 'missing value')
    assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
    assert(offset !== undefined && offset !== null, 'missing offset')
    assert(offset + 7 < buf.length,
        'Trying to write beyond buffer length')
    verifIEEE754(value, 1.7976931348623157E+308, -1.7976931348623157E+308)
  }

  var len = buf.length
  if (offset >= len)
    return

  ieee754.write(buf, value, offset, littleEndian, 52, 8)
}
// Write a 64-bit little-endian double at `offset`.
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, true, noAssert)
}

// Write a 64-bit big-endian double at `offset`.
Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
  _writeDouble(this, value, offset, false, noAssert)
}
// fill(value, start=0, end=buffer.length)
/**
 * Fill the range [start, end) with `value`. A string value uses its first
 * character's char code; a falsy value fills with zero.
 */
Buffer.prototype.fill = function (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  if (!end) end = this.length

  if (typeof value === 'string') {
    value = value.charCodeAt(0)
  }

  assert(typeof value === 'number' && !isNaN(value), 'value is not a number')
  assert(end >= start, 'end < start')

  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return

  assert(start >= 0 && start < this.length, 'start out of bounds')
  assert(end >= 0 && end <= this.length, 'end out of bounds')

  var i = start
  while (i < end) {
    this[i] = value
    i++
  }
}
/**
 * Human-readable hex dump, e.g. '<Buffer 0a ff 03>'.
 * Output is truncated with '...' after exports.INSPECT_MAX_BYTES bytes.
 */
Buffer.prototype.inspect = function () {
  var bytes = []
  for (var i = 0; i < this.length; i++) {
    bytes[i] = toHex(this[i])
    if (i === exports.INSPECT_MAX_BYTES) {
      bytes[i + 1] = '...'
      break
    }
  }
  return '<Buffer ' + bytes.join(' ') + '>'
}
/**
 * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
 * Added in Node 0.12. Only available in browsers that support ArrayBuffer.
 */
Buffer.prototype.toArrayBuffer = function () {
  if (typeof Uint8Array !== 'function')
    throw new Error('Buffer.toArrayBuffer not supported in this browser')

  if (Buffer._useTypedArrays) {
    // the Buffer is itself a typed array; copying it copies the memory
    return (new Buffer(this)).buffer
  }

  // manual byte-by-byte copy into a fresh typed array
  var copy = new Uint8Array(this.length)
  for (var i = 0; i < copy.length; i++)
    copy[i] = this[i]
  return copy.buffer
}
// HELPER FUNCTIONS
// ================

// Trim leading/trailing whitespace, falling back to a regex replace on
// engines without String.prototype.trim.
function stringtrim (str) {
  return str.trim ? str.trim() : str.replace(/^\s+|\s+$/g, '')
}
var BP = Buffer.prototype

/**
 * Augment the Uint8Array *instance* (not the class!) with Buffer methods.
 * Copies every Buffer prototype method onto `arr` and returns it.
 */
function augment (arr) {
  arr._isBuffer = true

  // save reference to original Uint8Array get/set methods before overwriting
  arr._get = arr.get
  arr._set = arr.set

  // deprecated, will be removed in node 0.13+
  arr.get = BP.get
  arr.set = BP.set

  var methods = [
    'write', 'toString', 'toJSON', 'copy', 'slice',
    'readUInt8', 'readUInt16LE', 'readUInt16BE', 'readUInt32LE', 'readUInt32BE',
    'readInt8', 'readInt16LE', 'readInt16BE', 'readInt32LE', 'readInt32BE',
    'readFloatLE', 'readFloatBE', 'readDoubleLE', 'readDoubleBE',
    'writeUInt8', 'writeUInt16LE', 'writeUInt16BE', 'writeUInt32LE', 'writeUInt32BE',
    'writeInt8', 'writeInt16LE', 'writeInt16BE', 'writeInt32LE', 'writeInt32BE',
    'writeFloatLE', 'writeFloatBE', 'writeDoubleLE', 'writeDoubleBE',
    'fill', 'inspect', 'toArrayBuffer'
  ]
  for (var i = 0; i < methods.length; i++) {
    arr[methods[i]] = BP[methods[i]]
  }

  // toLocaleString is an alias for toString, matching Buffer
  arr.toLocaleString = BP.toString

  return arr
}
// slice(start, end)

// Clamp a user-supplied slice index into [0, len]; non-numbers become
// `defaultValue`, fractions truncate toward zero, negatives count from
// the end of the buffer.
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  index = ~~index; // Coerce to integer.
  if (index >= len) return len
  if (index < 0) index += len
  return index >= 0 ? index : 0
}
// Coerce an arbitrary length value to a non-negative integer:
// round up fractions, map NaN to 0 (via the double negate), floor at 0.
function coerce (length) {
  var n = ~~Math.ceil(+length)
  return n < 0 ? 0 : n
}
// Array test with a toString-based fallback for engines that lack
// the native Array.isArray.
function isArray (subject) {
  if (Array.isArray) return Array.isArray(subject)
  return Object.prototype.toString.call(subject) === '[object Array]'
}
// Loose duck-typing check: truthy for arrays, Buffers, and any object with
// a numeric `length`. NOTE(review): a falsy `subject` is returned as-is
// (e.g. null), not coerced to false — callers only use this in boolean
// context, so keep the raw expression.
function isArrayish (subject) {
  return isArray(subject) || Buffer.isBuffer(subject) ||
      subject && typeof subject === 'object' &&
      typeof subject.length === 'number'
}
// Format a byte as a two-character lowercase hex string.
function toHex (n) {
  var hex = n.toString(16)
  return n < 16 ? '0' + hex : hex
}
/**
 * Encode a JS string to an array of UTF-8 byte values.
 *
 * Fix: the original called encodeURIComponent on unpaired surrogates, which
 * throws URIError, and it unconditionally consumed the character after ANY
 * surrogate (swallowing a following non-surrogate char). Now a low
 * surrogate is only consumed when it actually completes a pair, and any
 * unencodable (lone) surrogate is emitted as the UTF-8 bytes of U+FFFD,
 * matching node's Buffer behavior.
 */
function utf8ToBytes (str) {
  var byteArray = []
  for (var i = 0; i < str.length; i++) {
    var b = str.charCodeAt(i)
    if (b <= 0x7F)
      byteArray.push(b)
    else {
      var start = i
      // consume the next char only when it completes a surrogate pair
      if (b >= 0xD800 && b <= 0xDBFF && i + 1 < str.length) {
        var next = str.charCodeAt(i + 1)
        if (next >= 0xDC00 && next <= 0xDFFF) i++
      }
      var encoded
      try {
        encoded = encodeURIComponent(str.slice(start, i + 1))
      } catch (err) {
        // lone surrogate: emit U+FFFD replacement character (EF BF BD)
        byteArray.push(0xEF, 0xBF, 0xBD)
        continue
      }
      // '%C3%A9' -> ['C3', 'A9'] -> [0xC3, 0xA9]
      var h = encoded.substr(1).split('%')
      for (var j = 0; j < h.length; j++)
        byteArray.push(parseInt(h[j], 16))
    }
  }
  return byteArray
}
// Encode a string one byte per character, keeping only the low byte of
// each char code. Node's code seems to be doing this and not & 0x7F..
function asciiToBytes (str) {
  var bytes = new Array(str.length)
  for (var i = 0; i < str.length; i++) {
    bytes[i] = str.charCodeAt(i) & 0xFF
  }
  return bytes
}
// Decode a base64 string to an array of byte values (delegates to base64-js).
function base64ToBytes (str) {
  return base64.toByteArray(str)
}
/**
 * Copy up to `length` bytes from `src` into `dst` starting at `offset`,
 * stopping at the end of either buffer. Returns the number of bytes copied.
 *
 * Fix: removed the unused `pos` variable left over from an earlier revision.
 */
function blitBuffer (src, dst, offset, length) {
  var i
  for (i = 0; i < length; i++) {
    // stop at the end of the destination or the source, whichever is first
    if ((i + offset >= dst.length) || (i >= src.length))
      break
    dst[i + offset] = src[i]
  }
  return i
}
// Decode one percent-encoded UTF-8 sequence; invalid sequences map to the
// Unicode replacement character U+FFFD.
function decodeUtf8Char (str) {
  try {
    return decodeURIComponent(str)
  } catch (err) {
    return '\uFFFD' // UTF 8 invalid char
  }
}
/*
 * We have to make sure that the value is a valid integer. This means that
 * it is non-negative, has no fractional component, and does not exceed the
 * maximum allowed value for the type.
 */
function verifuint (value, max) {
  assert(typeof value === 'number', 'cannot write a non-number as a number')
  assert(value >= 0, 'specified a negative value for writing an unsigned value')
  assert(value <= max, 'value is larger than maximum value for type')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate a signed integer: numeric, within [min, max], no fraction.
function verifsint(value, max, min) {
  assert(typeof value === 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
  assert(Math.floor(value) === value, 'value has a fractional component')
}
// Validate a float for IEEE754 storage: numeric and within [min, max]
// (fractional components are allowed here, unlike the int validators).
function verifIEEE754(value, max, min) {
  assert(typeof value === 'number', 'cannot write a non-number as a number')
  assert(value <= max, 'value larger than maximum allowed value')
  assert(value >= min, 'value smaller than minimum allowed value')
}
// Minimal assertion helper: throw when `test` is falsy, with an optional
// message (used instead of a require('assert') dependency in the browser).
function assert (test, message) {
  if (!test) throw new Error(message || 'Failed assertion')
}
},{"base64-js":21,"ieee754":22}],21:[function(require,module,exports){ | |
// base64 alphabet used by the encoder below
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';

// base64-js module body: writes onto `exports` (or `this.base64js` in a
// plain browser environment — see the invocation at the bottom)
;(function (exports) {
'use strict';

// use typed arrays for decode output when the environment supports them
var Arr = (typeof Uint8Array !== 'undefined')
? Uint8Array
: Array

// char codes used to classify base64 characters in decode()
var PLUS = '+'.charCodeAt(0)
var SLASH = '/'.charCodeAt(0)
var NUMBER = '0'.charCodeAt(0)
var LOWER = 'a'.charCodeAt(0)
var UPPER = 'A'.charCodeAt(0)
// Map a single base64 character to its 6-bit value.
// Characters below '0' yield -1; characters above 'z' fall through and
// yield undefined (callers only pass valid base64 input).
function decode (elt) {
  var code = elt.charCodeAt(0)
  if (code === PLUS) return 62                            // '+'
  if (code === SLASH) return 63                           // '/'
  if (code < NUMBER) return -1                            // no match
  if (code < NUMBER + 10) return code - NUMBER + 26 + 26  // '0'-'9' -> 52-61
  if (code < UPPER + 26) return code - UPPER              // 'A'-'Z' -> 0-25
  if (code < LOWER + 26) return code - LOWER + 26         // 'a'-'z' -> 26-51
}
// Decode a base64 string into an Arr (Uint8Array or Array) of byte values.
// Throws if the input length is not a multiple of 4.
function b64ToByteArray (b64) {
  var i, j, l, tmp, placeHolders, arr

  if (b64.length % 4 > 0) {
    throw new Error('Invalid string. Length must be a multiple of 4')
  }

  // the number of equal signs (place holders)
  // if there are two placeholders, then the two characters before it
  // represent one byte
  // if there is only one, then the three characters before it represent 2 bytes
  // this is just a cheap hack to not do indexOf twice
  var len = b64.length
  placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0

  // base64 is 4/3 + up to two characters of the original data
  arr = new Arr(b64.length * 3 / 4 - placeHolders)

  // if there are placeholders, only get up to the last complete 4 chars
  l = placeHolders > 0 ? b64.length - 4 : b64.length

  // append into arr at the next free slot
  var L = 0

  function push (v) {
    arr[L++] = v
  }

  // decode each complete group of 4 chars into 3 bytes
  for (i = 0, j = 0; i < l; i += 4, j += 3) {
    tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3))
    push((tmp & 0xFF0000) >> 16)
    push((tmp & 0xFF00) >> 8)
    push(tmp & 0xFF)
  }

  // the final group decodes to 1 byte (two placeholders) or 2 bytes (one)
  if (placeHolders === 2) {
    tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
    push(tmp & 0xFF)
  } else if (placeHolders === 1) {
    tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
    push((tmp >> 8) & 0xFF)
    push(tmp & 0xFF)
  }

  return arr
}
// Encode an array of byte values as a base64 string, padding the final
// group with '=' as needed.
function uint8ToBase64 (uint8) {
  var extraBytes = uint8.length % 3 // if we have 1 byte left, pad 2 bytes
  var output = ''

  function encode (num) {
    return lookup.charAt(num)
  }

  function tripletToBase64 (num) {
    return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) +
      encode(num >> 6 & 0x3F) + encode(num & 0x3F)
  }

  // go through the array every three bytes; the tail is handled below
  var mainLength = uint8.length - extraBytes
  for (var i = 0; i < mainLength; i += 3) {
    var chunk = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
    output += tripletToBase64(chunk)
  }

  // pad the end with zeros, but make sure to not forget the extra bytes
  if (extraBytes === 1) {
    var last = uint8[uint8.length - 1]
    output += encode(last >> 2)
    output += encode((last << 4) & 0x3F)
    output += '=='
  } else if (extraBytes === 2) {
    var tail = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
    output += encode(tail >> 10)
    output += encode((tail >> 4) & 0x3F)
    output += encode((tail << 2) & 0x3F)
    output += '='
  }

  return output
}
// public API of the base64-js module
exports.toByteArray = b64ToByteArray
exports.fromByteArray = uint8ToBase64
}(typeof exports === 'undefined' ? (this.base64js = {}) : exports))
},{}],22:[function(require,module,exports){ | |
// Decode an IEEE754 float from `buffer` at `offset`.
//   isLE   - byte order; mLen - mantissa bits; nBytes - total bytes.
// Used with (23, 4) for singles and (52, 8) for doubles.
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
  var e, m,
      eLen = nBytes * 8 - mLen - 1,
      eMax = (1 << eLen) - 1,
      eBias = eMax >> 1,
      nBits = -7,
      i = isLE ? (nBytes - 1) : 0,
      d = isLE ? -1 : 1,
      s = buffer[offset + i];

  i += d;

  // accumulate the exponent bits from the leading bytes
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);

  // accumulate the mantissa bits from the remaining bytes
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);

  if (e === 0) {
    // subnormal number
    e = 1 - eBias;
  } else if (e === eMax) {
    // all-ones exponent encodes NaN / Infinity
    return m ? NaN : ((s ? -1 : 1) * Infinity);
  } else {
    // normal number: restore the implicit leading 1 bit
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
// Encode `value` as an IEEE754 float into `buffer` at `offset`.
//   isLE   - byte order; mLen - mantissa bits; nBytes - total bytes.
// Used with (23, 4) for singles and (52, 8) for doubles.
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
  var e, m, c,
      eLen = nBytes * 8 - mLen - 1,
      eMax = (1 << eLen) - 1,
      eBias = eMax >> 1,
      // rounding term, only applied for single precision (mLen === 23)
      rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
      i = isLE ? 0 : (nBytes - 1),
      d = isLE ? 1 : -1,
      // sign bit; the 1/value check catches negative zero
      s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;

  value = Math.abs(value);

  if (isNaN(value) || value === Infinity) {
    // NaN / Infinity use the all-ones exponent
    m = isNaN(value) ? 1 : 0;
    e = eMax;
  } else {
    // split into exponent and normalized mantissa
    e = Math.floor(Math.log(value) / Math.LN2);
    if (value * (c = Math.pow(2, -e)) < 1) {
      e--;
      c *= 2;
    }
    // apply rounding before extracting mantissa bits
    if (e + eBias >= 1) {
      value += rt / c;
    } else {
      value += rt * Math.pow(2, 1 - eBias);
    }
    if (value * c >= 2) {
      e++;
      c /= 2;
    }

    if (e + eBias >= eMax) {
      // overflow: clamp to Infinity
      m = 0;
      e = eMax;
    } else if (e + eBias >= 1) {
      // normal number: drop the implicit leading 1 bit
      m = (value * c - 1) * Math.pow(2, mLen);
      e = e + eBias;
    } else {
      // subnormal number
      m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
      e = 0;
    }
  }

  // write the mantissa bytes
  for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);

  // pack the exponent with the remaining mantissa bits and write them
  e = (e << mLen) | m;
  eLen += mLen;
  for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);

  // finally OR the sign bit into the top byte
  buffer[offset + i - d] |= s * 128;
};
},{}],23:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a duplex stream is just a stream that is both readable and writable. | |
// Since JS doesn't have multiple prototypal inheritance, this class | |
// prototypally inherits from Readable, and then parasitically from | |
// Writable. | |
module.exports = Duplex;

var inherits = require('inherits');
var setImmediate = require('process/browser.js').nextTick;
var Readable = require('./readable.js');
var Writable = require('./writable.js');

inherits(Duplex, Readable);

// borrow the writable side's methods (parasitic inheritance, since JS has
// no multiple prototypal inheritance)
Duplex.prototype.write = Writable.prototype.write;
Duplex.prototype.end = Writable.prototype.end;
Duplex.prototype._write = Writable.prototype._write;
/**
 * A stream that is both readable and writable.
 * Options: readable/writable (set false to disable a side) and
 * allowHalfOpen (default true; false auto-ends the writable side when the
 * readable side ends).
 */
function Duplex(options) {
  // allow construction without `new`
  if (!(this instanceof Duplex))
    return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  // either side can be disabled explicitly
  if (options && options.readable === false)
    this.readable = false;
  if (options && options.writable === false)
    this.writable = false;

  // half-open is allowed unless explicitly turned off
  this.allowHalfOpen = !(options && options.allowHalfOpen === false);

  this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
  // if half-open is allowed, or the writable side already ended, there is
  // nothing to enforce
  if (this.allowHalfOpen || this._writableState.ended)
    return;

  // no more data can be written, but let writes queued in this tick finish
  // before closing the writable side
  var self = this;
  setImmediate(function () {
    self.end();
  });
}
},{"./readable.js":27,"./writable.js":29,"inherits":33,"process/browser.js":25}],24:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Stream;

var EE = require('events').EventEmitter;
var inherits = require('inherits');

inherits(Stream, EE);

// expose the modern stream classes from this legacy entry point
Stream.Readable = require('./readable.js');
Stream.Writable = require('./writable.js');
Stream.Duplex = require('./duplex.js');
Stream.Transform = require('./transform.js');
Stream.PassThrough = require('./passthrough.js');

// Backwards-compat with node 0.4.x
Stream.Stream = Stream;

// old-style streams.  Note that the pipe method (the only relevant
// part of this class) is overridden in the Readable class.
function Stream() {
  EE.call(this);
}
/**
 * Classic (pre-streams2) pipe: forward 'data' events from this stream to
 * `dest`, with pause/resume backpressure, and tear everything down on
 * end/close/error. Returns `dest` so pipes can be chained.
 */
Stream.prototype.pipe = function(dest, options) {
  var source = this;

  // pause the source when the destination's buffer is full
  function ondata(chunk) {
    if (dest.writable) {
      if (false === dest.write(chunk) && source.pause) {
        source.pause();
      }
    }
  }

  source.on('data', ondata);

  // resume the source once the destination drains
  function ondrain() {
    if (source.readable && source.resume) {
      source.resume();
    }
  }

  dest.on('drain', ondrain);

  // If the 'end' option is not supplied, dest.end() will be called when
  // source gets the 'end' or 'close' events.  Only dest.end() once.
  if (!dest._isStdio && (!options || options.end !== false)) {
    source.on('end', onend);
    source.on('close', onclose);
  }

  var didOnEnd = false;
  function onend() {
    if (didOnEnd) return;
    didOnEnd = true;

    dest.end();
  }

  function onclose() {
    if (didOnEnd) return;
    didOnEnd = true;

    if (typeof dest.destroy === 'function') dest.destroy();
  }

  // don't leave dangling pipes when there are errors.
  function onerror(er) {
    cleanup();
    // rethrow only when nobody else is listening for the error
    if (EE.listenerCount(this, 'error') === 0) {
      throw er; // Unhandled stream error in pipe.
    }
  }

  source.on('error', onerror);
  dest.on('error', onerror);

  // remove all the event listeners that were added.
  function cleanup() {
    source.removeListener('data', ondata);
    dest.removeListener('drain', ondrain);

    source.removeListener('end', onend);
    source.removeListener('close', onclose);

    source.removeListener('error', onerror);
    dest.removeListener('error', onerror);

    source.removeListener('end', cleanup);
    source.removeListener('close', cleanup);

    dest.removeListener('close', cleanup);
  }

  source.on('end', cleanup);
  source.on('close', cleanup);

  dest.on('close', cleanup);

  dest.emit('pipe', source);

  // Allow for unix-like usage: A.pipe(B).pipe(C)
  return dest;
};
},{"./duplex.js":23,"./passthrough.js":26,"./readable.js":27,"./transform.js":28,"./writable.js":29,"events":18,"inherits":33}],25:[function(require,module,exports){ | |
module.exports=require(19) | |
},{}],26:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a passthrough stream. | |
// basically just the most minimal sort of Transform stream. | |
// Every written chunk gets output as-is. | |
module.exports = PassThrough;

var Transform = require('./transform.js');
var inherits = require('inherits');

inherits(PassThrough, Transform);

// constructor: usable with or without `new`
function PassThrough(options) {
  if (!(this instanceof PassThrough))
    return new PassThrough(options);

  Transform.call(this, options);
}
// identity transform: forward every chunk through unchanged
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
},{"./transform.js":28,"inherits":33}],27:[function(require,module,exports){ | |
(function (process){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
module.exports = Readable;

// expose the per-stream state class for the other stream modules
Readable.ReadableState = ReadableState;

var EE = require('events').EventEmitter;
var Stream = require('./index.js');
var Buffer = require('buffer').Buffer;
var setImmediate = require('process/browser.js').nextTick;
var StringDecoder; // declared here, assigned lazily elsewhere in this module

var inherits = require('inherits');

inherits(Readable, Stream);
// Internal bookkeeping for one Readable stream.  `options` may be omitted.
function ReadableState(options, stream) {
  options = options || {};

  // Buffer level at which _read() stops being called preemptively.
  // 0 is a meaningful value ("never read ahead"), so only fall back to
  // the 16KB default when the option is genuinely unset.
  var hwm = options.highWaterMark;
  if (hwm || hwm === 0)
    this.highWaterMark = ~~hwm; // truncate to an integer
  else
    this.highWaterMark = 16 * 1024;

  // Queued chunks and their combined byte length (or count in object mode).
  this.buffer = [];
  this.length = 0;

  // pipe() bookkeeping: null | single dest | array of dests.
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = false;

  // End-of-stream tracking.
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // 'end' is withheld until the consumer actually read()s, so streams
  // that push(null) right away cannot drop the event while the consumer
  // is off doing other I/O.
  this.calledRead = false;

  // True while the first write/_read call has not yet returned; lets us
  // tell synchronous completion apart from asynchronous completion.
  this.sync = true;

  // 'readable' emission bookkeeping.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;

  // In object mode read(n) ignores n and no buffer merging happens.
  this.objectMode = !!options.objectMode;

  // Historical crypto streams default to 'binary'; everything else utf8.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Set when flow() drained the buffer without hitting back-pressure.
  this.ranOut = false;

  // Number of piped writers currently awaiting a 'drain' event.
  this.awaitDrain = 0;

  // Guards against scheduling maybeReadMore twice.
  this.readingMore = false;

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder').StringDecoder; // lazy-load
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
// Readable base class.  `options` may carry highWaterMark, encoding,
// objectMode and defaultEncoding (see ReadableState).  New-safe: calling
// without `new` delegates to `new`.
function Readable(options) {
if (!(this instanceof Readable))
return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy flag consulted by old-style stream consumers
this.readable = true;
Stream.call(this);
}
// Feed a chunk into the read queue from the producer side.  Returns true
// while the buffer is still below the highWaterMark (i.e. "push more"),
// mirroring Writable.write()'s return value.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;
  var enc = encoding;
  // Strings are converted to Buffers up front, unless they already match
  // the decoder's encoding (then the decoder handles them later).
  if (!state.objectMode && typeof chunk === 'string') {
    enc = enc || state.defaultEncoding;
    if (enc !== state.encoding) {
      chunk = new Buffer(chunk, enc);
      enc = '';
    }
  }
  return readableAddChunk(this, state, chunk, enc, false);
};

// Put a chunk back at the front of the read queue.  Only valid for data
// that came directly out of read(); it is never decoded or converted.
Readable.prototype.unshift = function(chunk) {
  var state = this._readableState;
  return readableAddChunk(this, state, chunk, '', true);
};
// Core of push()/unshift(): validate the chunk, route EOF markers,
// append (or prepend) data to the buffer, and report whether the
// producer may keep going.  `addToFront` is true for unshift().
// Returns needMoreData(state).
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null || chunk === undefined) {
// push(null) / unshift(null) signals end-of-stream
state.reading = false;
if (!state.ended)
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
// unshift() is allowed after EOF but not once 'end' has fired
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
// decode only producer-side pushes of raw (not pre-encoded) chunks
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) {
state.buffer.unshift(chunk);
} else {
// a completed push also means the pending _read has delivered
state.reading = false;
state.buffer.push(chunk);
}
if (state.needReadable)
emitReadable(stream);
maybeReadMore(stream, state);
}
} else if (!addToFront) {
// empty non-object chunk: treat as a no-op delivery
state.reading = false;
}
return needMoreData(state);
}
// Decide whether the producer should keep push()ing.  More data is wanted
// while the stream has not ended and either (a) a 'readable' event is
// pending, (b) the buffer is empty (covers hwm === 0, e.g. the repl), or
// (c) the buffer is still below the high-water mark.  If the push()
// triggered a readable event and the user called read(largeNumber) so
// that needReadable was set, we also want more so another 'readable'
// fires.
function needMoreData(state) {
  if (state.ended)
    return false;
  return state.needReadable ||
         state.length === 0 ||
         state.length < state.highWaterMark;
}
// Backwards-compat helper: decode every future chunk to a string in the
// given encoding instead of handing out raw Buffers.
Readable.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
    StringDecoder = require('string_decoder').StringDecoder; // lazy-load
  var state = this._readableState;
  state.decoder = new StringDecoder(enc);
  state.encoding = enc;
};
// Upper bound for the high-water mark: 8MB (0x800000 bytes).
// NOTE: the historical comment here claimed 128MB, but 0x800000 is 8MB.
var MAX_HWM = 0x800000;

// Round n up to the next power of two, clamped to MAX_HWM.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM)
    return MAX_HWM;
  // Classic bit-smearing trick: copy the highest set bit into every
  // lower position, then add one to land on the next power of two.
  n--;
  n |= n >> 1;
  n |= n >> 2;
  n |= n >> 4;
  n |= n >> 8;
  n |= n >> 16;
  return n + 1;
}
// Work out how many bytes (or objects) read(n) should actually return,
// given the requested n and the current buffer state.  May mutate
// state.highWaterMark (grow) and state.needReadable as side effects.
function howMuchToRead(n, state) {
  // Fully drained and ended: nothing will ever come back.
  if (state.length === 0 && state.ended)
    return 0;

  // Object mode: read() hands out whole objects, one at a time.
  if (state.objectMode)
    return n === 0 ? 0 : 1;

  // n omitted / NaN / null: take everything, except while flowing where
  // only one buffered chunk is handed to the pipes per iteration.
  if (n === null || isNaN(n)) {
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    return state.length;
  }

  if (n <= 0)
    return 0;

  // Asking for more than the current water mark grows it to the next
  // power of two, so repeated large reads don't ratchet it up in tiny
  // increments.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  if (n <= state.length)
    return n;

  // Not enough buffered: return 0 and request a 'readable' event —
  // unless we've ended, in which case hand over whatever remains.
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }
  return state.length;
}
// you can override either this method, or the async _read(n) below.
//
// read(n): pull up to n bytes (or one object) out of the buffer.
// Returns null when nothing can be returned right now.  read(0) acts as
// a probe that may trigger 'readable' without consuming any data.
Readable.prototype.read = function(n) {
var state = this._readableState;
state.calledRead = true;
var nOrig = n;
// any "real" read attempt re-arms the 'readable' event
if (typeof n !== 'number' || n > 0)
state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 &&
state.needReadable &&
(state.length >= state.highWaterMark || state.ended)) {
emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
if (state.length === 0)
endReadable(this);
return null;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous.  Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
// if we currently have less than the highWaterMark, then also read some
if (state.length - n <= state.highWaterMark)
doRead = true;
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading)
doRead = false;
if (doRead) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0)
state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
}
// If _read called its callback synchronously, then `reading`
// will be false, and we need to re-evaluate how much data we
// can return to the user.
if (doRead && !state.reading)
n = howMuchToRead(nOrig, state);
var ret;
if (n > 0)
ret = fromList(n, state);
else
ret = null;
if (ret === null) {
// nothing returnable: re-arm needReadable so the consumer is told
// as soon as data does arrive
state.needReadable = true;
n = 0;
}
state.length -= n;
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (state.length === 0 && !state.ended)
state.needReadable = true;
// If we happened to read() exactly the remaining amount in the
// buffer, and the EOF has been seen at this point, then make sure
// that we emit 'end' on the very next tick.
if (state.ended && !state.endEmitted && state.length === 0)
endReadable(this);
return ret;
};
// Validate a chunk for a non-object-mode stream.  Returns a TypeError for
// anything that is not a Buffer, a string, or the null/undefined EOF
// marker; returns null when the chunk is acceptable (always acceptable in
// object mode).
function chunkInvalid(state, chunk) {
  if (state.objectMode)
    return null;
  if (chunk === null || chunk === undefined)
    return null;
  if (typeof chunk === 'string' || Buffer.isBuffer(chunk))
    return null;
  return new TypeError('Invalid non-string/buffer chunk');
}
// The producer signalled EOF (push(null)).  Flush any bytes the string
// decoder is still holding, mark the stream ended, then emit either
// 'readable' (data still buffered) or 'end' right away.
function onEofChunk(stream, state) {
  if (state.decoder && !state.ended) {
    var tail = state.decoder.end();
    if (tail && tail.length) {
      state.buffer.push(tail);
      state.length += state.objectMode ? 1 : tail.length;
    }
  }
  state.ended = true;
  if (state.length > 0)
    emitReadable(stream);
  else
    endReadable(stream);
}
// Emit 'readable' at most once per drain cycle.  While inside a
// synchronous _read call, defer to the next tick so read() is not
// re-entered and the stack cannot overflow (at worst this causes a
// nextTick recursion warning, which is tolerable).
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (state.emittedReadable)
    return;
  state.emittedReadable = true;
  if (!state.sync) {
    emitReadable_(stream);
    return;
  }
  setImmediate(function() {
    emitReadable_(stream);
  });
}

// The actual emission, split out so it can be scheduled.
function emitReadable_(stream) {
  stream.emit('readable');
}
// After the consumer has seen 'readable' and read() some data, try to
// refill the buffer preemptively — but asynchronously, and with at most
// one refill pass outstanding at a time.
function maybeReadMore(stream, state) {
  if (state.readingMore)
    return;
  state.readingMore = true;
  setImmediate(function() {
    maybeReadMore_(stream, state);
  });
}

// Keep issuing read(0) probes until the buffer reaches the high-water
// mark, a read is already in flight, the stream is flowing or ended, or
// a probe produces no new data (to avoid spinning).
function maybeReadMore_(stream, state) {
  var before = state.length;
  while (!state.reading && !state.flowing && !state.ended &&
         state.length < state.highWaterMark) {
    stream.read(0);
    if (state.length === before)
      break;
    before = state.length;
  }
  state.readingMore = false;
}
// Abstract producer hook: concrete Readable implementations must
// override _read(n) to supply data via push().  The byte count n is
// advisory only; for virtual (non-string, non-buffer) streams, "length"
// is somewhat arbitrary.  The default implementation signals misuse.
Readable.prototype._read = function(n) {
  var err = new Error('not implemented');
  this.emit('error', err);
};
// Wire this readable up to a writable `dest`.  Handles multiple
// destinations, 'drain'-based back-pressure, automatic end() (unless
// pipeOpts.end === false or dest is stdout/stderr), error forwarding,
// and teardown on 'close'/'finish'/'unpipe'.  Returns dest for chaining.
Readable.prototype.pipe = function(dest, pipeOpts) {
var src = this;
var state = this._readableState;
// state.pipes is: null (none) | single dest | array of dests
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
// never end() the process's own stdio streams
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
dest !== process.stdout &&
dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted)
setImmediate(endFn);
else
src.once('end', endFn);
dest.on('unpipe', onunpipe);
// only tear down when *this* source is the one being unpiped from dest
function onunpipe(readable) {
if (readable !== src) return;
cleanup();
}
function onend() {
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
function cleanup() {
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (!dest._writableState || dest._writableState.needDrain)
ondrain();
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
// check for listeners before emit removes one-time listeners.
var errListeners = EE.listenerCount(dest, 'error');
function onerror(er) {
unpipe();
// re-emit only when nobody else listens, preserving throw-on-error
if (errListeners === 0 && EE.listenerCount(dest, 'error') === 0)
dest.emit('error', er);
}
dest.once('error', onerror);
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
// the handler that waits for readable events after all
// the data gets sucked out in flow.
// This would be easier to follow with a .once() handler
// in flow(), but that is too slow.
this.on('readable', pipeOnReadable);
state.flowing = true;
setImmediate(function() {
flow(src);
});
}
return dest;
};
// Build the 'drain' handler for one piped destination: decrement the
// source's outstanding-drain count and restart the flow once every
// destination has drained.
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    state.awaitDrain--;
    if (state.awaitDrain === 0)
      flow(src);
  };
}
// Pump loop for flowing mode: repeatedly read() one chunk and write it
// to every piped destination until a destination needs to drain, the
// buffer runs dry, or every destination has been unpiped.
function flow(src) {
var state = src._readableState;
var chunk;
state.awaitDrain = 0;
// writes the current chunk to one destination, counting back-pressure
function write(dest, i, list) {
var written = dest.write(chunk);
if (false === written) {
state.awaitDrain++;
}
}
while (state.pipesCount && null !== (chunk = src.read())) {
if (state.pipesCount === 1)
write(state.pipes, 0, null);
else
forEach(state.pipes, write);
src.emit('data', chunk);
// if anyone needs a drain, then we have to wait for that.
if (state.awaitDrain > 0)
return;
}
// if every destination was unpiped, either before entering this
// function, or in the while loop, then stop flowing.
//
// NB: This is a pretty rare edge case.
if (state.pipesCount === 0) {
state.flowing = false;
// if there were data event listeners added, then switch to old mode.
if (EE.listenerCount(src, 'data') > 0)
emitDataEvents(src);
return;
}
// at this point, no one needed a drain, so we just ran out of data
// on the next readable event, start it over again.
state.ranOut = true;
}
// 'readable' listener installed by pipe(): if the previous flow() pass
// stopped only because the buffer ran dry, start it up again.
function pipeOnReadable() {
  var state = this._readableState;
  if (!state.ranOut)
    return;
  state.ranOut = false;
  flow(this);
}
// Detach `dest` from this stream, or every destination when dest is
// omitted.  Emits 'unpipe' on each removed destination and stops the
// flowing machinery once nothing is piped.  Returns `this`.
Readable.prototype.unpipe = function(dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0)
return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes)
return this;
if (!dest)
dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
if (dest)
dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
for (var i = 0; i < len; i++)
dests[i].emit('unpipe', this);
return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
if (i === -1)
return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
// collapse back to the single-destination representation
if (state.pipesCount === 1)
state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// Listener bookkeeping layered on top of EventEmitter#on:
//  - the first 'data' listener flips the stream into old (flowing) mode;
//  - a 'readable' listener primes the readable machinery so the event is
//    eventually delivered even if the consumer never calls read() first.
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  if (ev === 'data' && !this._readableState.flowing)
    emitDataEvents(this);

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading)
        this.read(0);               // kick off the first read
      else if (state.length)
        emitReadable(this, state);  // data is already waiting
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause()/resume() are remnants of the legacy readable-stream API; using
// either one switches the stream into old (data-event) mode.
Readable.prototype.resume = function() {
  emitDataEvents(this);
  this.read(0);
  this.emit('resume');
};

Readable.prototype.pause = function() {
  emitDataEvents(this, true);  // start in the paused state
  this.emit('pause');
};
// Convert this stream to the old ('data'-event, pause/resume) mode.
// Installs replacement pipe/on/pause/resume methods and an internal
// 'readable' handler that drains the buffer into 'data' events.  When
// `startPaused` is true the stream begins paused (used by pause()).
function emitDataEvents(stream, startPaused) {
var state = stream._readableState;
if (state.flowing) {
// https://github.com/isaacs/readable-stream/issues/16
throw new Error('Cannot switch to old mode now.');
}
var paused = startPaused || false;
var readable = false;
// convert to an old-style stream.
stream.readable = true;
stream.pipe = Stream.prototype.pipe;
stream.on = stream.addListener = Stream.prototype.on;
stream.on('readable', function() {
readable = true;
// drain everything currently available into 'data' events
var c;
while (!paused && (null !== (c = stream.read())))
stream.emit('data', c);
if (c === null) {
readable = false;
stream._readableState.needReadable = true;
}
});
stream.pause = function() {
paused = true;
this.emit('pause');
};
stream.resume = function() {
paused = false;
if (readable)
// data is already buffered: re-run the drain loop on the next tick
setImmediate(function() {
stream.emit('readable');
});
else
this.read(0);
this.emit('resume');
};
// now make it start, just in case it hadn't already.
stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
//
// Bridges a classic ('data'/'end') stream into this Readable: data is
// re-push()ed, back-pressure pauses the source, and unknown methods plus
// a handful of events are proxied through.  Returns `this`.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;
  var self = this;

  // EOF: flush whatever the string decoder is still holding, then
  // signal end-of-stream with push(null).
  stream.on('end', function() {
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }
    self.push(null);
  });

  // Data: decode if needed, drop empty chunks, and pause the source
  // whenever push() reports that the buffer is full.
  stream.on('data', function(chunk) {
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    if (!chunk || !state.objectMode && !chunk.length)
      return;
    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  // (IIFE captures each name so the closure doesn't see the loop's
  // final value of i.)
  for (var i in stream) {
    if (typeof stream[i] === 'function' &&
        typeof this[i] === 'undefined') {
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  // BUG FIX: the previous code called `self.emit.apply(self, ev, x)`,
  // but Function#apply expects an *array* of arguments as its second
  // parameter; passing the event-name string made every proxied event
  // misfire (e.g. 'error' re-emitted as 'e' with args 'r','r','o','r'
  // on engines that treat strings as array-likes, or a TypeError on
  // strict CreateListFromArrayLike engines).  Emit directly instead.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, function (x) {
      return self.emit(ev, x);
    });
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
// Expose the buffer-slicing helper for the test suite only; this is not
// part of the public streams API and may change without notice.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// `state.length` is the combined length of everything in state.buffer,
// which is consumed in place; the caller (read()) adjusts state.length
// afterwards.  In string mode the buffered chunks are decoded strings,
// and slice() behaves identically for strings and Buffers.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;

  // nothing buffered at all: definitely empty
  if (list.length === 0)
    return null;

  if (length === 0)
    return null;

  // object mode hands out exactly one object per call
  if (objectMode)
    return list.shift();

  // take everything (n falsy or at least the whole buffer)
  if (!n || n >= length) {
    var all = stringMode ? list.join('') : Buffer.concat(list, length);
    list.length = 0;
    return all;
  }

  var first = list[0];

  // the request fits inside the first chunk
  if (n < first.length) {
    var head = first.slice(0, n);
    list[0] = first.slice(n);
    return head;
  }

  // the first chunk is an exact match
  if (n === first.length)
    return list.shift();

  // spans several chunks: accumulate slices until n bytes are gathered
  var ret = stringMode ? '' : new Buffer(n);
  var c = 0;
  for (var i = 0, l = list.length; i < l && c < n; i++) {
    var buf = list[0];
    var cpy = Math.min(n - c, buf.length);
    if (stringMode)
      ret += buf.slice(0, cpy);
    else
      buf.copy(ret, c, 0, cpy);
    if (cpy < buf.length)
      list[0] = buf.slice(cpy);   // keep the unconsumed remainder
    else
      list.shift();
    c += cpy;
  }
  return ret;
}
// Emit 'end' (once, on a later tick) after the buffer has been fully
// consumed.  Reaching here with bytes still buffered is a library bug.
function endReadable(stream) {
  var state = stream._readableState;
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  // Hold the event until the consumer has actually called read(), so a
  // stream that ends immediately cannot fire 'end' before any I/O.
  if (state.endEmitted || !state.calledRead)
    return;

  state.ended = true;
  setImmediate(function() {
    // a last-moment unshift() may have refilled the buffer
    if (!state.endEmitted && state.length === 0) {
      state.endEmitted = true;
      stream.readable = false;
      stream.emit('end');
    }
  });
}
// Minimal Array#forEach stand-in (value, index only); the length is
// captured up front, matching Array.prototype.forEach-era behavior.
function forEach (xs, f) {
  var len = xs.length;
  for (var i = 0; i < len; i++)
    f(xs[i], i);
}

// Minimal Array#indexOf stand-in using strict equality.
function indexOf (xs, x) {
  var len = xs.length;
  for (var i = 0; i < len; i++) {
    if (xs[i] === x)
      return i;
  }
  return -1;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./index.js":24,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"events":18,"inherits":33,"process/browser.js":25,"string_decoder":30}],28:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// a transform stream is a readable/writable stream where you do | |
// something with the data. Sometimes it's called a "filter", | |
// but that's not a great name for it, since that implies a thing where | |
// some bits pass through, and others are simply ignored. (That would | |
// be a valid example of a transform, of course.) | |
// | |
// While the output is causally related to the input, it's not a | |
// necessarily symmetric or synchronous transformation. For example, | |
// a zlib stream might take multiple plain-text writes(), and then | |
// emit a single compressed chunk some time in the future. | |
// | |
// Here's how this works: | |
// | |
// The Transform stream has all the aspects of the readable and writable | |
// stream classes. When you write(chunk), that calls _write(chunk,cb) | |
// internally, and returns false if there's a lot of pending writes | |
// buffered up. When you call read(), that calls _read(n) until | |
// there's enough pending readable data buffered up. | |
// | |
// In a transform stream, the written data is placed in a buffer. When | |
// _read(n) is called, it transforms the queued up data, calling the | |
// buffered _write cb's as it consumes chunks. If consuming a single | |
// written chunk would result in multiple output chunks, then the first | |
// outputted bit calls the readcb, and subsequent chunks just go into | |
// the read buffer, and will cause it to emit 'readable' if necessary. | |
// | |
// This way, back-pressure is actually determined by the reading side, | |
// since _read has to be called to start processing a new chunk. However, | |
// a pathological inflate type of transform can cause excessive buffering | |
// here. For example, imagine a stream where every byte of input is | |
// interpreted as an integer from 0-255, and then results in that many | |
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in | |
// 1kb of data being output. In this case, you could write a very small | |
// amount of input, and end up with a very large amount of output. In | |
// such a pathological inflating mechanism, there'd be no way to tell | |
// the system to stop doing the transform. A single 4MB write could | |
// cause the system to run out of memory. | |
// | |
// However, even in such a pathological case, only a single written chunk | |
// would be consumed, and then the rest would wait (un-transformed) until | |
// the results of the previous transformed chunk were consumed. | |
module.exports = Transform; | |
var Duplex = require('./duplex.js'); | |
var inherits = require('inherits'); | |
inherits(Transform, Duplex); | |
// Per-stream bookkeeping shared between _write and _read on a Transform.
function TransformState(options, stream) {
  // Completion callback handed to _transform; closes over the stream.
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };
  this.needTransform = false;  // readable side wants data but none queued
  this.transforming = false;   // a _transform call is currently in flight
  this.writecb = null;         // pending write-completion callback
  this.writechunk = null;      // chunk parked awaiting transformation
}
// Invoked when _transform finishes one chunk.  Pushes any produced data,
// acknowledges the buffered write callback, and pulls the next chunk if
// the readable side still has appetite.
function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;

  var cb = ts.writecb;
  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));

  ts.writechunk = null;
  ts.writecb = null;

  if (data !== null && data !== undefined)
    stream.push(data);

  if (cb)
    cb(er);

  var rs = stream._readableState;
  rs.reading = false;
  // keep the pipeline primed while below the high-water mark
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}
// Transform constructor: a Duplex whose writable input is converted by
// _transform() into readable output.  Subclasses implement _transform
// (and optionally _flush, invoked before 'end' when writing finishes).
// New-safe: calling without `new` delegates to `new`.
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
var ts = this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
this.once('finish', function() {
if ('function' === typeof this._flush)
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
// Output from _transform lands here; any pushed data satisfies the
// current demand, so clear the needTransform flag before delegating to
// the ordinary readable-side push.
Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// Subclasses must override this.  For each written `chunk` (with its
// `encoding`), call push() zero or more times with transformed output,
// then invoke cb(err) exactly once; until cb fires no further chunk is
// delivered, and passing an error tears the whole stream down.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};
// Writable-side plumbing: park the chunk and its callback, then — if no
// transform is currently running and the readable side has appetite —
// trigger _read to start consuming it.
Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (ts.transforming)
    return;
  var rs = this._readableState;
  if (ts.needTransform ||
      rs.needReadable ||
      rs.length < rs.highWaterMark)
    this._read(rs.highWaterMark);
};
// Readable-side pull; reaching here means the readable side wants more
// data.  The size argument is irrelevant — _transform does all the work.
// If a written chunk is parked and no transform is running, kick off
// _transform; otherwise just note that the next write should be
// processed as soon as it arrives.
Transform.prototype._read = function(n) {
  var ts = this._transformState;
  if (ts.writechunk && ts.writecb && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    ts.needTransform = true;
  }
};
// Final bookkeeping once the writable side has flushed everything
// (including _flush output). Emits 'error' on failure, otherwise
// signals EOF on the readable side via push(null). The sanity checks
// guard against finishing while data or an active transform remains.
// (The original also read stream._readableState into an unused local;
// that dead read is removed here.)
function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}
},{"./duplex.js":23,"inherits":33}],29:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// A bit simpler than readable streams. | |
// Implement an async ._write(chunk, cb), and it'll handle all | |
// the drain event emission and buffering. | |
module.exports = Writable; | |
Writable.WritableState = WritableState; | |
// Feature-detected type guards. When the constructors exist, use
// instanceof; otherwise (very old engines) fall back to matching the
// constructor name.
var isUint8Array = typeof Uint8Array !== 'undefined'
  ? function (x) { return x instanceof Uint8Array; }
  : function (x) {
      return x && x.constructor && x.constructor.name === 'Uint8Array';
    };
var isArrayBuffer = typeof ArrayBuffer !== 'undefined'
  ? function (x) { return x instanceof ArrayBuffer; }
  : function (x) {
      return x && x.constructor && x.constructor.name === 'ArrayBuffer';
    };
var inherits = require('inherits'); | |
var Stream = require('./index.js'); | |
var setImmediate = require('process/browser.js').nextTick; | |
var Buffer = require('buffer').Buffer; | |
inherits(Writable, Stream); | |
// One queued write: the chunk plus the metadata needed to replay it
// through _write once the stream is free.
function WriteReq(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
}
// Per-stream writable state. Holds the watermark, lifecycle flags,
// the pending-write queue, and the in-flight write bookkeeping.
function WritableState(options, stream) {
  options = options || {};

  // Watermark past which write() starts returning false. Zero is a
  // legal value meaning "always apply backpressure". Coerced to an
  // integer with ~~.
  var hwm = options.highWaterMark;
  this.highWaterMark = ~~((hwm || hwm === 0) ? hwm : 16 * 1024);

  // In object mode every chunk counts as length 1 regardless of size.
  this.objectMode = !!options.objectMode;

  // Owed-'drain' flag plus the end() lifecycle: `ending` while end()
  // runs, `ended` once it has returned, `finished` once 'finish' has
  // been emitted.
  this.needDrain = false;
  this.ending = false;
  this.ended = false;
  this.finished = false;

  // Whether string chunks are decoded into Buffers before _write.
  // Off only when explicitly disabled (historically for the crypto
  // streams, whose default encoding is 'binary').
  this.decodeStrings = options.decodeStrings !== false;
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // Not an actual buffer of bytes — just a count of how much data is
  // accepted but not yet flushed to the underlying sink.
  this.length = 0;

  // In-flight write flags. `sync` records whether the _write callback
  // fired in the same tick; it starts true because "later" actions
  // must not run before the first write call either.
  this.writing = false;
  this.sync = true;

  // Guards against re-entrant queue draining when _write completes
  // synchronously (see clearBuffer).
  this.bufferProcessing = false;

  // The callback handed to _write(chunk, cb).
  this.onwrite = function(er) {
    onwrite(stream, er);
  };

  // User-supplied write() callback and size of the chunk in flight.
  this.writecb = null;
  this.writelen = 0;

  // Queue of WriteReq objects waiting their turn.
  this.buffer = [];
}
// Writable constructor. Duplex applies this to its own instances, so
// accept anything that is instanceof Stream.Duplex as well as plain
// Writables when deciding whether to self-construct.
function Writable(options) {
  if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
    return new Writable(options);

  this._writableState = new WritableState(options, this);

  // Legacy flag checked by old-style stream consumers.
  this.writable = true;

  Stream.call(this);
}
// Writables are sinks: piping *out* of one is a programmer error, so
// surface it loudly instead of silently doing nothing.
Writable.prototype.pipe = function() {
  this.emit('error', new Error('Cannot pipe. Not readable.'));
};
// Reject a write() issued after end(): emit 'error' synchronously and
// fail the caller's callback on the next tick.
// TODO: defer error events consistently everywhere, not just the cb
function writeAfterEnd(stream, state, cb) {
  var err = new Error('write after end');
  stream.emit('error', err);
  setImmediate(function() { cb(err); });
}
// Outside object mode, only Buffers, strings, null and undefined are
// acceptable chunks; anything else raises a TypeError on the stream
// and (next tick) on the callback. In object mode everything passes —
// chunks then count as length 1 against the watermarks rather than by
// bytes/characters.
function validChunk(stream, state, chunk, cb) {
  if (state.objectMode ||
      Buffer.isBuffer(chunk) ||
      'string' === typeof chunk ||
      chunk === null ||
      chunk === undefined) {
    return true;
  }
  var er = new TypeError('Invalid non-string/buffer chunk');
  stream.emit('error', er);
  setImmediate(function() {
    cb(er);
  });
  return false;
}
// Public write([chunk][, encoding][, cb]). Normalizes the arguments
// and the chunk, then queues it or hands it straight to _write.
// Returns false once the buffered length reaches the high-water mark,
// signalling the caller to wait for 'drain'.
Writable.prototype.write = function(chunk, encoding, cb) {
  var state = this._writableState;

  // write(chunk, cb) form
  if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  // Coerce typed-array and ArrayBuffer inputs into Buffers.
  if (!Buffer.isBuffer(chunk) && isUint8Array(chunk))
    chunk = new Buffer(chunk);
  if (isArrayBuffer(chunk) && typeof Uint8Array !== 'undefined')
    chunk = new Buffer(new Uint8Array(chunk));

  // Buffers carry the sentinel encoding 'buffer'; strings default to
  // the stream's configured encoding.
  if (Buffer.isBuffer(chunk))
    encoding = 'buffer';
  else if (!encoding)
    encoding = state.defaultEncoding;

  if (typeof cb !== 'function')
    cb = function() {};

  if (state.ended) {
    writeAfterEnd(this, state, cb);
    return false;
  }
  if (!validChunk(this, state, chunk, cb))
    return false;
  return writeOrBuffer(this, state, chunk, encoding, cb);
};
// Convert a string chunk into a Buffer unless the stream is in object
// mode or string decoding was explicitly disabled. Everything else
// passes through untouched.
function decodeChunk(state, chunk, encoding) {
  var shouldDecode = !state.objectMode &&
                     state.decodeStrings !== false &&
                     typeof chunk === 'string';
  return shouldDecode ? new Buffer(chunk, encoding) : chunk;
}
// Account for the chunk's length, then either start writing it or —
// if a write is already in flight — queue it for later. The returned
// boolean is the backpressure signal: false once the buffered length
// has reached the high-water mark, in which case a 'drain' is owed.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
  chunk = decodeChunk(state, chunk, encoding);
  var size = state.objectMode ? 1 : chunk.length;
  state.length += size;

  var belowHwm = state.length < state.highWaterMark;
  state.needDrain = !belowHwm;

  if (state.writing)
    state.buffer.push(new WriteReq(chunk, encoding, cb));
  else
    doWrite(stream, state, size, chunk, encoding, cb);

  return belowHwm;
}
// Hand exactly one chunk to the underlying _write. `sync` is true for
// the duration of the call so onwrite can tell whether the completion
// callback fired in the same tick.
function doWrite(stream, state, len, chunk, encoding, cb) {
  state.writelen = len;
  state.writecb = cb;
  state.writing = true;
  state.sync = true;
  stream._write(chunk, encoding, state.onwrite);
  state.sync = false;
}
// Report a failed write. When the failure surfaced synchronously the
// callback is deferred so the caller's stack has unwound first; the
// 'error' event is emitted right away in either case.
function onwriteError(stream, state, sync, er, cb) {
  if (sync) {
    setImmediate(function() { cb(er); });
  } else {
    cb(er);
  }
  stream.emit('error', er);
}
// Clear the in-flight write bookkeeping and release its accounted
// length once a write completes.
function onwriteStateUpdate(state) {
  state.writing = false;
  state.writecb = null;
  state.length -= state.writelen;
  state.writelen = 0;
}
// Completion callback for every _write. Ordering is delicate here:
// the in-flight bookkeeping must be cleared before deciding whether
// the stream can finish or the queue can drain, and afterWrite must
// run on a later tick when the write completed synchronously, so the
// write() caller sees the return value before 'drain'/'finish' fire.
function onwrite(stream, er) {
  var state = stream._writableState;
  var sync = state.sync;       // did _write's callback fire in-tick?
  var cb = state.writecb;      // user callback for the finished chunk

  onwriteStateUpdate(state);

  if (er)
    onwriteError(stream, state, sync, er, cb);
  else {
    // Check if we're actually ready to finish, but don't emit yet
    var finished = needFinish(stream, state);

    // Not finished and not already draining: flush queued writes.
    if (!finished && !state.bufferProcessing && state.buffer.length)
      clearBuffer(stream, state);

    if (sync) {
      setImmediate(function() {
        afterWrite(stream, state, finished, cb);
      });
    } else {
      afterWrite(stream, state, finished, cb);
    }
  }
}
// Post-write tail: possibly emit 'drain', invoke the user's callback,
// then emit 'finish' if this was the last pending write after end().
function afterWrite(stream, state, finished, cb) {
  if (!finished)
    onwriteDrain(stream, state);
  cb();
  if (finished)
    finishMaybe(stream, state);
}
// Emit 'drain' once the buffer has fully emptied, but only when some
// write() actually returned false (needDrain). Callers guarantee this
// runs on a later tick than that write(), so the consumer has had a
// chance to attach a 'drain' listener first.
function onwriteDrain(stream, state) {
  if (state.length !== 0 || !state.needDrain) return;
  state.needDrain = false;
  stream.emit('drain');
}
// Drain queued WriteReqs. Relies on `var` hoisting: the loop index
// `c` is read *after* the loop to decide how much of the queue was
// consumed. If a write goes async (state.writing still true right
// after doWrite), stop, advance `c` past the in-flight entry, and
// leave the remainder queued. bufferProcessing guards against
// re-entry when a synchronous onwrite lands back here.
function clearBuffer(stream, state) {
  state.bufferProcessing = true;

  for (var c = 0; c < state.buffer.length; c++) {
    var entry = state.buffer[c];
    var chunk = entry.chunk;
    var encoding = entry.encoding;
    var cb = entry.callback;
    var len = state.objectMode ? 1 : chunk.length;

    doWrite(stream, state, len, chunk, encoding, cb);

    // if we didn't call the onwrite immediately, then
    // it means that we need to wait until it does.
    // also, that means that the chunk and cb are currently
    // being processed, so move the buffer counter past them.
    if (state.writing) {
      c++;
      break;
    }
  }

  state.bufferProcessing = false;

  // Keep only the unconsumed tail (or empty the queue in place).
  if (c < state.buffer.length)
    state.buffer = state.buffer.slice(c);
  else
    state.buffer.length = 0;
}
// Default _write: concrete writables must override this; the default
// simply fails every chunk.
Writable.prototype._write = function(chunk, encoding, cb) {
  cb(new Error('not implemented'));
};
// Public end([chunk][, encoding][, cb]): optionally write one final
// chunk, then begin the finish sequence. Calling end() again once
// ending/finished is a harmless no-op.
Writable.prototype.end = function(chunk, encoding, cb) {
  var state = this._writableState;

  // end(cb) and end(chunk, cb) argument shuffles.
  if (typeof chunk === 'function') {
    cb = chunk;
    chunk = null;
    encoding = null;
  } else if (typeof encoding === 'function') {
    cb = encoding;
    encoding = null;
  }

  if (chunk !== undefined && chunk !== null)
    this.write(chunk, encoding);

  // ignore unnecessary end() calls.
  if (!state.ending && !state.finished)
    endWritable(this, state, cb);
};
// True once end() has been requested, every accepted byte has been
// flushed, nothing is in flight, and 'finish' has not yet fired.
function needFinish(stream, state) {
  return state.ending &&
         state.length === 0 &&
         !state.finished &&
         !state.writing;
}
// Emit 'finish' exactly once, as soon as the stream qualifies.
// Returns whether the stream is (now) finished.
function finishMaybe(stream, state) {
  var ready = needFinish(stream, state);
  if (ready) {
    state.finished = true;
    stream.emit('finish');
  }
  return ready;
}
// Transition into the ending state and arrange for `cb` to run once
// 'finish' fires — immediately (next tick) if it already has.
function endWritable(stream, state, cb) {
  state.ending = true;
  finishMaybe(stream, state);
  if (cb) {
    if (state.finished)
      setImmediate(cb);
    else
      stream.once('finish', cb);
  }
  state.ended = true;
}
},{"./index.js":24,"buffer":20,"inherits":33,"process/browser.js":25}],30:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
var Buffer = require('buffer').Buffer; | |
// Throw for encodings Buffer does not understand. Falsy values pass
// through — the caller applies a 'utf8' default.
function assertEncoding(encoding) {
  if (!encoding) return;
  if (!Buffer.isEncoding(encoding)) {
    throw new Error('Unknown encoding: ' + encoding);
  }
}
// Stateful decoder that never splits a multibyte character (or a
// UTF-16 surrogate pair) across write() calls.
// NOTE(review): the encoding is validated in its *raw* form, then
// normalized ('utf-8' -> 'utf8'); the replace is deliberately
// non-global, matching upstream node of this era.
var StringDecoder = exports.StringDecoder = function(encoding) {
  this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
  assertEncoding(encoding);
  switch (this.encoding) {
    case 'utf8':
      // CESU-8 represents each of Surrogate Pair by 3-bytes
      this.surrogateSize = 3;
      break;
    case 'ucs2':
    case 'utf16le':
      // UTF-16 represents each of Surrogate Pair by 2-bytes
      this.surrogateSize = 2;
      this.detectIncompleteChar = utf16DetectIncompleteChar;
      break;
    case 'base64':
      // Base-64 stores 3 bytes in 4 chars, and pads the remainder.
      this.surrogateSize = 3;
      this.detectIncompleteChar = base64DetectIncompleteChar;
      break;
    default:
      // Single-byte encodings can never split a character: bypass all
      // the buffering machinery entirely.
      this.write = passThroughWrite;
      return;
  }

  // Scratch space for the bytes of one partially-received character
  // (6 bytes covers a 3-byte lead + 3-byte trail surrogate in UTF-8).
  this.charBuffer = new Buffer(6);
  this.charReceived = 0;  // bytes of the partial character seen so far
  this.charLength = 0;    // total bytes the partial character needs
};
// Decode `buffer`, returning as much complete text as possible and
// retaining any trailing partial character (or lone lead surrogate)
// until the next write().
StringDecoder.prototype.write = function(buffer) {
  var charStr = '';
  var offset = 0;

  // if our last write ended with an incomplete multibyte character
  while (this.charLength) {
    // determine how many remaining bytes this buffer has to offer for this char
    var i = (buffer.length >= this.charLength - this.charReceived) ?
        this.charLength - this.charReceived :
        buffer.length;

    // add the new bytes to the char buffer
    buffer.copy(this.charBuffer, this.charReceived, offset, i);
    this.charReceived += (i - offset);
    offset = i;

    if (this.charReceived < this.charLength) {
      // still not enough chars in this buffer? wait for more ...
      return '';
    }

    // get the character that was split
    charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);

    // lead surrogate (D800-DBFF) is also the incomplete character
    var charCode = charStr.charCodeAt(charStr.length - 1);
    if (charCode >= 0xD800 && charCode <= 0xDBFF) {
      // keep accumulating: the trail surrogate's bytes are still coming
      this.charLength += this.surrogateSize;
      charStr = '';
      continue;
    }
    this.charReceived = this.charLength = 0;

    // if there are no more bytes in this buffer, just emit our char
    if (i == buffer.length) return charStr;

    // otherwise cut off the characters end from the beginning of this buffer
    buffer = buffer.slice(i, buffer.length);
    break;
  }

  // stash any partial character at the end of this chunk; charLength
  // is set (as a side effect) by detectIncompleteChar when needed
  var lenIncomplete = this.detectIncompleteChar(buffer);

  var end = buffer.length;
  if (this.charLength) {
    // buffer the incomplete character bytes we got
    buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
    this.charReceived = lenIncomplete;
    end -= lenIncomplete;
  }

  charStr += buffer.toString(this.encoding, 0, end);

  // NOTE(review): `end` and `charCode` are intentionally redeclared
  // here; `var` hoisting makes them the same bindings as above.
  var end = charStr.length - 1;
  var charCode = charStr.charCodeAt(end);
  // lead surrogate (D800-DBFF) is also the incomplete character
  if (charCode >= 0xD800 && charCode <= 0xDBFF) {
    var size = this.surrogateSize;
    this.charLength += size;
    this.charReceived += size;
    // shift any buffered bytes over and re-encode the lead surrogate's
    // bytes at the front of the scratch buffer
    this.charBuffer.copy(this.charBuffer, size, 0, size);
    this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
    return charStr.substring(0, end);
  }

  // or just emit the charStr
  return charStr;
};
// Default (UTF-8) incomplete-character detector. Scans at most the
// last 3 bytes for a multibyte lead byte whose sequence would run
// past the end of `buffer`. Side effect: sets this.charLength — but
// ONLY when a lead byte is found — and returns how many trailing
// bytes belong to the incomplete character (0 when none).
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
  // determine how many bytes we have to check at the end of this buffer
  var i = (buffer.length >= 3) ? 3 : buffer.length;

  // Figure out if one of the last i bytes of our buffer announces an
  // incomplete char.
  for (; i > 0; i--) {
    var c = buffer[buffer.length - i];

    // See http://en.wikipedia.org/wiki/UTF-8#Description

    // 110XXXXX — 2-byte sequence lead in the final position
    if (i == 1 && c >> 5 == 0x06) {
      this.charLength = 2;
      break;
    }

    // 1110XXXX — 3-byte sequence lead within the last 2 bytes
    if (i <= 2 && c >> 4 == 0x0E) {
      this.charLength = 3;
      break;
    }

    // 11110XXX — 4-byte sequence lead within the last 3 bytes
    if (i <= 3 && c >> 3 == 0x1E) {
      this.charLength = 4;
      break;
    }
  }
  return i;
};
// Flush the decoder: decode any final bytes, then emit whatever
// partial-character bytes remain, decoded leniently (which may yield
// a replacement character).
StringDecoder.prototype.end = function(buffer) {
  var res = '';
  if (buffer && buffer.length)
    res = this.write(buffer);

  if (this.charReceived) {
    res += this.charBuffer.slice(0, this.charReceived).toString(this.encoding);
  }

  return res;
};
// write() used for single-byte encodings: no character can span a
// chunk boundary, so decode the whole buffer directly.
function passThroughWrite(buffer) {
  return buffer.toString(this.encoding);
}
// UTF-16LE detector: a trailing odd byte is half a code unit.
function utf16DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 2;
  this.charReceived = leftover;
  this.charLength = leftover ? 2 : 0;
  return leftover;
}
// Base64 detector: only whole 3-byte groups encode cleanly, so any
// remainder is held back.
function base64DetectIncompleteChar(buffer) {
  var leftover = buffer.length % 3;
  this.charReceived = leftover;
  this.charLength = leftover ? 3 : 0;
  return leftover;
}
},{"buffer":20}],31:[function(require,module,exports){ | |
// Duck-typed Buffer check: probes for characteristic Buffer methods
// so it works across contexts where instanceof would fail.
// NOTE(review): for a falsy `arg` this returns `arg` itself (e.g.
// null), not `false` — callers rely only on truthiness.
module.exports = function isBuffer(arg) {
  return arg && typeof arg === 'object'
    && typeof arg.copy === 'function'
    && typeof arg.fill === 'function'
    && typeof arg.readUInt8 === 'function';
}
},{}],32:[function(require,module,exports){ | |
(function (process,global){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
// Placeholders understood by format(): %s string, %d number, %j JSON,
// %% literal percent. (Safe to share despite the /g flag: replace()
// with a callback does not use lastIndex state between calls.)
var formatRegExp = /%[sdj%]/g;
// printf-like formatter. With a non-string first argument every
// argument is inspect()ed and space-joined. Otherwise placeholders
// consume arguments left to right; surplus placeholders are left
// verbatim, and surplus arguments are appended (inspect()ed unless
// primitive/null).
exports.format = function(f) {
  if (!isString(f)) {
    var objects = [];
    for (var i = 0; i < arguments.length; i++) {
      objects.push(inspect(arguments[i]));
    }
    return objects.join(' ');
  }

  var i = 1;
  var args = arguments;
  var len = args.length;
  var str = String(f).replace(formatRegExp, function(x) {
    if (x === '%%') return '%';
    if (i >= len) return x;  // more placeholders than args: keep as-is
    switch (x) {
      case '%s': return String(args[i++]);
      case '%d': return Number(args[i++]);
      case '%j':
        try {
          return JSON.stringify(args[i++]);
        } catch (_) {
          return '[Circular]';  // JSON.stringify throws on cycles
        }
      default:
        return x;
    }
  });
  // Append any arguments the placeholders did not consume.
  for (var x = args[i]; i < len; x = args[++i]) {
    if (isNull(x) || !isObject(x)) {
      str += ' ' + x;
    } else {
      str += ' ' + inspect(x);
    }
  }
  return str;
};
// Mark that a method should not be used. | |
// Returns a modified function which warns once by default. | |
// If --no-deprecation is set, then it is a no-op. | |
exports.deprecate = function(fn, msg) { | |
// Allow for deprecating things in the process of starting up. | |
if (isUndefined(global.process)) { | |
return function() { | |
return exports.deprecate(fn, msg).apply(this, arguments); | |
}; | |
} | |
if (process.noDeprecation === true) { | |
return fn; | |
} | |
var warned = false; | |
function deprecated() { | |
if (!warned) { | |
if (process.throwDeprecation) { | |
throw new Error(msg); | |
} else if (process.traceDeprecation) { | |
console.trace(msg); | |
} else { | |
console.error(msg); | |
} | |
warned = true; | |
} | |
return fn.apply(this, arguments); | |
} | |
return deprecated; | |
}; | |
var debugs = {};     // memoized per-section logger functions
var debugEnviron;    // NODE_DEBUG value, sampled lazily on first call
// Returns a logger that writes "<SET> <pid>: msg" to stderr when the
// section name appears (word-matched, case-insensitive) in NODE_DEBUG,
// and a no-op otherwise. The environment is read once per process and
// the resulting function cached per section.
exports.debuglog = function(set) {
  if (isUndefined(debugEnviron))
    debugEnviron = process.env.NODE_DEBUG || '';
  set = set.toUpperCase();
  if (!debugs[set]) {
    if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
      var pid = process.pid;
      debugs[set] = function() {
        var msg = exports.format.apply(exports, arguments);
        console.error('%s %d: %s', set, pid, msg);
      };
    } else {
      debugs[set] = function() {};
    }
  }
  return debugs[set];
};
/**
 * Echoes a string representation of `obj`, choosing the best format
 * possible for its type.
 *
 * @param {Object} obj The object to print out.
 * @param {Object} opts Optional options object that alters the output.
 */
/* legacy: obj, showHidden, depth, colors*/
function inspect(obj, opts) {
  var ctx = {
    seen: [],               // cycle-detection stack
    stylize: stylizeNoColor
  };
  // Legacy positional form: inspect(obj, showHidden, depth, colors).
  if (arguments.length >= 3) ctx.depth = arguments[2];
  if (arguments.length >= 4) ctx.colors = arguments[3];
  if (isBoolean(opts)) {
    ctx.showHidden = opts;
  } else if (opts) {
    // Modern options-object form.
    exports._extend(ctx, opts);
  }
  // Fill in defaults for anything still unset.
  if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
  if (isUndefined(ctx.depth)) ctx.depth = 2;
  if (isUndefined(ctx.colors)) ctx.colors = false;
  if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
  if (ctx.colors) ctx.stylize = stylizeWithColor;
  return formatValue(ctx, obj, ctx.depth);
}
exports.inspect = inspect;
// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
// Each entry is an [open, close] pair of ANSI SGR codes.
inspect.colors = {
  'bold' : [1, 22],
  'italic' : [3, 23],
  'underline' : [4, 24],
  'inverse' : [7, 27],
  'white' : [37, 39],
  'grey' : [90, 39],
  'black' : [30, 39],
  'blue' : [34, 39],
  'cyan' : [36, 39],
  'green' : [32, 39],
  'magenta' : [35, 39],
  'red' : [31, 39],
  'yellow' : [33, 39]
};

// Maps value categories (as passed to stylize) to color names above.
// Don't use 'blue' not visible on cmd.exe
inspect.styles = {
  'special': 'cyan',
  'number': 'yellow',
  'boolean': 'yellow',
  'undefined': 'grey',
  'null': 'bold',
  'string': 'green',
  'date': 'magenta',
  // "name": intentionally not styling
  'regexp': 'red'
};
// Wrap `str` in the ANSI escape codes mapped for its style category;
// unknown categories pass through unstyled.
function stylizeWithColor(str, styleType) {
  var style = inspect.styles[styleType];
  if (!style) return str;
  var codes = inspect.colors[style];
  return '\u001b[' + codes[0] + 'm' + str + '\u001b[' + codes[1] + 'm';
}
// Identity stylizer used when color output is disabled.
function stylizeNoColor(str, styleType) {
  return str;
}
// Build a truthy-membership map from an array of keys.
function arrayToHash(array) {
  var hash = {};
  array.forEach(function(val) {
    hash[val] = true;
  });
  return hash;
}
// Core dispatcher for inspect(): renders any value, recursing at most
// `recurseTimes` levels and using ctx.seen for cycle detection.
function formatValue(ctx, value, recurseTimes) {
  // Provide a hook for user-specified inspect functions.
  // Check that value is an object with an inspect function on it
  if (ctx.customInspect &&
      value &&
      isFunction(value.inspect) &&
      // Filter out the util module, it's inspect function is special
      value.inspect !== exports.inspect &&
      // Also filter out any prototype objects using the circular check.
      !(value.constructor && value.constructor.prototype === value)) {
    var ret = value.inspect(recurseTimes, ctx);
    // A custom inspect may return a non-string; format that instead.
    if (!isString(ret)) {
      ret = formatValue(ctx, ret, recurseTimes);
    }
    return ret;
  }

  // Primitive types cannot have properties
  var primitive = formatPrimitive(ctx, value);
  if (primitive) {
    return primitive;
  }

  // Look up the keys of the object.
  var keys = Object.keys(value);
  var visibleKeys = arrayToHash(keys);

  if (ctx.showHidden) {
    keys = Object.getOwnPropertyNames(value);
  }

  // IE doesn't make error fields non-enumerable
  // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
  if (isError(value)
      && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
    return formatError(value);
  }

  // Some type of object without properties can be shortcutted.
  if (keys.length === 0) {
    if (isFunction(value)) {
      var name = value.name ? ': ' + value.name : '';
      return ctx.stylize('[Function' + name + ']', 'special');
    }
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    }
    if (isDate(value)) {
      return ctx.stylize(Date.prototype.toString.call(value), 'date');
    }
    if (isError(value)) {
      return formatError(value);
    }
  }

  // `base` is a per-type prefix shown before the brace-wrapped body.
  var base = '', array = false, braces = ['{', '}'];

  // Make Array say that they are Array
  if (isArray(value)) {
    array = true;
    braces = ['[', ']'];
  }

  // Make functions say that they are functions
  if (isFunction(value)) {
    var n = value.name ? ': ' + value.name : '';
    base = ' [Function' + n + ']';
  }

  // Make RegExps say that they are RegExps
  if (isRegExp(value)) {
    base = ' ' + RegExp.prototype.toString.call(value);
  }

  // Make dates with properties first say the date
  if (isDate(value)) {
    base = ' ' + Date.prototype.toUTCString.call(value);
  }

  // Make error with message first say the error
  if (isError(value)) {
    base = ' ' + formatError(value);
  }

  if (keys.length === 0 && (!array || value.length == 0)) {
    return braces[0] + base + braces[1];
  }

  // Past the depth limit: summarize instead of recursing further.
  if (recurseTimes < 0) {
    if (isRegExp(value)) {
      return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
    } else {
      return ctx.stylize('[Object]', 'special');
    }
  }

  // Push onto the cycle-detection stack for the recursive calls.
  ctx.seen.push(value);

  var output;
  if (array) {
    output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
  } else {
    output = keys.map(function(key) {
      return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
    });
  }

  ctx.seen.pop();

  return reduceToSingleString(output, base, braces);
}
// Render undefined/string/number/boolean/null with styling. Returns
// undefined for anything else so the caller falls through to the
// object-formatting path.
function formatPrimitive(ctx, value) {
  if (isUndefined(value))
    return ctx.stylize('undefined', 'undefined');
  if (isString(value)) {
    // JSON-quote the string, then convert it to single-quoted form.
    var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
                                             .replace(/'/g, "\\'")
                                             .replace(/\\"/g, '"') + '\'';
    return ctx.stylize(simple, 'string');
  }
  if (isNumber(value))
    return ctx.stylize('' + value, 'number');
  if (isBoolean(value))
    return ctx.stylize('' + value, 'boolean');
  // typeof null is "object", so it needs an explicit check here.
  if (isNull(value))
    return ctx.stylize('null', 'null');
}
// Errors render as "[ErrorName: message]".
function formatError(value) {
  return '[' + Error.prototype.toString.call(value) + ']';
}
// Format an array: index properties in order (holes in sparse arrays
// render as empty slots), followed by any non-index own properties.
function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
  var output = [];
  for (var i = 0, l = value.length; i < l; ++i) {
    if (hasOwnProperty(value, String(i))) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          String(i), true));
    } else {
      output.push('');  // hole in a sparse array
    }
  }
  keys.forEach(function(key) {
    if (!key.match(/^\d+$/)) {
      output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
          key, true));
    }
  });
  return output;
}
// Render one "name: str" pair. Handles getters/setters, hidden keys
// (shown bracketed), cycle detection via ctx.seen, and continuation-
// line indenting for multi-line values.
// NOTE(review): the indent string literals below are reconstructed
// from upstream node util (the paste collapsed runs of spaces) — the
// array branch uses two spaces then strips them from the first line
// via substr(2); the object branch indents by three.
function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
  var name, str, desc;
  desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
  if (desc.get) {
    if (desc.set) {
      str = ctx.stylize('[Getter/Setter]', 'special');
    } else {
      str = ctx.stylize('[Getter]', 'special');
    }
  } else {
    if (desc.set) {
      str = ctx.stylize('[Setter]', 'special');
    }
  }
  // Keys found only via getOwnPropertyNames render bracketed.
  if (!hasOwnProperty(visibleKeys, key)) {
    name = '[' + key + ']';
  }
  if (!str) {
    if (ctx.seen.indexOf(desc.value) < 0) {
      if (isNull(recurseTimes)) {
        str = formatValue(ctx, desc.value, null);
      } else {
        str = formatValue(ctx, desc.value, recurseTimes - 1);
      }
      // Indent continuation lines of multi-line values.
      if (str.indexOf('\n') > -1) {
        if (array) {
          str = str.split('\n').map(function(line) {
            return '  ' + line;
          }).join('\n').substr(2);
        } else {
          str = '\n' + str.split('\n').map(function(line) {
            return '   ' + line;
          }).join('\n');
        }
      }
    } else {
      str = ctx.stylize('[Circular]', 'special');
    }
  }
  if (isUndefined(name)) {
    // Array indices print without a name.
    if (array && key.match(/^\d+$/)) {
      return str;
    }
    name = JSON.stringify('' + key);
    if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
      // Plain identifier: drop the quotes.
      name = name.substr(1, name.length - 2);
      name = ctx.stylize(name, 'name');
    } else {
      // Otherwise convert to a single-quoted string.
      name = name.replace(/'/g, "\\'")
                 .replace(/\\"/g, '"')
                 .replace(/(^"|"$)/g, "'");
      name = ctx.stylize(name, 'string');
    }
  }

  return name + ': ' + str;
}
function reduceToSingleString(output, base, braces) { | |
var numLinesEst = 0; | |
var length = output.reduce(function(prev, cur) { | |
numLinesEst++; | |
if (cur.indexOf('\n') >= 0) numLinesEst++; | |
return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; | |
}, 0); | |
if (length > 60) { | |
return braces[0] + | |
(base === '' ? '' : base + '\n ') + | |
' ' + | |
output.join(',\n ') + | |
' ' + | |
braces[1]; | |
} | |
return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; | |
} | |
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(ar) {
  return Array.isArray(ar);
}
exports.isArray = isArray;

function isBoolean(arg) {
  return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;

function isNull(arg) {
  return arg === null;
}
exports.isNull = isNull;

// Loose equality deliberately matches both null and undefined.
function isNullOrUndefined(arg) {
  return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;

function isNumber(arg) {
  return typeof arg === 'number';
}
exports.isNumber = isNumber;

function isString(arg) {
  return typeof arg === 'string';
}
exports.isString = isString;

function isSymbol(arg) {
  return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;

function isUndefined(arg) {
  return arg === void 0;
}
exports.isUndefined = isUndefined;

// [object X] brand checks are harder to spoof than instanceof.
function isRegExp(re) {
  return isObject(re) && objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;

function isObject(arg) {
  return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;

function isDate(d) {
  return isObject(d) && objectToString(d) === '[object Date]';
}
exports.isDate = isDate;

// Accepts both brand-checked errors and Error subclasses.
function isError(e) {
  return isObject(e) &&
      (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;

function isFunction(arg) {
  return typeof arg === 'function';
}
exports.isFunction = isFunction;

function isPrimitive(arg) {
  return arg === null ||
         typeof arg === 'boolean' ||
         typeof arg === 'number' ||
         typeof arg === 'string' ||
         typeof arg === 'symbol' ||  // ES6 symbol
         typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;

exports.isBuffer = require('./support/isBuffer');

function objectToString(o) {
  return Object.prototype.toString.call(o);
}
// Left-pad a non-negative integer to at least two digits ('7' -> '07').
function pad(n) {
  if (n < 10) {
    return '0' + n.toString(10);
  }
  return n.toString(10);
}
var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep',
              'Oct', 'Nov', 'Dec'];
// Current local time formatted like: 26 Feb 16:19:34
function timestamp() {
  var now = new Date();
  var clock = [now.getHours(), now.getMinutes(), now.getSeconds()]
    .map(pad)
    .join(':');
  return [now.getDate(), months[now.getMonth()], clock].join(' ');
}
// log is just a thin wrapper to console.log that prepends a timestamp
exports.log = function() {
  console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments));
};
/**
 * Inherit the prototype methods from one constructor into another.
 *
 * The Function.prototype.inherits from lang.js rewritten as a standalone
 * function (not on Function.prototype). NOTE: If this file is to be loaded
 * during bootstrapping this function needs to be rewritten using some native
 * functions as prototype setup using normal JavaScript does not work as
 * expected during bootstrapping (see mirror.js in r114903).
 *
 * @param {function} ctor Constructor function which needs to inherit the
 * prototype.
 * @param {function} superCtor Constructor function to inherit prototype from.
 */
exports.inherits = require('inherits');
// Shallow-copy `add`'s own enumerable keys onto `origin`. Mutates and
// returns `origin`; a non-object `add` is ignored.
exports._extend = function(origin, add) {
  // Don't do anything if add isn't an object
  if (!add || !isObject(add)) return origin;
  var keys = Object.keys(add);
  var i = keys.length;
  while (i--) {
    origin[keys[i]] = add[keys[i]];
  }
  return origin;
};
// Own-property check that is safe even when `obj` shadows the method
// or was created with Object.create(null).
function hasOwnProperty(obj, prop) {
  var owns = Object.prototype.hasOwnProperty;
  return owns.call(obj, prop);
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./support/isBuffer":31,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"inherits":33}],33:[function(require,module,exports){ | |
// Classical-inheritance helper: link ctor.prototype to superCtor.prototype
// and expose the parent as `ctor.super_` (Node convention).
if (typeof Object.create === 'function') {
  // implementation from standard node.js 'util' module
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    ctor.prototype = Object.create(superCtor.prototype, {
      constructor: {
        value: ctor,
        enumerable: false,
        writable: true,
        configurable: true
      }
    });
  };
} else {
  // old school shim for old browsers
  // (a throwaway constructor gives a prototype chain without invoking
  // superCtor's constructor logic)
  module.exports = function inherits(ctor, superCtor) {
    ctor.super_ = superCtor
    var TempCtor = function () {}
    TempCtor.prototype = superCtor.prototype
    ctor.prototype = new TempCtor()
    ctor.prototype.constructor = ctor
  }
}
},{}],34:[function(require,module,exports){ | |
// Queue a single batch entry on `this._operations` and return `this`
// so calls can be chained. A `prefix` carried in `options` is hoisted
// onto the operation itself (and removed from `options`) so sublevel
// prefixes are applied when the batch is written.
function addOperation (type, key, value, options) {
  var op = {
    type: type,
    key: key,
    value: value,
    options: options
  }
  var prefix = options && options.prefix
  if (prefix) {
    op.prefix = prefix
    delete options.prefix
  }
  this._operations.push(op)
  return this
}
// Chainable batch builder bound to a sublevel-wrapped db (`sdb`);
// mirrors the levelup chained-batch API (put/del/clear/write).
function Batch(sdb) {
  this._operations = []
  this._sdb = sdb
  // put/del share one implementation, partially applied with the op type
  this.put = addOperation.bind(this, 'put')
  this.del = addOperation.bind(this, 'del')
}
var B = Batch.prototype
// Discard all queued operations.
B.clear = function () {
  this._operations = []
}
// Flush the queued operations through the wrapped db's batch().
B.write = function (cb) {
  this._sdb.batch(this._operations, cb)
}
module.exports = Batch
},{}],35:[function(require,module,exports){ | |
(function (process){ | |
var EventEmitter = require('events').EventEmitter | |
var next = process.nextTick | |
var SubDb = require('./sub') | |
var Batch = require('./batch') | |
var fixRange = require('level-fix-range') | |
var Hooks = require('level-hooks') | |
// Wrap a levelup instance with sublevel support. Idempotent: a db that
// already exposes .sublevel is returned unchanged.
module.exports = function (_db, options) {
  // Put the wrapper methods on an object that prototypally inherits from
  // the real db, so the underlying instance itself is left untouched.
  function DB () {}
  DB.prototype = _db
  var db = new DB()
  if (db.sublevel) return db
  options = options || {}
  //use \xff (255) as the separator,
  //so that sections of the database will sort after the regular keys
  var sep = options.sep = options.sep || '\xff'
  db._options = options
  Hooks(db)
  db.sublevels = {}
  // Create (or return a cached) namespaced section of the database.
  db.sublevel = function (prefix, options) {
    if(db.sublevels[prefix])
      return db.sublevels[prefix]
    return new SubDb(db, prefix, options || this._options)
  }
  db.methods = {}
  // The root database has no prefix; keys pass through unchanged.
  db.prefix = function (key) {
    return '' + (key || '')
  }
  // Register pre/post hooks; called with a single argument, the hook
  // applies to the whole root key range (everything below the separator).
  db.pre = function (range, hook) {
    if(!hook)
      hook = range, range = {
        max : sep
      }
    return db.hooks.pre(range, hook)
  }
  db.post = function (range, hook) {
    if(!hook)
      hook = range, range = {
        max : sep
      }
    return db.hooks.post(range, hook)
  }
  // Clamp stream ranges so root-level streams do not bleed into
  // sublevel sections (keys at or above the separator).
  function safeRange(fun) {
    return function (opts) {
      opts = opts || {}
      opts = fixRange(opts)
      if(opts.reverse) opts.start = opts.start || sep
      else opts.end = opts.end || sep
      return fun.call(db, opts)
    }
  }
  db.readStream =
  db.createReadStream = safeRange(db.createReadStream)
  db.keyStream =
  db.createKeyStream = safeRange(db.createKeyStream)
  db.valuesStream =
  db.createValueStream = safeRange(db.createValueStream)
  var batch = db.batch
  // batch() with no array returns a chainable Batch; with an array,
  // prefix-aware entries get their keys rewritten before delegating to
  // the underlying batch implementation.
  db.batch = function (changes, opts, cb) {
    if(!Array.isArray(changes))
      return new Batch(db)
    changes.forEach(function (e) {
      if(e.prefix) {
        if('function' === typeof e.prefix.prefix)
          e.key = e.prefix.prefix(e.key)
        else if('string' === typeof e.prefix)
          e.key = e.prefix + e.key
      }
    })
    batch.call(db, changes, opts, cb)
  }
  return db
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./batch":34,"./sub":46,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"events":18,"level-fix-range":36,"level-hooks":38}],36:[function(require,module,exports){ | |
var clone = require('clone')
// Normalize {min,max,start,end,reverse} range options into {start,end},
// ordering the bounds and swapping them when `reverse` is set. The input
// object is cloned, never mutated; min/max are removed from the result.
module.exports =
function fixRange(opts) {
  opts = clone(opts)
  var reverse = opts.reverse
  var end = opts.max || opts.end
  var start = opts.min || opts.start
  var range = [start, end]
  if(start != null && end != null)
    // NOTE(review): default Array#sort compares lexicographically — fine
    // for the string keys used here, surprising for numeric bounds.
    range.sort()
  if(reverse)
    range = range.reverse()
  opts.start = range[0]
  opts.end = range[1]
  delete opts.min
  delete opts.max
  return opts
}
},{"clone":37}],37:[function(require,module,exports){ | |
(function (Buffer){ | |
'use strict';
// Internal: reliable [[Class]] tag helper.
function objectToString(o) {
  return Object.prototype.toString.call(o);
}
// shim for Node's 'util' package
// DO NOT REMOVE THIS! It is required for compatibility with EnderJS (http://enderjs.com/).
var util = {
  isArray: function (ar) {
    return Array.isArray(ar) || (typeof ar === 'object' && objectToString(ar) === '[object Array]');
  },
  isDate: function (d) {
    return typeof d === 'object' && objectToString(d) === '[object Date]';
  },
  isRegExp: function (re) {
    return typeof re === 'object' && objectToString(re) === '[object RegExp]';
  },
  // Rebuild a regexp's flag string ('g'/'i'/'m') so it can be recreated.
  getRegExpFlags: function (re) {
    var flags = '';
    re.global && (flags += 'g');
    re.ignoreCase && (flags += 'i');
    re.multiline && (flags += 'm');
    return flags;
  }
};
if (typeof module === 'object')
  module.exports = clone;
/**
 * Clones (copies) an Object using deep copying.
 *
 * This function supports circular references by default, but if you are certain
 * there are no circular references in your object, you can save some CPU time
 * by calling clone(obj, false).
 *
 * Caution: if `circular` is false and `parent` contains circular references,
 * your program may enter an infinite loop and crash.
 *
 * @param `parent` - the object to be cloned
 * @param `circular` - set to true if the object to be cloned may contain
 * circular references. (optional - true by default)
 * @param `depth` - set to a number if the object is only to be cloned to
 * a particular depth. (optional - defaults to Infinity)
 * @param `prototype` - sets the prototype to be used when cloning an object.
 * (optional - defaults to parent prototype).
 */
function clone(parent, circular, depth, prototype) {
  // maintain two arrays for circular references, where corresponding parents
  // and children have the same index
  var allParents = [];
  var allChildren = [];
  var useBuffer = typeof Buffer != 'undefined';
  if (typeof circular == 'undefined')
    circular = true;
  if (typeof depth == 'undefined')
    depth = Infinity;
  // recurse this function so we don't reset allParents and allChildren
  function _clone(parent, depth) {
    // cloning null always returns null
    if (parent === null)
      return null;
    // depth exhausted: share the remaining subtree by reference
    if (depth == 0)
      return parent;
    var child;
    // primitives (and functions) are returned as-is
    if (typeof parent != 'object') {
      return parent;
    }
    if (util.isArray(parent)) {
      child = [];
    } else if (util.isRegExp(parent)) {
      child = new RegExp(parent.source, util.getRegExpFlags(parent));
      if (parent.lastIndex) child.lastIndex = parent.lastIndex;
    } else if (util.isDate(parent)) {
      child = new Date(parent.getTime());
    } else if (useBuffer && Buffer.isBuffer(parent)) {
      // NOTE(review): `new Buffer(n)` is deprecated in modern Node
      // (prefer Buffer.allocUnsafe); kept as-is in this legacy bundle.
      child = new Buffer(parent.length);
      parent.copy(child);
      return child;
    } else {
      if (typeof prototype == 'undefined') child = Object.create(Object.getPrototypeOf(parent));
      else child = Object.create(prototype);
    }
    if (circular) {
      // linear scan makes the whole clone O(n^2), but it preserves
      // reference identity for shared/cyclic substructures
      var index = allParents.indexOf(parent);
      if (index != -1) {
        return allChildren[index];
      }
      allParents.push(parent);
      allChildren.push(child);
    }
    // for..in also walks inherited enumerable keys; they are copied as
    // own properties of the child
    for (var i in parent) {
      child[i] = _clone(parent[i], depth - 1);
    }
    return child;
  }
  return _clone(parent, depth);
}
/**
 * Simple flat clone using prototype, accepts only objects, usefull for property
 * override on FLAT configuration object (no nested props).
 *
 * USE WITH CAUTION! This may not behave as you wish if you do not know how this
 * works.
 */
clone.clonePrototype = function(parent) {
  if (parent === null)
    return null;
  var c = function () {};
  c.prototype = parent;
  return new c();
};
}).call(this,require("buffer").Buffer) | |
},{"buffer":20}],38:[function(require,module,exports){ | |
var ranges = require('string-range')
// Install pre/post hook support on a levelup db by monkey-patching
// put/del/batch. Idempotent: a db that already has .hooks is left alone.
module.exports = function (db) {
  if(db.hooks) {
    return
  }
  var posthooks = []
  var prehooks = []
  // Extract a string prefix from a string, a {prefix: string} object,
  // or a sublevel-like object whose .prefix is a function.
  function getPrefix (p) {
    return p && (
      'string' === typeof p ? p
    : 'string' === typeof p.prefix ? p.prefix
    : 'function' === typeof p.prefix ? p.prefix()
    : ''
    )
  }
  function getKeyEncoding (db) {
    if(db && db._getKeyEncoding)
      return db._getKeyEncoding(db)
  }
  function getValueEncoding (db) {
    if(db && db._getValueEncoding)
      return db._getValueEncoding(db)
  }
  // Build an unregister function for `item`; returns true when removed.
  function remover (array, item) {
    return function () {
      var i = array.indexOf(item)
      if(!~i) return false
      array.splice(i, 1)
      return true
    }
  }
  db.hooks = {
    // post(hook) or post(range, hook): observe committed changes.
    post: function (prefix, hook) {
      if(!hook) hook = prefix, prefix = ''
      var h = {test: ranges.checker(prefix), hook: hook}
      posthooks.push(h)
      return remover(posthooks, h)
    },
    // pre(hook) or pre(range, hook): intercept/extend pending changes.
    pre: function (prefix, hook) {
      if(!hook) hook = prefix, prefix = ''
      var h = {
        test: ranges.checker(prefix),
        hook: hook,
        // safe (default): refuse inserts back into the hook's own range
        safe: false !== prefix.safe
      }
      prehooks.push(h)
      return remover(prehooks, h)
    },
    posthooks: posthooks,
    prehooks: prehooks
  }
  //POST HOOKS
  // Fan a committed change out to every matching post hook.
  function each (e) {
    if(e && e.type) {
      posthooks.forEach(function (h) {
        if(h.test(e.key)) h.hook(e)
      })
    }
  }
  db.on('put', function (key, val) {
    each({type: 'put', key: key, value: val})
  })
  db.on('del', function (key, val) {
    each({type: 'del', key: key, value: val})
  })
  db.on('batch', function onBatch (ary) {
    ary.forEach(each)
  })
  //PRE HOOKS
  var put = db.put
  var del = db.del
  var batch = db.batch
  // Run every matching pre hook over the pending change list `b`
  // (hooks may veto entries or append new ones), then commit via the
  // original put/del/batch.
  function callHooks (isBatch, b, opts, cb) {
    try {
      b.forEach(function hook(e, i) {
        prehooks.forEach(function (h) {
          if(h.test(String(e.key))) {
            //optimize this?
            //maybe faster to not create a new object each time?
            //have one object and expose scope to it?
            var context = {
              // Append a change; ch === false vetoes the current entry,
              // undefined is a no-op. Returns the context for chaining.
              add: function (ch, db) {
                if(typeof ch === 'undefined') {
                  return this
                }
                if(ch === false)
                  return delete b[i]
                var prefix = (
                  getPrefix(ch.prefix) ||
                  getPrefix(db) ||
                  h.prefix || ''
                )
                //don't leave a circular json object there incase using multilevel.
                if(prefix) ch.prefix = prefix
                ch.key = prefix + ch.key
                if(h.safe && h.test(String(ch.key))) {
                  //this usually means a stack overflow.
                  throw new Error('prehook cannot insert into own range')
                }
                var ke = ch.keyEncoding || getKeyEncoding(ch.prefix)
                var ve = ch.valueEncoding || getValueEncoding(ch.prefix)
                if(ke) ch.keyEncoding = ke
                if(ve) ch.valueEncoding = ve
                b.push(ch)
                // recursively run hooks over the newly appended change
                hook(ch, b.length - 1)
                return this
              },
              put: function (ch, db) {
                if('object' === typeof ch) ch.type = 'put'
                return this.add(ch, db)
              },
              del: function (ch, db) {
                if('object' === typeof ch) ch.type = 'del'
                return this.add(ch, db)
              },
              veto: function () {
                return this.add(false)
              }
            }
            h.hook.call(context, e, context.add, b)
          }
        })
      })
    } catch (err) {
      // surface hook errors through the callback (opts may be the cb)
      return (cb || opts)(err)
    }
    b = b.filter(function (e) {
      return e && e.type //filter out empty items
    })
    // single non-batch change: use the cheaper put/del path
    if(b.length == 1 && !isBatch) {
      var change = b[0]
      return change.type == 'put'
        ? put.call(db, change.key, change.value, opts, cb)
        : del.call(db, change.key, opts, cb)
    }
    return batch.call(db, b, opts, cb)
  }
  db.put = function (key, value, opts, cb ) {
    var batch = [{key: key, value: value, type: 'put'}]
    return callHooks(false, batch, opts, cb)
  }
  db.del = function (key, opts, cb) {
    var batch = [{key: key, type: 'del'}]
    return callHooks(false, batch, opts, cb)
  }
  db.batch = function (batch, opts, cb) {
    return callHooks(true, batch, opts, cb)
  }
}
},{"string-range":39}],39:[function(require,module,exports){ | |
//force to a valid range
// Coerce `obj` into a range object: null/undefined -> {} (match all),
// a string -> { min, max } spanning every key with that prefix,
// anything else is assumed to already be a range object.
var range = exports.range = function (obj) {
  if (null == obj) return {}
  // BUG FIX: the original tested `typeof range` — the enclosing function
  // itself, which is always 'function' — so the string branch was dead
  // code and string arguments passed through unconverted.
  if ('string' === typeof obj) return {
    min: obj, max: obj + '\xff'
  }
  return obj
}
//turn into a sub range.
// Project `range` into the keyspace of a section rooted at `within`;
// `term` (default '\xff') marks the end of that section.
var prefix = exports.prefix = function (range, within, term) {
  range = exports.range(range)
  var _range = {}
  term = term || '\xff'
  if(range instanceof RegExp || 'function' == typeof range) {
    _range.min = within
    // NOTE(review): the trailing comma below is the comma operator, so
    // both assignments form one statement — intentional but easy to misread.
    _range.max = within + term,
    _range.inner = function (k) {
      // strip the prefix before applying the regexp / predicate
      var j = k.substring(within.length)
      if(range.test)
        return range.test(j)
      return range(j)
    }
  }
  else if('object' === typeof range) {
    _range.min = within + (range.min || range.start || '')
    _range.max = within + (range.max || range.end || (term || '~'))
    _range.reverse = !!range.reverse
  }
  return _range
}
//return a function that checks a range
// Strings test as prefixes, RegExps via .test, objects via min/max
// bounds (plus an optional .inner predicate), functions as-is.
var checker = exports.checker = function (range) {
  if(!range) range = {}
  if ('string' === typeof range)
    return function (key) {
      return key.indexOf(range) == 0
    }
  else if(range instanceof RegExp)
    return function (key) {
      return range.test(key)
    }
  else if('object' === typeof range)
    return function (key) {
      var min = range.min || range.start
      var max = range.max || range.end
      // fixes keys passed as ints from sublevels
      key = String(key)
      return (
        !min || key >= min
      ) && (
        !max || key <= max
      ) && (
        !range.inner || (
          range.inner.test
          ? range.inner.test(key)
          : range.inner(key)
        )
      )
    }
  else if('function' === typeof range)
    return range
}
//check if a key is within a range.
// NOTE(review): the local name `satifies` is a typo, but the exported
// name (`exports.satisfies`) is spelled correctly, so callers are fine.
var satifies = exports.satisfies = function (key, range) {
  return checker(range)(key)
}
},{}],40:[function(require,module,exports){ | |
module.exports = hasKeys | |
// A value can carry own keys iff it is a non-null object or a function.
function hasKeys(source) {
  if (source === null) return false
  var t = typeof source
  return t === "object" || t === "function"
}
},{}],41:[function(require,module,exports){ | |
var Keys = require("object-keys") | |
var hasKeys = require("./has-keys") | |
module.exports = extend | |
// Shallow-merge the own enumerable keys of every argument into a fresh
// object. Later arguments win on conflicts; non-object arguments are
// skipped. Never mutates its inputs.
function extend() {
  var target = {}
  for (var a = 0; a < arguments.length; a++) {
    var source = arguments[a]
    if (!hasKeys(source)) {
      continue
    }
    var names = Keys(source)
    for (var k = 0; k < names.length; k++) {
      var name = names[k]
      target[name] = source[name]
    }
  }
  return target
}
},{"./has-keys":40,"object-keys":42}],42:[function(require,module,exports){ | |
module.exports = Object.keys || require('./shim'); | |
},{"./shim":45}],43:[function(require,module,exports){ | |
var hasOwn = Object.prototype.hasOwnProperty; | |
var toString = Object.prototype.toString; | |
module.exports = function forEach (obj, fn, ctx) { | |
if (toString.call(fn) !== '[object Function]') { | |
throw new TypeError('iterator must be a function'); | |
} | |
var l = obj.length; | |
if (l === +l) { | |
for (var i = 0; i < l; i++) { | |
fn.call(ctx, obj[i], i, obj); | |
} | |
} else { | |
for (var k in obj) { | |
if (hasOwn.call(obj, k)) { | |
fn.call(ctx, obj[k], k, obj); | |
} | |
} | |
} | |
}; | |
},{}],44:[function(require,module,exports){ | |
/**! | |
* is | |
* the definitive JavaScript type testing library | |
* | |
* @copyright 2013 Enrico Marino | |
* @license MIT | |
*/ | |
var objProto = Object.prototype;
var owns = objProto.hasOwnProperty;
var toString = objProto.toString;
// NaN is the only value that is not strictly equal to itself.
var isActualNaN = function (value) {
  return value !== value;
};
// Primitive typeof results whose properties can never be host objects.
var NON_HOST_TYPES = {
  "boolean": 1,
  "number": 1,
  "string": 1,
  "undefined": 1
};
/**
 * Expose `is`
 */
var is = module.exports = {};
/** | |
* Test general. | |
*/ | |
/** | |
* is.type | |
* Test if `value` is a type of `type`. | |
* | |
* @param {Mixed} value value to test | |
* @param {String} type type | |
* @return {Boolean} true if `value` is a type of `type`, false otherwise | |
* @api public | |
*/ | |
is.a = | |
is.type = function (value, type) { | |
return typeof value === type; | |
}; | |
/** | |
* is.defined | |
* Test if `value` is defined. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if 'value' is defined, false otherwise | |
* @api public | |
*/ | |
is.defined = function (value) { | |
return value !== undefined; | |
}; | |
/** | |
* is.empty | |
* Test if `value` is empty. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is empty, false otherwise | |
* @api public | |
*/ | |
is.empty = function (value) { | |
var type = toString.call(value); | |
var key; | |
if ('[object Array]' === type || '[object Arguments]' === type) { | |
return value.length === 0; | |
} | |
if ('[object Object]' === type) { | |
for (key in value) if (owns.call(value, key)) return false; | |
return true; | |
} | |
if ('[object String]' === type) { | |
return '' === value; | |
} | |
return false; | |
}; | |
/**
 * is.equal
 * Test if `value` is (deep-)equal to `other`.
 *
 * Objects are compared by recursing over `value`'s enumerable keys
 * (NOTE(review): extra keys present only on `other` are not detected —
 * preserved as-is from the original). Arrays compare length and every
 * element; functions compare prototypes; dates compare timestamps.
 *
 * @param {Mixed} value value to test
 * @param {Mixed} other value to compare with
 * @return {Boolean} true if `value` is equal to `other`, false otherwise
 */
is.equal = function (value, other) {
  var type = toString.call(value)
  var key;
  if (type !== toString.call(other)) {
    return false;
  }
  if ('[object Object]' === type) {
    for (key in value) {
      if (!is.equal(value[key], other[key])) {
        return false;
      }
    }
    return true;
  }
  if ('[object Array]' === type) {
    key = value.length;
    if (key !== other.length) {
      return false;
    }
    // BUG FIX: `while (--key)` stopped before reaching index 0, so the
    // first elements were never compared (e.g. [1, x] equaled [2, x]).
    // `while (key--)` visits every index down to and including 0,
    // matching the `while (--len >= 0)` pattern used by is.maximum.
    while (key--) {
      if (!is.equal(value[key], other[key])) {
        return false;
      }
    }
    return true;
  }
  if ('[object Function]' === type) {
    return value.prototype === other.prototype;
  }
  if ('[object Date]' === type) {
    return value.getTime() === other.getTime();
  }
  return value === other;
};
/** | |
* is.hosted | |
* Test if `value` is hosted by `host`. | |
* | |
* @param {Mixed} value to test | |
* @param {Mixed} host host to test with | |
* @return {Boolean} true if `value` is hosted by `host`, false otherwise | |
* @api public | |
*/ | |
is.hosted = function (value, host) { | |
var type = typeof host[value]; | |
return type === 'object' ? !!host[value] : !NON_HOST_TYPES[type]; | |
}; | |
/** | |
* is.instance | |
* Test if `value` is an instance of `constructor`. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an instance of `constructor` | |
* @api public | |
*/ | |
is.instance = is['instanceof'] = function (value, constructor) { | |
return value instanceof constructor; | |
}; | |
/** | |
* is.null | |
* Test if `value` is null. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is null, false otherwise | |
* @api public | |
*/ | |
is['null'] = function (value) { | |
return value === null; | |
}; | |
/** | |
* is.undefined | |
* Test if `value` is undefined. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is undefined, false otherwise | |
* @api public | |
*/ | |
is.undefined = function (value) { | |
return value === undefined; | |
}; | |
/** | |
* Test arguments. | |
*/ | |
/** | |
* is.arguments | |
* Test if `value` is an arguments object. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an arguments object, false otherwise | |
* @api public | |
*/ | |
is.arguments = function (value) { | |
var isStandardArguments = '[object Arguments]' === toString.call(value); | |
var isOldArguments = !is.array(value) && is.arraylike(value) && is.object(value) && is.fn(value.callee); | |
return isStandardArguments || isOldArguments; | |
}; | |
/** | |
* Test array. | |
*/ | |
/** | |
* is.array | |
* Test if 'value' is an array. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an array, false otherwise | |
* @api public | |
*/ | |
is.array = function (value) { | |
return '[object Array]' === toString.call(value); | |
}; | |
/** | |
* is.arguments.empty | |
* Test if `value` is an empty arguments object. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an empty arguments object, false otherwise | |
* @api public | |
*/ | |
is.arguments.empty = function (value) { | |
return is.arguments(value) && value.length === 0; | |
}; | |
/** | |
* is.array.empty | |
* Test if `value` is an empty array. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an empty array, false otherwise | |
* @api public | |
*/ | |
is.array.empty = function (value) { | |
return is.array(value) && value.length === 0; | |
}; | |
/** | |
* is.arraylike | |
* Test if `value` is an arraylike object. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an arguments object, false otherwise | |
* @api public | |
*/ | |
is.arraylike = function (value) { | |
return !!value && !is.boolean(value) | |
&& owns.call(value, 'length') | |
&& isFinite(value.length) | |
&& is.number(value.length) | |
&& value.length >= 0; | |
}; | |
/** | |
* Test boolean. | |
*/ | |
/** | |
* is.boolean | |
* Test if `value` is a boolean. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a boolean, false otherwise | |
* @api public | |
*/ | |
is.boolean = function (value) { | |
return '[object Boolean]' === toString.call(value); | |
}; | |
/** | |
* is.false | |
* Test if `value` is false. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is false, false otherwise | |
* @api public | |
*/ | |
is['false'] = function (value) { | |
return is.boolean(value) && (value === false || value.valueOf() === false); | |
}; | |
/** | |
* is.true | |
* Test if `value` is true. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is true, false otherwise | |
* @api public | |
*/ | |
is['true'] = function (value) { | |
return is.boolean(value) && (value === true || value.valueOf() === true); | |
}; | |
/** | |
* Test date. | |
*/ | |
/** | |
* is.date | |
* Test if `value` is a date. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a date, false otherwise | |
* @api public | |
*/ | |
is.date = function (value) { | |
return '[object Date]' === toString.call(value); | |
}; | |
/** | |
* Test element. | |
*/ | |
/** | |
* is.element | |
* Test if `value` is an html element. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an HTML Element, false otherwise | |
* @api public | |
*/ | |
is.element = function (value) { | |
return value !== undefined | |
&& typeof HTMLElement !== 'undefined' | |
&& value instanceof HTMLElement | |
&& value.nodeType === 1; | |
}; | |
/** | |
* Test error. | |
*/ | |
/** | |
* is.error | |
* Test if `value` is an error object. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an error object, false otherwise | |
* @api public | |
*/ | |
is.error = function (value) { | |
return '[object Error]' === toString.call(value); | |
}; | |
/** | |
* Test function. | |
*/ | |
/** | |
* is.fn / is.function (deprecated) | |
* Test if `value` is a function. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a function, false otherwise | |
* @api public | |
*/ | |
is.fn = is['function'] = function (value) {
  // Special-case window.alert: presumably some browsers' host functions
  // do not stringify as '[object Function]' — TODO confirm which.
  var isAlert = typeof window !== 'undefined' && value === window.alert;
  return isAlert || '[object Function]' === toString.call(value);
};
/** | |
* Test number. | |
*/ | |
/** | |
* is.number | |
* Test if `value` is a number. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a number, false otherwise | |
* @api public | |
*/ | |
is.number = function (value) { | |
return '[object Number]' === toString.call(value); | |
}; | |
/** | |
* is.infinite | |
* Test if `value` is positive or negative infinity. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is positive or negative Infinity, false otherwise | |
* @api public | |
*/ | |
is.infinite = function (value) { | |
return value === Infinity || value === -Infinity; | |
}; | |
/** | |
* is.decimal | |
* Test if `value` is a decimal number. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a decimal number, false otherwise | |
* @api public | |
*/ | |
is.decimal = function (value) { | |
return is.number(value) && !isActualNaN(value) && !is.infinite(value) && value % 1 !== 0; | |
}; | |
/** | |
* is.divisibleBy | |
* Test if `value` is divisible by `n`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} n dividend | |
* @return {Boolean} true if `value` is divisible by `n`, false otherwise | |
* @api public | |
*/ | |
is.divisibleBy = function (value, n) { | |
var isDividendInfinite = is.infinite(value); | |
var isDivisorInfinite = is.infinite(n); | |
var isNonZeroNumber = is.number(value) && !isActualNaN(value) && is.number(n) && !isActualNaN(n) && n !== 0; | |
return isDividendInfinite || isDivisorInfinite || (isNonZeroNumber && value % n === 0); | |
}; | |
/** | |
* is.int | |
* Test if `value` is an integer. | |
* | |
* @param value to test | |
* @return {Boolean} true if `value` is an integer, false otherwise | |
* @api public | |
*/ | |
is.int = function (value) { | |
return is.number(value) && !isActualNaN(value) && value % 1 === 0; | |
}; | |
/** | |
* is.maximum | |
* Test if `value` is greater than 'others' values. | |
* | |
* @param {Number} value value to test | |
* @param {Array} others values to compare with | |
* @return {Boolean} true if `value` is greater than `others` values | |
* @api public | |
*/ | |
is.maximum = function (value, others) { | |
if (isActualNaN(value)) { | |
throw new TypeError('NaN is not a valid value'); | |
} else if (!is.arraylike(others)) { | |
throw new TypeError('second argument must be array-like'); | |
} | |
var len = others.length; | |
while (--len >= 0) { | |
if (value < others[len]) { | |
return false; | |
} | |
} | |
return true; | |
}; | |
/** | |
* is.minimum | |
* Test if `value` is less than `others` values. | |
* | |
* @param {Number} value value to test | |
* @param {Array} others values to compare with | |
* @return {Boolean} true if `value` is less than `others` values | |
* @api public | |
*/ | |
is.minimum = function (value, others) {
  if (isActualNaN(value)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.arraylike(others)) {
    throw new TypeError('second argument must be array-like');
  }
  // `value` is the minimum when no element is below it.
  for (var i = others.length - 1; i >= 0; i--) {
    if (value > others[i]) {
      return false;
    }
  }
  return true;
};
/** | |
* is.nan | |
* Test if `value` is not a number. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is not a number, false otherwise | |
* @api public | |
*/ | |
is.nan = function (value) {
  // Anything that is not a number qualifies; so does actual NaN
  // (the only value not equal to itself).
  if (!is.number(value)) return true;
  return value !== value;
};
/** | |
* is.even | |
* Test if `value` is an even number. | |
* | |
* @param {Number} value value to test | |
* @return {Boolean} true if `value` is an even number, false otherwise | |
* @api public | |
*/ | |
is.even = function (value) {
  // Infinities are treated as even by this library's convention.
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && value % 2 === 0;
};
/** | |
* is.odd | |
* Test if `value` is an odd number. | |
* | |
* @param {Number} value value to test | |
* @return {Boolean} true if `value` is an odd number, false otherwise | |
* @api public | |
*/ | |
is.odd = function (value) {
  // Infinities are treated as odd by this library's convention.
  if (is.infinite(value)) return true;
  return is.number(value) && value === value && value % 2 !== 0;
};
/** | |
* is.ge | |
* Test if `value` is greater than or equal to `other`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} other value to compare with | |
* @return {Boolean} | |
* @api public | |
*/ | |
is.ge = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value >= other;
};
/** | |
* is.gt | |
* Test if `value` is greater than `other`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} other value to compare with | |
* @return {Boolean} | |
* @api public | |
*/ | |
is.gt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value > other;
};
/** | |
* is.le | |
* Test if `value` is less than or equal to `other`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} other value to compare with | |
* @return {Boolean} if 'value' is less than or equal to 'other' | |
* @api public | |
*/ | |
is.le = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value <= other;
};
/** | |
* is.lt | |
* Test if `value` is less than `other`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} other value to compare with | |
* @return {Boolean} if `value` is less than `other` | |
* @api public | |
*/ | |
is.lt = function (value, other) {
  if (isActualNaN(value) || isActualNaN(other)) {
    throw new TypeError('NaN is not a valid value');
  }
  var bothFinite = !is.infinite(value) && !is.infinite(other);
  return bothFinite && value < other;
};
/** | |
* is.within | |
* Test if `value` is within `start` and `finish`. | |
* | |
* @param {Number} value value to test | |
* @param {Number} start lower bound | |
* @param {Number} finish upper bound | |
* @return {Boolean} true if 'value' is is within 'start' and 'finish' | |
* @api public | |
*/ | |
is.within = function (value, start, finish) {
  if (isActualNaN(value) || isActualNaN(start) || isActualNaN(finish)) {
    throw new TypeError('NaN is not a valid value');
  }
  if (!is.number(value) || !is.number(start) || !is.number(finish)) {
    throw new TypeError('all arguments must be numbers');
  }
  // Any infinite bound (or value) short-circuits to true.
  if (is.infinite(value) || is.infinite(start) || is.infinite(finish)) {
    return true;
  }
  return start <= value && value <= finish;
};
/** | |
* Test object. | |
*/ | |
/** | |
* is.object | |
* Test if `value` is an object. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is an object, false otherwise | |
* @api public | |
*/ | |
is.object = function (value) {
  // Coerce to a real boolean: previously a falsy `value` (null, 0, '',
  // undefined) leaked through the `&&` as the return value, violating
  // the documented `@return {Boolean}` contract. Truthiness is unchanged.
  return !!value && '[object Object]' === toString.call(value);
};
/** | |
* is.hash | |
* Test if `value` is a hash - a plain object literal. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a hash, false otherwise | |
* @api public | |
*/ | |
is.hash = function (value) {
  // A "hash" is a plain object literal: direct Object constructor and
  // none of the DOM/host-object markers (nodeType, setInterval).
  var plain = is.object(value);
  return plain && value.constructor === Object &&
    !value.nodeType && !value.setInterval;
};
/** | |
* Test regexp. | |
*/ | |
/** | |
* is.regexp | |
* Test if `value` is a regular expression. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if `value` is a regexp, false otherwise | |
* @api public | |
*/ | |
is.regexp = function (value) {
  // Object.prototype.toString is reliable across realms.
  return toString.call(value) === '[object RegExp]';
};
/** | |
* Test string. | |
*/ | |
/** | |
* is.string | |
* Test if `value` is a string. | |
* | |
* @param {Mixed} value value to test | |
* @return {Boolean} true if 'value' is a string, false otherwise | |
* @api public | |
*/ | |
is.string = function (value) {
  // Matches both string primitives and String wrapper objects.
  return toString.call(value) === '[object String]';
};
},{}],45:[function(require,module,exports){ | |
(function () {
  "use strict";
  // Object.keys shim, modified from https://github.com/kriskowal/es5-shim
  var has = Object.prototype.hasOwnProperty;
  var is = require('is');
  var forEach = require('foreach');
  // Old IE fails to enumerate properties that shadow DontEnum built-ins.
  var hasDontEnumBug = !({ 'toString': null }).propertyIsEnumerable('toString');
  var dontEnums = [
    "toString",
    "toLocaleString",
    "valueOf",
    "hasOwnProperty",
    "isPrototypeOf",
    "propertyIsEnumerable",
    "constructor"
  ];

  // Return the own enumerable property names of `object`.
  function keys(object) {
    if (!is.object(object) && !is.array(object)) {
      throw new TypeError("Object.keys called on a non-object");
    }
    var theKeys = [];
    for (var name in object) {
      if (has.call(object, name)) {
        theKeys.push(name);
      }
    }
    // Manually pick up properties hidden by the DontEnum bug.
    if (hasDontEnumBug) {
      forEach(dontEnums, function (dontEnum) {
        if (has.call(object, dontEnum)) {
          theKeys.push(dontEnum);
        }
      });
    }
    return theKeys;
  }

  module.exports = keys;
}());
},{"foreach":43,"is":44}],46:[function(require,module,exports){ | |
var EventEmitter = require('events').EventEmitter | |
var inherits = require('util').inherits | |
var ranges = require('string-range') | |
var fixRange = require('level-fix-range') | |
var xtend = require('xtend') | |
var Batch = require('./batch') | |
inherits(SubDB, EventEmitter) | |
// A namespaced view over a parent levelup db; keys are transparently
// wrapped in "<sep><prefix><sep>" on the way in and stripped on the way out.
function SubDB (db, prefix, options) {
  // Legacy calling convention: a string third argument was the separator.
  if ('string' === typeof options) {
    console.error('db.sublevel(name, seperator<string>) is depreciated')
    console.error('use db.sublevel(name, {sep: separator})) if you must')
    options = {sep: options}
  }
  if (!(this instanceof SubDB)) return new SubDB(db, prefix, options)
  if (!db) throw new Error('must provide db')
  if (!prefix) throw new Error('must provide prefix')

  options = options || {}
  options.sep = options.sep || '\xff'

  this._parent = db
  this._options = options
  this.options = options
  this._prefix = prefix
  this._root = root(this)
  db.sublevels[prefix] = this
  this.sublevels = {}
  this.methods = {}

  // Stable bound wrappers so consumers can pass db.hooks.pre/post around.
  var subdb = this
  this.hooks = {
    pre: function () {
      return subdb.pre.apply(subdb, arguments)
    },
    post: function () {
      return subdb.post.apply(subdb, arguments)
    }
  }
}
var SDB = SubDB.prototype | |
// Wrap a key with this sublevel's separator and prefix.
SDB._key = function (key) {
  var separator = this._options.sep
  return separator + this._prefix + separator + key
}
// Normalize the (opts, cb) tail of a method call: opts may be omitted,
// in which case the callback arrives in its slot.
SDB._getOptsAndCb = function (opts, cb) {
  if (typeof opts === 'function') {
    cb = opts
    opts = {}
  }
  return { opts: xtend(opts, this._options), cb: cb }
}
// Return (or lazily create) a nested sublevel named `prefix`.
SDB.sublevel = function (prefix, options) {
  var existing = this.sublevels[prefix]
  if (existing) return existing
  return new SubDB(this, prefix, options || this._options)
}
// Write `value` under the prefixed form of `key` on the root db.
SDB.put = function (key, value, opts, cb) {
  var normalized = this._getOptsAndCb(opts, cb)
  this._root.put(this.prefix(key), value, normalized.opts, normalized.cb)
}
// Read the value stored under the prefixed form of `key` from the root db.
SDB.get = function (key, opts, cb) {
  var normalized = this._getOptsAndCb(opts, cb)
  this._root.get(this.prefix(key), normalized.opts, normalized.cb)
}
// Delete the prefixed form of `key` from the root db.
SDB.del = function (key, opts, cb) {
  var normalized = this._getOptsAndCb(opts, cb)
  this._root.del(this.prefix(key), normalized.opts, normalized.cb)
}
// Apply an array of changes atomically, rewriting each key with the
// appropriate prefix. Called without an array, returns a chained Batch.
SDB.batch = function (changes, opts, cb) {
  if (!Array.isArray(changes)) return new Batch(this)
  var self = this
  var res = this._getOptsAndCb(opts, cb)
  changes.forEach(function (change) {
    // TODO: validate that updating this key/prefix is allowed.
    if ('string' === typeof change.prefix) {
      change.key = change.prefix + change.key
    } else {
      // `prefix` may be another sublevel; default to this one.
      change.key = (change.prefix || self).prefix(change.key)
    }
    if (change.prefix) change.prefix = null
  })
  this._root.batch(changes, res.opts, res.cb)
}
// Key encoding: own setting wins, otherwise inherited from the parent chain.
SDB._getKeyEncoding = function () {
  var own = this.options.keyEncoding
  if (own) return own
  var parent = this._parent
  if (parent && parent._getKeyEncoding) return parent._getKeyEncoding()
}
// Value encoding: own setting wins, otherwise inherited from the parent chain.
SDB._getValueEncoding = function () {
  var own = this.options.valueEncoding
  if (own) return own
  var parent = this._parent
  if (parent && parent._getValueEncoding) return parent._getValueEncoding()
}
// Full prefix for this sublevel (parents included); with a `key`,
// returns the fully-qualified key.
SDB.prefix = function (key) {
  var separator = this._options.sep
  var suffix = key || ''
  return this._parent.prefix() + separator + this._prefix + separator + suffix
}
SDB.keyStream =
SDB.createKeyStream = function (opts) {
  // Key-only stream: force keys on and values off.
  var streamOpts = opts || {}
  streamOpts.keys = true
  streamOpts.values = false
  return this.createReadStream(streamOpts)
}
SDB.valueStream =
SDB.createValueStream = function (opts) {
  // Value-only stream: suppress keys, emit values.
  // (Removed a duplicated `opts.keys = false` assignment.)
  opts = opts || {}
  opts.keys = false
  opts.values = true
  return this.createReadStream(opts)
}
// Copy only the whitelisted read-stream options from `opts` onto `_opts`,
// leaving everything else (notably computed range bounds) untouched.
function selectivelyMerge(_opts, opts) {
  var allowed = [
    'valueEncoding', 'encoding', 'keyEncoding', 'reverse',
    'values', 'keys', 'limit', 'fillCache'
  ]
  for (var i = 0; i < allowed.length; i++) {
    var option = allowed[i]
    if (opts.hasOwnProperty(option)) _opts[option] = opts[option]
  }
}
// Create a read stream over this sublevel's key range, stripping the
// prefix from emitted keys. Supports both Streams2 (`read`) and classic
// emit-based streams by monkey-patching whichever the backend exposes.
SDB.readStream =
SDB.createReadStream = function (opts) {
  opts = opts || {}
  var r = root(this)
  var p = this.prefix()
  // Convert user-facing range opts into prefixed range bounds.
  var _opts = ranges.prefix(opts, p)
  // Carry over only the whitelisted stream options (see selectivelyMerge).
  selectivelyMerge(_opts, xtend(opts, this._options))
  var s = r.createReadStream(_opts)
  if(_opts.values === false) {
    // Key-only stream: each datum is the raw key string; strip the prefix.
    var read = s.read
    if (read) {
      // Streams2: wrap read() to rewrite the pulled value.
      s.read = function (size) {
        var val = read.call(this, size)
        if (val) val = val.substring(p.length)
        return val
      }
    } else {
      // Classic stream: intercept 'data' events instead.
      var emit = s.emit
      s.emit = function (event, val) {
        if(event === 'data') {
          emit.call(this, 'data', val.substring(p.length))
        } else
          emit.call(this, event, val)
      }
    }
    return s
  } else if(_opts.keys === false)
    // Value-only stream: nothing to strip.
    return s
  else {
    // Key+value stream: each datum is {key, value}; strip prefix from key.
    var read = s.read
    if (read) {
      s.read = function (size) {
        var d = read.call(this, size)
        if (d) d.key = d.key.substring(p.length)
        return d
      }
    } else {
      s.on('data', function (d) {
        //mutate the prefix!
        //this doesn't work for createKeyStream admittedly.
        d.key = d.key.substring(p.length)
      })
    }
    return s
  }
}
// Create a write stream that prefixes every written key and propagates
// this sublevel's encodings onto each datum that doesn't set its own.
SDB.writeStream =
SDB.createWriteStream = function () {
  var r = root(this)
  var p = this.prefix()
  var ws = r.createWriteStream.apply(r, arguments)
  var write = ws.write
  var encoding = this._options.encoding
  var valueEncoding = this._options.valueEncoding
  var keyEncoding = this._options.keyEncoding
  // slight optimization, if no encoding was specified at all,
  // which will be the case most times, make write not check at all
  var nocheck = !encoding && !valueEncoding && !keyEncoding
  ws.write = nocheck
    ? function (data) {
        // Fast path: just prefix the key.
        data.key = p + data.key
        return write.call(ws, data)
      }
    : function (data) {
        data.key = p + data.key
        // not merging all options here since this happens on every write and things could get slowed down
        // at this point we only consider encoding important to propagate
        if (encoding && typeof data.encoding === 'undefined')
          data.encoding = encoding
        if (valueEncoding && typeof data.valueEncoding === 'undefined')
          data.valueEncoding = valueEncoding
        if (keyEncoding && typeof data.keyEncoding === 'undefined')
          data.keyEncoding = keyEncoding
        return write.call(ws, data)
      }
  return ws
}
// Proxy approximateSize to the root database.
// BUG FIX: the original called root(db), but no `db` exists in this
// scope (a ReferenceError on every call); the receiver is `this`.
SDB.approximateSize = function () {
  var r = root(this)
  return r.approximateSize.apply(r, arguments)
}
// Walk up the _parent chain to the top-level database.
function root(db) {
  var node = db
  while (node._parent) node = node._parent
  return node
}
// Register a pre-write hook scoped to this sublevel. The supplied range
// (optional) is narrowed to this sublevel's prefix; hooked operations are
// presented to `hook` with the prefix stripped, and `add`ed operations are
// re-prefixed unless they carry their own prefix.
SDB.pre = function (range, hook) {
  // hook-only call: shift arguments (comma expression is intentional).
  if(!hook) hook = range, range = null
  range = ranges.prefix(range, this.prefix(), this._options.sep)
  var r = root(this._parent)
  var p = this.prefix()
  return r.hooks.pre(fixRange(range), function (ch, add, batch) {
    // Present the change with a sublevel-relative key.
    hook({
      key: ch.key.substring(p.length),
      value: ch.value,
      type: ch.type
    }, function (ch, _p) {
      //maybe remove the second add arg now
      //that op can have prefix?
      add(ch, ch.prefix ? _p : (_p || p))
    }, batch)
  })
}
// Register a post-write hook scoped to this sublevel; delivered changes
// have the sublevel prefix stripped from their keys.
SDB.post = function (range, hook) {
  if (!hook) {
    hook = range
    range = null
  }
  var r = root(this._parent)
  var p = this.prefix()
  range = ranges.prefix(range, p, this._options.sep)
  return r.hooks.post(fixRange(range), function (data) {
    hook({
      key: data.key.substring(p.length),
      value: data.value,
      type: data.type
    })
  })
}
var exports = module.exports = SubDB | |
},{"./batch":34,"events":18,"level-fix-range":36,"string-range":39,"util":32,"xtend":41}],47:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var util = require('./util') | |
, WriteError = require('./errors').WriteError | |
, getOptions = util.getOptions | |
, dispatchError = util.dispatchError | |
// Chained batch wrapper around the backend's native batch. `ops` mirrors
// every queued operation so a 'batch' event can be emitted on write().
function Batch (levelup) {
  this._levelup = levelup
  this.ops = []
  this.batch = levelup.db.batch()
}
// Queue a put; key and value are encoded per the levelup options.
// Backend failures are surfaced as WriteError. Chainable.
Batch.prototype.put = function (key_, value_, options) {
  options = getOptions(this._levelup, options)
  var key = util.encodeKey(key_, options)
  var value = util.encodeValue(value_, options)
  try {
    this.batch.put(key, value)
  } catch (e) {
    throw new WriteError(e)
  }
  this.ops.push({ type: 'put', key: key, value: value })
  return this
}
// Queue a delete for the encoded form of `key_`. Chainable.
Batch.prototype.del = function (key_, options) {
  options = getOptions(this._levelup, options)
  var key = util.encodeKey(key_, options)
  try {
    this.batch.del(key)
  } catch (err) {
    throw new WriteError(err)
  }
  this.ops.push({ type: 'del', key: key })
  return this
}
// Discard all queued operations, resetting both the backend batch
// and the mirrored op log. Chainable.
Batch.prototype.clear = function () {
  try {
    this.batch.clear()
  } catch (err) {
    throw new WriteError(err)
  }
  this.ops = []
  return this
}
// Commit the batch. On success emits 'batch' with the mirrored ops and
// invokes `callback`; async failures go through dispatchError, while
// synchronous backend failures are rethrown as WriteError.
Batch.prototype.write = function (callback) {
  var levelup = this._levelup
  var ops = this.ops
  try {
    this.batch.write(function (err) {
      if (err) return dispatchError(levelup, new WriteError(err), callback)
      levelup.emit('batch', ops)
      if (callback) callback()
    })
  } catch (err) {
    throw new WriteError(err)
  }
}
module.exports = Batch | |
},{"./errors":48,"./util":51}],48:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
// Error hierarchy for LevelUP, built with `errno`'s custom-error factory.
// Every error type inherits from LevelUPError so callers can catch broadly.
var createError = require('errno').create
  , LevelUPError = createError('LevelUPError')
  , NotFoundError = createError('NotFoundError', LevelUPError)
// NotFoundError is tagged so callers can duck-type on `err.notFound`
// or the HTTP-style `err.status === 404` instead of using instanceof.
NotFoundError.prototype.notFound = true
NotFoundError.prototype.status = 404
module.exports = {
  LevelUPError : LevelUPError
  , InitializationError : createError('InitializationError', LevelUPError)
  , OpenError : createError('OpenError', LevelUPError)
  , ReadError : createError('ReadError', LevelUPError)
  , WriteError : createError('WriteError', LevelUPError)
  , NotFoundError : NotFoundError
  , EncodingError : createError('EncodingError', LevelUPError)
}
},{"errno":59}],49:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var EventEmitter = require('events').EventEmitter | |
, inherits = require('util').inherits | |
, extend = require('xtend') | |
, prr = require('prr') | |
, DeferredLevelDOWN = require('deferred-leveldown') | |
, WriteError = require('./errors').WriteError | |
, ReadError = require('./errors').ReadError | |
, NotFoundError = require('./errors').NotFoundError | |
, OpenError = require('./errors').OpenError | |
, EncodingError = require('./errors').EncodingError | |
, InitializationError = require('./errors').InitializationError | |
, ReadStream = require('./read-stream') | |
, WriteStream = require('./write-stream') | |
, util = require('./util') | |
, Batch = require('./batch') | |
, getOptions = util.getOptions | |
, defaultOptions = util.defaultOptions | |
, getLevelDOWN = util.getLevelDOWN | |
, dispatchError = util.dispatchError | |
// When `options` is omitted, callers pass the callback in its slot;
// pick whichever argument is actually the function.
function getCallback (options, callback) {
  if (typeof options === 'function') return options
  return callback
}
// Possible LevelUP#_status values: | |
// - 'new' - newly created, not opened or closed | |
// - 'opening' - waiting for the database to be opened, post open() | |
// - 'open' - successfully opened the database, available for use | |
// - 'closing' - waiting for the database to be closed, post close() | |
// - 'closed' - database has been successfully closed, should not be | |
// used except for another open() operation | |
// LevelUP constructor. Accepts flexible arguments:
//   LevelUP(location[, options][, callback])      - string location
//   LevelUP(dbFactory[, options][, callback])     - backend factory fn
//   LevelUP({ db: factory, ... }[, callback])     - options-only form
// Works with or without `new`. Opens the database immediately.
function LevelUP (location, options, callback) {
  if (!(this instanceof LevelUP))
    return new LevelUP(location, options, callback)
  var error
  EventEmitter.call(this)
  this.setMaxListeners(Infinity)
  // A function `location` is actually a LevelDOWN-style factory.
  if (typeof location == 'function') {
    options = typeof options == 'object' ? options : {}
    options.db = location
    location = null
  } else if (typeof location == 'object' && typeof location.db == 'function') {
    // Options-only form: the options object carries the backend factory.
    options = location
    location = null
  }
  if (typeof options == 'function') {
    callback = options
    options = {}
  }
  // Without a backend factory a string location is mandatory.
  if ((!options || typeof options.db != 'function') && typeof location != 'string') {
    error = new InitializationError(
      'Must provide a location for the database')
    if (callback) {
      // Deliver the error asynchronously, never synchronously.
      return process.nextTick(function () {
        callback(error)
      })
    }
    throw error
  }
  options = getOptions(this, options)
  this.options = extend(defaultOptions, options)
  this._status = 'new'
  // set this.location as enumerable but not configurable or writable
  prr(this, 'location', location, 'e')
  this.open(callback)
}
inherits(LevelUP, EventEmitter) | |
// Open (or re-open) the underlying store. Safe to call repeatedly: an
// already-open db calls back on next tick; a db mid-open queues the
// callback on the 'open' event. Emits 'opening', then 'open' and 'ready'.
LevelUP.prototype.open = function (callback) {
  var self = this
    , dbFactory
    , db
  if (this.isOpen()) {
    if (callback)
      process.nextTick(function () { callback(null, self) })
    return this
  }
  if (this._isOpening()) {
    return callback && this.once(
      'open'
      , function () { callback(null, self) }
    )
  }
  this.emit('opening')
  this._status = 'opening'
  // DeferredLevelDOWN queues operations issued before the real backend
  // finishes opening; setDb() below replays them against it.
  this.db = new DeferredLevelDOWN(this.location)
  dbFactory = this.options.db || getLevelDOWN()
  db = dbFactory(this.location)
  db.open(this.options, function (err) {
    if (err) {
      return dispatchError(self, new OpenError(err), callback)
    } else {
      self.db.setDb(db)
      // Swap the deferred wrapper for the real backend.
      self.db = db
      self._status = 'open'
      if (callback)
        callback(null, self)
      self.emit('open')
      self.emit('ready')
    }
  })
}
// Close the underlying store. Handles every status: open (closes now),
// closed (callback next tick), closing (callback on 'closed'), and
// opening (waits for 'open', then closes). Emits 'closing' then 'closed'.
LevelUP.prototype.close = function (callback) {
  var self = this
  if (this.isOpen()) {
    this._status = 'closing'
    this.db.close(function () {
      self._status = 'closed'
      self.emit('closed')
      if (callback)
        callback.apply(null, arguments)
    })
    this.emit('closing')
    // Drop the backend reference immediately; further ops will fail fast.
    this.db = null
  } else if (this._status == 'closed' && callback) {
    return process.nextTick(callback)
  } else if (this._status == 'closing' && callback) {
    this.once('closed', callback)
  } else if (this._isOpening()) {
    this.once('open', function () {
      self.close(callback)
    })
  }
}
// True once open() has completed successfully.
LevelUP.prototype.isOpen = function () {
  return this._status === 'open'
}
// True while open() is in flight but has not yet completed.
LevelUP.prototype._isOpening = function () {
  return this._status === 'opening'
}
// True while closing or once closed (the two 'clos*' statuses; see the
// status list documented above LevelUP's constructor).
LevelUP.prototype.isClosed = function () {
  return this._status === 'closing' || this._status === 'closed'
}
// Fetch a single value by key. A missing key yields NotFoundError, other
// backend failures ReadError, and decode failures EncodingError; all are
// routed through dispatchError (or the callback for decode errors).
LevelUP.prototype.get = function (key_, options, callback) {
  var self = this
    , key
  callback = getCallback(options, callback)
  if (typeof callback != 'function') {
    return dispatchError(
      this
      , new ReadError('get() requires key and callback arguments')
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new ReadError('Database is not open')
      , callback
    )
  }
  options = util.getOptions(this, options)
  key = util.encodeKey(key_, options)
  options.asBuffer = util.isValueAsBuffer(options)
  this.db.get(key, options, function (err, value) {
    if (err) {
      // LevelDOWN reports missing keys via the error message; translate
      // that into a typed NotFoundError for callers.
      if ((/notfound/i).test(err)) {
        err = new NotFoundError(
          'Key not found in database [' + key_ + ']', err)
      } else {
        err = new ReadError(err)
      }
      return dispatchError(self, err, callback)
    }
    if (callback) {
      try {
        value = util.decodeValue(value, options)
      } catch (e) {
        return callback(new EncodingError(e))
      }
      callback(null, value)
    }
  })
}
// Store `value_` under `key_`, encoding both per the effective options.
// Rejects null/undefined key or value, and writes while the db is neither
// opening nor open. Emits 'put' with the ORIGINAL (unencoded) arguments.
LevelUP.prototype.put = function (key_, value_, options, callback) {
  var self = this
    , key
    , value
  callback = getCallback(options, callback)
  if (key_ === null || key_ === undefined
    || value_ === null || value_ === undefined) {
    return dispatchError(
      this
      , new WriteError('put() requires key and value arguments')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  key = util.encodeKey(key_, options)
  value = util.encodeValue(value_, options)
  this.db.put(key, value, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      self.emit('put', key_, value_)
      if (callback)
        callback()
    }
  })
}
// Delete the entry for `key_`. Rejects null/undefined keys and writes
// while the db is neither opening nor open. Emits 'del' with the
// ORIGINAL (unencoded) key.
LevelUP.prototype.del = function (key_, options, callback) {
  var self = this
    , key
  callback = getCallback(options, callback)
  if (key_ === null || key_ === undefined) {
    return dispatchError(
      this
      , new WriteError('del() requires a key argument')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  key = util.encodeKey(key_, options)
  this.db.del(key, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      self.emit('del', key_)
      if (callback)
        callback()
    }
  })
}
// Apply an array of {type, key[, value]} operations atomically. Called
// with no arguments, returns a chained Batch instead. Each entry may
// override the key/value encodings; entries lacking type or key are
// mapped to {} (the backend rejects them). Emits 'batch' with the
// ORIGINAL (unencoded) array.
LevelUP.prototype.batch = function (arr_, options, callback) {
  var self = this
    , keyEnc
    , valueEnc
    , arr
  if (!arguments.length)
    return new Batch(this)
  callback = getCallback(options, callback)
  if (!Array.isArray(arr_)) {
    return dispatchError(
      this
      , new WriteError('batch() requires an array argument')
      , callback
    )
  }
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  options = getOptions(this, options)
  keyEnc = options.keyEncoding
  valueEnc = options.valueEncoding
  arr = arr_.map(function (e) {
    if (e.type === undefined || e.key === undefined)
      return {}
    // inherit encoding
    var kEnc = e.keyEncoding || keyEnc
      , vEnc = e.valueEncoding || e.encoding || valueEnc
      , o
    // If we're not dealing with plain utf8 strings or plain
    // Buffers then we have to do some work on the array to
    // encode the keys and/or values. This includes JSON types.
    if (kEnc != 'utf8' && kEnc != 'binary'
      || vEnc != 'utf8' && vEnc != 'binary') {
      o = {
        type: e.type
        , key: util.encodeKey(e.key, options, e)
      }
      if (e.value !== undefined)
        o.value = util.encodeValue(e.value, options, e)
      return o
    } else {
      // Fast path: pass the entry through untouched.
      return e
    }
  })
  this.db.batch(arr, options, function (err) {
    if (err) {
      return dispatchError(self, new WriteError(err), callback)
    } else {
      self.emit('batch', arr_)
      if (callback)
        callback()
    }
  })
}
// DEPRECATED: prefer accessing LevelDOWN for this: db.db.approximateSize() | |
// DEPRECATED passthrough to LevelDOWN's approximateSize. Requires
// non-null start/end keys and a callback; keys are encoded with the
// db-level options before being handed to the backend.
LevelUP.prototype.approximateSize = function (start_, end_, callback) {
  var self = this
    , start
    , end
  if (start_ === null || start_ === undefined
    || end_ === null || end_ === undefined
    || typeof callback != 'function') {
    return dispatchError(
      this
      , new ReadError('approximateSize() requires start, end and callback arguments')
      , callback
    )
  }
  start = util.encodeKey(start_, this.options)
  end = util.encodeKey(end_, this.options)
  if (!this._isOpening() && !this.isOpen()) {
    return dispatchError(
      this
      , new WriteError('Database is not open')
      , callback
    )
  }
  this.db.approximateSize(start, end, function (err, size) {
    if (err) {
      return dispatchError(self, new OpenError(err), callback)
    } else if (callback) {
      callback(null, size)
    }
  })
}
// Stream entries from the db; per-call options are layered over the
// db-level options, and the iterator is created lazily by ReadStream.
LevelUP.prototype.readStream =
LevelUP.prototype.createReadStream = function (options) {
  var self = this
  var merged = extend(this.options, options)
  var makeIterator = function (iteratorOptions) {
    return self.db.iterator(iteratorOptions)
  }
  return new ReadStream(merged, this, makeIterator)
}
// Read stream restricted to keys only.
LevelUP.prototype.keyStream =
LevelUP.prototype.createKeyStream = function (options) {
  var keyOnly = extend(options, { keys: true, values: false })
  return this.createReadStream(keyOnly)
}
// Read stream restricted to values only.
LevelUP.prototype.valueStream =
LevelUP.prototype.createValueStream = function (options) {
  var valueOnly = extend(options, { keys: false, values: true })
  return this.createReadStream(valueOnly)
}
// Writable stream into this db; extend() clones `options` so the
// caller's object is never mutated.
LevelUP.prototype.writeStream =
LevelUP.prototype.createWriteStream = function (options) {
  var cloned = extend(options)
  return new WriteStream(cloned, this)
}
// Readable type tag for logging/debugging.
LevelUP.prototype.toString = function () {
  return 'LevelUP'
}
// Build a static helper (destroy/repair) that proxies `name` straight
// to the LevelDOWN backend, defaulting to a no-op callback.
function utilStatic (name) {
  return function (location, callback) {
    var backend = getLevelDOWN()
    backend[name](location, callback || function () {})
  }
}
module.exports = LevelUP | |
module.exports.copy = util.copy | |
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').destroy() | |
module.exports.destroy = utilStatic('destroy') | |
// DEPRECATED: prefer accessing LevelDOWN for this: require('leveldown').repair() | |
module.exports.repair = utilStatic('repair') | |
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"./batch":47,"./errors":48,"./read-stream":50,"./util":51,"./write-stream":52,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"deferred-leveldown":54,"events":18,"prr":60,"util":32,"xtend":70}],50:[function(require,module,exports){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
// NOTE: we are fixed to [email protected] for now | |
// for pure Streams2 across Node versions | |
// Module-level helpers for ReadStream. The makeXxxData functions shape
// each iterator result according to the keys/values options; they are
// invoked with the stream as `this` so they can read this._options.
var Readable = require('readable-stream').Readable
  , inherits = require('util').inherits
  , extend = require('xtend')
  , EncodingError = require('./errors').EncodingError
  , util = require('./util')
  , defaultOptions = { keys: true, values: true }
    // keys + values: emit {key, value} objects, decoded per options.
  , makeKeyValueData = function (key, value) {
      return {
        key: util.decodeKey(key, this._options)
        , value: util.decodeValue(value, this._options)
      }
    }
    // keys only: emit the decoded key itself.
  , makeKeyData = function (key) {
      return util.decodeKey(key, this._options)
    }
    // values only: emit the decoded value itself.
  , makeValueData = function (_, value) {
      return util.decodeValue(value, this._options)
    }
    // neither requested: emit nothing useful.
  , makeNoData = function () { return null }
// Streams2 Readable over a LevelDOWN iterator. `iteratorFactory` is
// called with the normalized options to create the iterator, deferred
// until the db emits 'ready' if it is not open yet.
function ReadStream (options, db, iteratorFactory) {
  if (!(this instanceof ReadStream))
    return new ReadStream(options, db, iteratorFactory)
  Readable.call(this, { objectMode: true, highWaterMark: options.highWaterMark })
  // purely to keep `db` around until we're done so it's not GCed if the user doesn't keep a ref
  this._db = db
  options = this._options = extend(defaultOptions, options)
  this._keyEncoding = options.keyEncoding || options.encoding
  this._valueEncoding = options.valueEncoding || options.encoding
  // Encode range bounds with the same encoding as stored keys.
  if (typeof this._options.start != 'undefined')
    this._options.start = util.encodeKey(this._options.start, this._options)
  if (typeof this._options.end != 'undefined')
    this._options.end = util.encodeKey(this._options.end, this._options)
  if (typeof this._options.limit != 'number')
    this._options.limit = -1
  this._options.keyAsBuffer = util.isKeyAsBuffer(this._options)
  this._options.valueAsBuffer = util.isValueAsBuffer(this._options)
  // Pick the datum-shaping function once, based on keys/values options.
  this._makeData = this._options.keys && this._options.values
    ? makeKeyValueData : this._options.keys
      ? makeKeyData : this._options.values
        ? makeValueData : makeNoData
  var self = this
  if (!this._db.isOpen()) {
    // Defer iterator creation until the backend is actually open.
    this._db.once('ready', function () {
      if (!self._destroyed) {
        self._iterator = iteratorFactory(self._options)
      }
    })
  } else
    this._iterator = iteratorFactory(this._options)
}
inherits(ReadStream, Readable) | |
// Streams2 pull: fetch the next entry from the iterator, shape it with
// _makeData, and push it. End-of-iterator (both key and value undefined)
// pushes null (EOF) and tears down; errors also route through _cleanup.
ReadStream.prototype._read = function read () {
  var self = this
  if (!self._db.isOpen()) {
    // Re-enter once the db (and therefore the iterator) is ready.
    return self._db.once('ready', function () { read.call(self) })
  }
  if (self._destroyed)
    return
  self._iterator.next(function(err, key, value) {
    if (err || (key === undefined && value === undefined)) {
      if (!err && !self._destroyed)
        self.push(null)
      return self._cleanup(err)
    }
    try {
      value = self._makeData(key, value)
    } catch (e) {
      // Decoding failed: surface as EncodingError via 'error'.
      return self._cleanup(new EncodingError(e))
    }
    if (!self._destroyed)
      self.push(value)
  })
}
// Idempotent teardown: emit 'error' when given one, end the iterator if
// it exists, and emit 'close' once everything is released.
ReadStream.prototype._cleanup = function (err) {
  if (this._destroyed) return
  this._destroyed = true
  var self = this
  if (err) self.emit('error', err)
  if (!self._iterator) {
    self.emit('close')
    return
  }
  self._iterator.end(function () {
    self._iterator = null
    self.emit('close')
  })
}
// Public teardown: marks the stream destroyed and ends the iterator.
ReadStream.prototype.destroy = function () {
  this._cleanup()
}
// Readable type tag for logging/debugging.
ReadStream.prototype.toString = function () {
  return 'LevelUP.ReadStream'
}
module.exports = ReadStream | |
},{"./errors":48,"./util":51,"readable-stream":69,"util":32,"xtend":70}],51:[function(require,module,exports){ | |
(function (process,Buffer){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
// Shared helpers for levelup: encoding tables, option handling and
// LevelDOWN resolution.
var extend = require('xtend')
  , LevelUPError = require('./errors').LevelUPError
  // Buffer encoding names Node itself understands; each becomes a
  // pass-through binary encoding in the table below.
  , encodingNames = [
      'hex'
    , 'utf8'
    , 'utf-8'
    , 'ascii'
    , 'binary'
    , 'base64'
    , 'ucs2'
    , 'ucs-2'
    , 'utf16le'
    , 'utf-16le'
    ]
  // Defaults applied to every database unless overridden per instance.
  , defaultOptions = {
      createIfMissing : true
    , errorIfExists : false
    , keyEncoding : 'utf8'
    , valueEncoding : 'utf8'
    , compression : true
    }
  , leveldown // lazily resolved and cached by getLevelDOWN()
  // Encoder/decoder table keyed by encoding name. `buffer: true` means
  // the encoded form is handed to LevelDOWN as a Buffer.
  , encodings = (function () {
      // Missing data and Buffers are treated as already binary.
      function isBinary (data) {
        return data === undefined || data === null || Buffer.isBuffer(data)
      }
      var encodings = {}
      encodings.utf8 = encodings['utf-8'] = {
        encode : function (data) {
          return isBinary(data) ? data : String(data)
        }
      , decode : function (data) {
          return data
        }
      , buffer : false
      , type : 'utf8'
      }
      encodings.json = {
        encode : JSON.stringify
      , decode : JSON.parse
      , buffer : false
      , type : 'json'
      }
      encodingNames.forEach(function (type) {
        if (encodings[type])
          return
        encodings[type] = {
          encode : function (data) {
            return isBinary(data) ? data : new Buffer(data, type)
          }
        , decode : function (buffer) {
            // In the browser shim values are decoded back to strings; in
            // Node the Buffer is returned untouched.
            return process.browser ? buffer.toString(type) : buffer;
          }
        , buffer : true
        , type : type // useful for debugging purposes
        }
      })
      return encodings
    })()
  // Map of encoding name -> ready-made options object, used when a plain
  // string is passed where an options object is expected.
  , encodingOpts = (function () {
      var eo = {}
      encodingNames.forEach(function (e) {
        eo[e] = { valueEncoding : e }
      })
      return eo
    }())
/**
 * Pipe every entry from `srcdb` into `dstdb`.
 *
 * Fix: the original attached `callback` to both the 'close' and 'error'
 * events, so a failed copy could invoke the callback twice (once with the
 * error, again when the stream closed). Guard so it fires at most once.
 *
 * @param {Object} srcdb      levelup instance to read from
 * @param {Object} dstdb      levelup instance to write to
 * @param {Function} [callback] invoked once: (err) on failure, () on success;
 *                              without a callback an error is thrown
 */
function copy (srcdb, dstdb, callback) {
  var called = false
  function done (err) {
    if (called)
      return
    called = true
    if (callback)
      return callback(err)
    if (err)
      throw err // no callback supplied: surface the failure loudly
  }
  srcdb.readStream()
    .pipe(dstdb.writeStream())
    .on('close', function () { done() })
    .on('error', done)
}
// Merge database-level options with call-site options. A bare string is
// shorthand for a value encoding; a legacy `encoding` key is promoted to
// `valueEncoding` (note: mutates the caller's options object, as before).
function getOptions (levelup, options) {
  var isEncodingString = typeof options == 'string' // just an encoding name
  if (!isEncodingString && options && options.encoding && !options.valueEncoding)
    options.valueEncoding = options.encoding
  var base = (levelup && levelup.options) || {}
  var overrides = isEncodingString
    ? encodingOpts[options] || encodingOpts[defaultOptions.valueEncoding]
    : options
  return extend(base, overrides)
}
// Lazily resolve the optional LevelDOWN native backend, verifying that the
// installed version satisfies the semver range this levelup was built
// against. Throws LevelUPError when it is missing or incompatible.
function getLevelDOWN () {
  if (leveldown)
    return leveldown
  var requiredVersion = require('../package.json').devDependencies.leveldown
    , missingLevelDOWNError = 'Could not locate LevelDOWN, try `npm install leveldown`'
    , leveldownVersion
  try {
    leveldownVersion = require('leveldown/package').version
  } catch (e) {
    throw new LevelUPError(missingLevelDOWNError)
  }
  if (!require('semver').satisfies(leveldownVersion, requiredVersion)) {
    throw new LevelUPError(
        'Installed version of LevelDOWN ('
      + leveldownVersion
      + ') does not match required version ('
      + requiredVersion
      + ')'
    )
  }
  try {
    // Cache the module so subsequent calls skip the version check.
    return leveldown = require('leveldown')
  } catch (e) {
    throw new LevelUPError(missingLevelDOWNError)
  }
}
// Route an error either to the supplied callback or, when none was given,
// to the levelup instance's 'error' event.
function dispatchError (levelup, error, callback) {
  if (typeof callback == 'function') {
    return callback(error)
  }
  return levelup.emit('error', error)
}
// Resolve the key encoder for an operation, preferring per-op settings
// over database-wide options and defaulting to utf8. Unknown names are
// returned as-is (caller-supplied custom encoder objects pass through).
function getKeyEncoder (options, op) {
  var name = (op && op.keyEncoding) || options.keyEncoding || 'utf8'
  return encodings[name] || name
}

// Resolve the value encoder; `encoding` is accepted as a legacy alias for
// `valueEncoding` at both the op and the options level.
function getValueEncoder (options, op) {
  var name = (op && (op.valueEncoding || op.encoding))
    || options.valueEncoding
    || options.encoding
    || 'utf8'
  return encodings[name] || name
}
// Thin conveniences over the encoder lookups above.
function encodeKey (key, options, op) {
  var enc = getKeyEncoder(options, op)
  return enc.encode(key)
}
function encodeValue (value, options, op) {
  var enc = getValueEncoder(options, op)
  return enc.encode(value)
}
function decodeKey (key, options) {
  var enc = getKeyEncoder(options)
  return enc.decode(key)
}
function decodeValue (value, options) {
  var enc = getValueEncoder(options)
  return enc.decode(value)
}
// Whether the resolved encoding hands LevelDOWN a Buffer.
function isValueAsBuffer (options, op) {
  var enc = getValueEncoder(options, op)
  return enc.buffer
}
function isKeyAsBuffer (options, op) {
  var enc = getKeyEncoder(options, op)
  return enc.buffer
}
// Public surface of this helper module.
module.exports = {
    defaultOptions  : defaultOptions
  , copy            : copy
  , getOptions      : getOptions
  , getLevelDOWN    : getLevelDOWN
  , dispatchError   : dispatchError
  , encodeKey       : encodeKey
  , encodeValue     : encodeValue
  , isValueAsBuffer : isValueAsBuffer
  , isKeyAsBuffer   : isKeyAsBuffer
  , decodeValue     : decodeValue
  , decodeKey       : decodeKey
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"../package.json":71,"./errors":48,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"buffer":20,"leveldown":"1XEYOF","leveldown/package":17,"semver":17,"xtend":70}],52:[function(require,module,exports){ | |
(function (process,global){ | |
/* Copyright (c) 2012-2014 LevelUP contributors | |
* See list at <https://github.com/rvagg/node-levelup#contributing> | |
* MIT License | |
* <https://github.com/rvagg/node-levelup/blob/master/LICENSE.md> | |
*/ | |
var Stream = require('stream').Stream | |
, inherits = require('util').inherits | |
, extend = require('xtend') | |
, bl = require('bl') | |
, setImmediate = global.setImmediate || process.nextTick | |
, getOptions = require('./util').getOptions | |
, defaultOptions = { type: 'put' } | |
// Legacy (streams1) writable stream that buffers writes and flushes them
// to the database in batches.
// Status values: 'init' -> 'ready' <-> 'writing' -> 'closed'.
function WriteStream (options, db) {
  if (!(this instanceof WriteStream))
    return new WriteStream(options, db)
  Stream.call(this)
  this._options = extend(defaultOptions, getOptions(db, options))
  this._db = db
  this._buffer = []      // queued entries awaiting the next batch
  this._status = 'init'
  this._end = false      // end() requested
  this.writable = true
  this.readable = false
  var self = this
    , ready = function () {
        // The stream may have been destroyed while the db was opening.
        if (!self.writable)
          return
        self._status = 'ready'
        self.emit('ready')
        self._process()
      }
  if (db.isOpen())
    setImmediate(ready)
  else
    db.once('ready', ready)
}
inherits(WriteStream, Stream)
// Queue `data` for the next batch. Per the streams1 contract, returns
// false once the configured maxBufferLength is exceeded, asking the
// producer to pause until 'drain'.
WriteStream.prototype.write = function (data) {
  if (!this.writable)
    return false
  this._buffer.push(data)
  if (this._status !== 'init')
    this._processDelayed()
  var max = this._options.maxBufferLength
  if (max && this._buffer.length > max) {
    this._writeBlock = true
    return false
  }
  return true
}
// Optionally write a final chunk, then mark the stream finished on the
// next tick so pending synchronous writes are buffered first.
WriteStream.prototype.end = function (data) {
  var self = this
  if (data)
    this.write(data)
  setImmediate(function () {
    self._end = true
    self._process()
  })
}

// Hard stop: refuse further writes, then flush and close what we have.
WriteStream.prototype.destroy = function () {
  this.writable = false
  this.end()
}

// Soft stop: finish outstanding writes, then close.
WriteStream.prototype.destroySoon = function () {
  this.end()
}
// fstream integration: accept an fstream entry. Directories recurse into
// a fresh WriteStream; files are collected via _write.
// NOTE(review): returns undefined (falsy) when entry.props is missing,
// true otherwise even if neither branch matched — presumably relied on by
// fstream's collection protocol; verify before changing.
WriteStream.prototype.add = function (entry) {
  if (!entry.props)
    return
  if (entry.props.Directory)
    entry.pipe(this._db.writeStream(this._options))
  else if (entry.props.File || entry.File || entry.type == 'File')
    this._write(entry)
  return true
}
// Schedule a _process() pass on the next tick; lets multiple synchronous
// writes coalesce into a single batch and avoids deep recursion.
WriteStream.prototype._processDelayed = function () {
  var self = this
  setImmediate(function () {
    self._process()
  })
}
// Core pump: drain the buffer into db.batch() one batch at a time, then
// close once end() has been requested and everything is flushed.
WriteStream.prototype._process = function () {
  var buffer
    , self = this
      // Runs after each batch: return to 'ready' and pump again (there
      // may be more buffered writes, or a pending close).
    , cb = function (err) {
        if (!self.writable)
          return
        if (self._status != 'closed')
          self._status = 'ready'
        if (err) {
          self.writable = false
          return self.emit('error', err)
        }
        self._process()
      }
  // Only one batch may be in flight: if not 'ready', try again later.
  if (self._status != 'ready' && self.writable) {
    if (self._buffer.length && self._status != 'closed')
      self._processDelayed()
    return
  }
  if (self._buffer.length && self.writable) {
    self._status = 'writing'
    buffer = self._buffer
    self._buffer = []
    // Normalise each queued entry into a batch operation, falling back to
    // the stream-wide type/encoding options.
    self._db.batch(buffer.map(function (d) {
      return {
        type : d.type || self._options.type
      , key : d.key
      , value : d.value
      , keyEncoding : d.keyEncoding || self._options.keyEncoding
      , valueEncoding : d.valueEncoding
          || d.encoding
          || self._options.valueEncoding
      }
    }), cb)
    // The buffer was just emptied: release a producer blocked by write().
    if (self._writeBlock) {
      self._writeBlock = false
      self.emit('drain')
    }
    // don't allow close until callback has returned
    return
  }
  if (self._end && self._status != 'closed') {
    self._status = 'closed'
    self.writable = false
    self.emit('close')
  }
}
// fstream file entry: buffer the whole file's contents via bl, then queue
// it as a single { key: path, value: contents } write.
WriteStream.prototype._write = function (entry) {
  var key = entry.path || entry.props.path
    , self = this
  if (!key)
    return
  entry.pipe(bl(function (err, data) {
    if (err) {
      self.writable = false
      return self.emit('error', err)
    }
    // Strip the fstream root prefix so keys are relative paths.
    // NOTE(review): indexOf(...) > -1 matches the root anywhere in the
    // path, not just as a prefix — presumably fstream roots only ever
    // occur at the start; verify against fstream usage.
    if (self._options.fstreamRoot &&
        key.indexOf(self._options.fstreamRoot) > -1)
      key = key.substr(self._options.fstreamRoot.length + 1)
    self.write({ key: key, value: data.slice(0) })
  }))
}
// Identify this stream type when stringified.
WriteStream.prototype.toString = function () {
  return 'LevelUP.WriteStream'
}
module.exports = WriteStream
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./util":51,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"bl":53,"stream":24,"util":32,"xtend":70}],53:[function(require,module,exports){ | |
(function (Buffer){ | |
var DuplexStream = require('readable-stream').Duplex | |
, util = require('util') | |
// Duplex stream that collects incoming Buffers into a list, exposing
// random access (get/slice/copy/read*) over the concatenation without
// eagerly joining the chunks.
// `callback`, when a function, is invoked once with (err, allData) when
// the stream ends or a piped-in source errors. Alternatively a Buffer or
// an array of Buffers may be passed to pre-seed the list.
function BufferList (callback) {
  if (!(this instanceof BufferList))
    return new BufferList(callback)
  this._bufs = []  // ordered list of Buffer chunks
  this.length = 0  // total byte count across _bufs
  if (typeof callback == 'function') {
    this._callback = callback
    // Forward errors from any piped-in source to the callback (once).
    var piper = function (err) {
      if (this._callback) {
        this._callback(err)
        this._callback = null
      }
    }.bind(this)
    this.on('pipe', function (src) {
      src.on('error', piper)
    })
    this.on('unpipe', function (src) {
      src.removeListener('error', piper)
    })
  }
  else if (Buffer.isBuffer(callback))
    this.append(callback)
  else if (Array.isArray(callback)) {
    callback.forEach(function (b) {
      Buffer.isBuffer(b) && this.append(b)
    }.bind(this))
  }
  DuplexStream.call(this)
}
util.inherits(BufferList, DuplexStream)
// Map an absolute byte offset to [chunkIndex, offsetWithinChunk].
// Returns undefined when the offset lies at or past the end of the data.
BufferList.prototype._offset = function (offset) {
  var consumed = 0
  for (var idx = 0; idx < this._bufs.length; idx++) {
    var nextBoundary = consumed + this._bufs[idx].length
    if (offset < nextBoundary) {
      return [idx, offset - consumed]
    }
    consumed = nextBoundary
  }
}
// Append a chunk, coercing non-Buffer input first.
// Fix: `length` previously advanced by the *argument's* length, which for
// a non-ASCII string (or any non-Buffer whose .length is not its byte
// count) diverged from the bytes actually stored, corrupting every
// subsequent offset calculation. Measure the coerced Buffer instead.
BufferList.prototype.append = function (buf) {
  var chunk = Buffer.isBuffer(buf) ? buf : new Buffer(buf)
  this._bufs.push(chunk)
  this.length += chunk.length
  return this
}
// Writable side: stash each incoming chunk.
BufferList.prototype._write = function (buf, encoding, callback) {
  this.append(buf)
  if (callback) {
    callback()
  }
}

// Readable side: emit up to `size` bytes from the front of the list.
BufferList.prototype._read = function (size) {
  if (!this.length) {
    return this.push(null)
  }
  var take = Math.min(size, this.length)
  this.push(this.slice(0, take))
  this.consume(take)
}

// Finish the duplex stream, then hand the accumulated bytes to the
// completion callback (if one is still pending).
BufferList.prototype.end = function (chunk) {
  DuplexStream.prototype.end.call(this, chunk)
  if (this._callback) {
    this._callback(null, this.slice())
    this._callback = null
  }
}

// Random access to a single byte.
BufferList.prototype.get = function (index) {
  var single = this.slice(index, index + 1)
  return single[0]
}

// A slice is simply a copy into a freshly allocated Buffer.
BufferList.prototype.slice = function (start, end) {
  return this.copy(null, 0, start, end)
}
// Copy bytes [srcStart, srcEnd) into `dst` at dstStart, or — when dst is
// null — allocate and return a new Buffer holding those bytes (slice).
BufferList.prototype.copy = function (dst, dstStart, srcStart, srcEnd) {
  // Clamp the source range to [0, this.length].
  if (typeof srcStart != 'number' || srcStart < 0)
    srcStart = 0
  if (typeof srcEnd != 'number' || srcEnd > this.length)
    srcEnd = this.length
  // Empty ranges yield the destination untouched or an empty Buffer.
  if (srcStart >= this.length)
    return dst || new Buffer(0)
  if (srcEnd <= 0)
    return dst || new Buffer(0)
  var copy = !!dst
    , off = this._offset(srcStart)  // [chunk index, offset within chunk]
    , len = srcEnd - srcStart
    , bytes = len                   // bytes still to transfer
    , bufoff = (copy && dstStart) || 0
    , start = off[1]
    , l
    , i
  // copy/slice everything
  if (srcStart === 0 && srcEnd == this.length) {
    if (!copy) // slice, just return a full concat
      return Buffer.concat(this._bufs)
    // copy, need to copy individual buffers
    for (i = 0; i < this._bufs.length; i++) {
      this._bufs[i].copy(dst, bufoff)
      bufoff += this._bufs[i].length
    }
    return dst
  }
  // easy, cheap case where it's a subset of one of the buffers
  if (bytes <= this._bufs[off[0]].length - start) {
    return copy
      ? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
      : this._bufs[off[0]].slice(start, start + bytes)
  }
  if (!copy) // a slice, we need something to copy in to
    dst = new Buffer(len)
  // General case: walk chunks from the starting one, copying whole chunks
  // until fewer than a full chunk of bytes remain.
  for (i = off[0]; i < this._bufs.length; i++) {
    l = this._bufs[i].length - start
    if (bytes > l) {
      this._bufs[i].copy(dst, bufoff, start)
    } else {
      this._bufs[i].copy(dst, bufoff, start, start + bytes)
      break
    }
    bufoff += l
    bytes -= l
    // Only the first chunk is entered at a non-zero offset.
    if (start)
      start = 0
  }
  return dst
}
// Decode a byte range as a string.
BufferList.prototype.toString = function (encoding, start, end) {
  var section = this.slice(start, end)
  return section.toString(encoding)
}

// Discard `bytes` bytes from the front of the list, dropping whole chunks
// where possible and trimming the first remaining chunk otherwise.
BufferList.prototype.consume = function (bytes) {
  while (this._bufs.length) {
    var head = this._bufs[0]
    if (bytes > head.length) {
      bytes -= head.length
      this.length -= head.length
      this._bufs.shift()
    } else {
      this._bufs[0] = head.slice(bytes)
      this.length -= bytes
      break
    }
  }
  return this
}

// Shallow clone: a new BufferList sharing the same underlying Buffers.
BufferList.prototype.duplicate = function () {
  var clone = new BufferList()
  for (var i = 0; i < this._bufs.length; i++) {
    clone.append(this._bufs[i])
  }
  return clone
}

// Drop all data and terminate the readable side.
BufferList.prototype.destroy = function () {
  this._bufs.length = 0
  this.length = 0
  this.push(null)
}
// Generate Buffer-style numeric readers (readUInt32BE etc). Each slices
// out exactly the needed bytes (possibly spanning chunk boundaries) and
// defers to the corresponding Buffer method. Map values are byte widths.
;(function () {
  var methods = {
      'readDoubleBE' : 8
    , 'readDoubleLE' : 8
    , 'readFloatBE' : 4
    , 'readFloatLE' : 4
    , 'readInt32BE' : 4
    , 'readInt32LE' : 4
    , 'readUInt32BE' : 4
    , 'readUInt32LE' : 4
    , 'readInt16BE' : 2
    , 'readInt16LE' : 2
    , 'readUInt16BE' : 2
    , 'readUInt16LE' : 2
    , 'readInt8' : 1
    , 'readUInt8' : 1
    }
  for (var m in methods) {
    // IIFE captures `m` per iteration (var is function-scoped).
    (function (m) {
      BufferList.prototype[m] = function (offset) {
        return this.slice(offset, offset + methods[m])[m](0)
      }
    }(m))
  }
}())
module.exports = BufferList
}).call(this,require("buffer").Buffer) | |
},{"buffer":20,"readable-stream":69,"util":32}],54:[function(require,module,exports){ | |
(function (process,Buffer){ | |
var util = require('util') | |
, AbstractLevelDOWN = require('abstract-leveldown').AbstractLevelDOWN | |
// LevelDOWN stand-in that queues operations until a real backend is
// supplied via setDb(), then replays them against it.
function DeferredLevelDOWN (location) {
  AbstractLevelDOWN.call(this, typeof location == 'string' ? location : '') // optional location, who cares?
  this._db = undefined
  this._operations = [] // queued { method, args } awaiting the real db
}
util.inherits(DeferredLevelDOWN, AbstractLevelDOWN)
// called by LevelUP when we have a real DB to take its place
// Fix: drop the queue once it has been replayed. The original kept every
// queued operation (and its argument closures) alive for the lifetime of
// the object, and a repeated setDb() call would have replayed them all a
// second time.
DeferredLevelDOWN.prototype.setDb = function (db) {
  this._db = db
  var queued = this._operations
  this._operations = []
  queued.forEach(function (op) {
    db[op.method].apply(db, op.args)
  })
}
// Opening is a no-op: readiness is governed by the real backend later.
DeferredLevelDOWN.prototype._open = function (options, callback) {
  return process.nextTick(callback)
}

// Run `method` immediately when the real db exists, otherwise queue it.
DeferredLevelDOWN.prototype._operation = function (method, args) {
  if (this._db) {
    return this._db[method].apply(this._db, args)
  }
  this._operations.push({ method: method, args: args })
}

// Deferrable primitives: each funnels its arguments through _operation.
var deferrables = ['put', 'get', 'del', 'batch', 'approximateSize']
deferrables.forEach(function (m) {
  DeferredLevelDOWN.prototype['_' + m] = function () {
    this._operation(m, arguments)
  }
})
// Buffers are the only non-string keys/values recognised.
DeferredLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}

// don't need to implement this as LevelUP's ReadStream checks for 'ready' state
DeferredLevelDOWN.prototype._iterator = function () {
  throw new TypeError('not implemented')
}

module.exports = DeferredLevelDOWN
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),require("buffer").Buffer) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19,"abstract-leveldown":57,"buffer":20,"util":32}],55:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
// Chainable batch: collects put/del operations, committed atomically by
// write(). Subclasses may override _put/_del/_clear/_write.
function AbstractChainedBatch (db) {
  this._db = db
  this._operations = [] // pending { type, key[, value] } ops
  this._written = false // write() is single-use
}
// Guard called by every mutator: batches cannot be reused after write().
AbstractChainedBatch.prototype._checkWritten = function () {
  if (this._written)
    throw new Error('write() already called on this batch')
}
// Queue (or delegate) a put of `key` -> `value`. Validates both and
// coerces non-Buffers to strings, mirroring AbstractLevelDOWN.
AbstractChainedBatch.prototype.put = function (key, value) {
  this._checkWritten()
  var keyErr = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (keyErr) {
    throw keyErr
  }
  var valueErr = this._db._checkKeyValue(value, 'value', this._db._isBuffer)
  if (valueErr) {
    throw valueErr
  }
  if (!this._db._isBuffer(key)) {
    key = String(key)
  }
  if (!this._db._isBuffer(value)) {
    value = String(value)
  }
  if (typeof this._put == 'function') {
    this._put(key, value)
  } else {
    this._operations.push({ type: 'put', key: key, value: value })
  }
  return this
}

// Queue (or delegate) a deletion of `key`.
AbstractChainedBatch.prototype.del = function (key) {
  this._checkWritten()
  var keyErr = this._db._checkKeyValue(key, 'key', this._db._isBuffer)
  if (keyErr) {
    throw keyErr
  }
  if (!this._db._isBuffer(key)) {
    key = String(key)
  }
  if (typeof this._del == 'function') {
    this._del(key)
  } else {
    this._operations.push({ type: 'del', key: key })
  }
  return this
}
// Reset the batch to empty (allowed only before write()).
AbstractChainedBatch.prototype.clear = function () {
  this._checkWritten()
  this._operations = []
  if (typeof this._clear == 'function' )
    this._clear()
  return this
}
// Commit the batch. Subclasses may implement _write; otherwise the
// collected operations are handed to the db's _batch. Falls back to an
// async no-op so callers always receive their callback.
AbstractChainedBatch.prototype.write = function (options, callback) {
  this._checkWritten()
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('write() requires a callback argument')
  if (typeof options != 'object')
    options = {}
  this._written = true
  if (typeof this._write == 'function' )
    return this._write(callback)
  if (typeof this._db._batch == 'function')
    return this._db._batch(this._operations, options, callback)
  process.nextTick(callback)
}
module.exports = AbstractChainedBatch
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],56:[function(require,module,exports){ | |
(function (process){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
// Base iterator: enforces the next()/end() protocol; subclasses provide
// _next and _end for actual behaviour.
function AbstractIterator (db) {
  this.db = db
  this._ended = false   // end() has been called
  this._nexting = false // a next() is currently in flight
}
// Fetch the next entry. Protocol misuse (calling after end(), or while a
// previous next() is outstanding) is reported via the callback.
AbstractIterator.prototype.next = function (callback) {
  var self = this
  if (typeof callback != 'function')
    throw new Error('next() requires a callback argument')
  if (self._ended)
    return callback(new Error('cannot call next() after end()'))
  if (self._nexting)
    return callback(new Error('cannot call next() before previous next() has completed'))
  self._nexting = true
  if (typeof self._next == 'function') {
    return self._next(function () {
      // Clear the in-flight flag, then forward the result unchanged.
      self._nexting = false
      callback.apply(null, arguments)
    })
  }
  // Default implementation: report end-of-data asynchronously.
  process.nextTick(function () {
    self._nexting = false
    callback()
  })
}
// Release the iterator. May only be called once; a repeated call reports
// an error through the callback rather than throwing.
AbstractIterator.prototype.end = function (callback) {
  if (typeof callback != 'function') {
    throw new Error('end() requires a callback argument')
  }
  if (this._ended) {
    return callback(new Error('end() already called on iterator'))
  }
  this._ended = true
  if (typeof this._end == 'function') {
    return this._end(callback)
  }
  process.nextTick(callback)
}
module.exports = AbstractIterator
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js")) | |
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":19}],57:[function(require,module,exports){ | |
(function (process,Buffer){ | |
/* Copyright (c) 2013 Rod Vagg, MIT License */ | |
var xtend = require('xtend') | |
, AbstractIterator = require('./abstract-iterator') | |
, AbstractChainedBatch = require('./abstract-chained-batch') | |
// Base class for LevelDOWN-compatible backends. A location string is
// mandatory, even for stores that ignore it.
function AbstractLevelDOWN (location) {
  var missing = !arguments.length || location === undefined
  if (missing) {
    throw new Error('constructor requires at least a location argument')
  }
  if (typeof location != 'string') {
    throw new Error('constructor requires a location string argument')
  }
  this.location = location
}
// Open the store. `options` is optional; dispatches to the subclass _open
// or succeeds asynchronously by default.
AbstractLevelDOWN.prototype.open = function (options, callback) {
  if (typeof options == 'function') {
    callback = options
  }
  if (typeof callback != 'function') {
    throw new Error('open() requires a callback argument')
  }
  if (typeof options != 'object') {
    options = {}
  }
  if (typeof this._open == 'function') {
    return this._open(options, callback)
  }
  process.nextTick(callback)
}

// Close the store; mirrors open().
AbstractLevelDOWN.prototype.close = function (callback) {
  if (typeof callback != 'function') {
    throw new Error('close() requires a callback argument')
  }
  if (typeof this._close == 'function') {
    return this._close(callback)
  }
  process.nextTick(callback)
}
// Fetch `key`. Validates and stringifies non-Buffer keys, then defers to
// the subclass _get; without one, fails with NotFound asynchronously.
AbstractLevelDOWN.prototype.get = function (key, options, callback) {
  var err
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('get() requires a callback argument')
  // Deliberate assignment-in-condition: _checkKeyValue returns an Error
  // (truthy) for invalid input, undefined otherwise.
  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)
  if (!this._isBuffer(key))
    key = String(key)
  if (typeof options != 'object')
    options = {}
  if (typeof this._get == 'function')
    return this._get(key, options, callback)
  process.nextTick(function () { callback(new Error('NotFound')) })
}
// Store `value` under `key` after validating both; defers to _put.
AbstractLevelDOWN.prototype.put = function (key, value, options, callback) {
  var err
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('put() requires a callback argument')
  if (err = this._checkKeyValue(key, 'key', this._isBuffer))
    return callback(err)
  if (err = this._checkKeyValue(value, 'value', this._isBuffer))
    return callback(err)
  if (!this._isBuffer(key))
    key = String(key)
  // coerce value to string in node, don't touch it in browser
  // (indexeddb can store any JS type)
  if (!this._isBuffer(value) && !process.browser)
    value = String(value)
  if (typeof options != 'object')
    options = {}
  if (typeof this._put == 'function')
    return this._put(key, value, options, callback)
  process.nextTick(callback)
}
// Remove `key`; same validation/coercion pipeline as get() and put().
AbstractLevelDOWN.prototype.del = function (key, options, callback) {
  if (typeof options == 'function') {
    callback = options
  }
  if (typeof callback != 'function') {
    throw new Error('del() requires a callback argument')
  }
  var err = this._checkKeyValue(key, 'key', this._isBuffer)
  if (err) {
    return callback(err)
  }
  if (!this._isBuffer(key)) {
    key = String(key)
  }
  if (typeof options != 'object') {
    options = {}
  }
  if (typeof this._del == 'function') {
    return this._del(key, options, callback)
  }
  process.nextTick(callback)
}
// Apply an array of { type, key[, value] } operations atomically. Called
// with no arguments it returns a chained-batch object instead.
AbstractLevelDOWN.prototype.batch = function (array, options, callback) {
  if (!arguments.length)
    return this._chainedBatch()
  if (typeof options == 'function')
    callback = options
  if (typeof callback != 'function')
    throw new Error('batch(array) requires a callback argument')
  if (!Array.isArray(array))
    return callback(new Error('batch(array) requires an array argument'))
  if (typeof options != 'object')
    options = {}
  var i = 0
    , l = array.length
    , e
    , err
  // Validate every operation up front; non-object entries are skipped.
  for (; i < l; i++) {
    e = array[i]
    if (typeof e != 'object')
      continue
    // Assignment-in-condition: _checkKeyValue yields Error or undefined.
    if (err = this._checkKeyValue(e.type, 'type', this._isBuffer))
      return callback(err)
    if (err = this._checkKeyValue(e.key, 'key', this._isBuffer))
      return callback(err)
    if (e.type == 'put') {
      if (err = this._checkKeyValue(e.value, 'value', this._isBuffer))
        return callback(err)
    }
  }
  if (typeof this._batch == 'function')
    return this._batch(array, options, callback)
  process.nextTick(callback)
}
//TODO: remove from here, not a necessary primitive
// Estimate the stored size of the key range [start, end]; backends
// without _approximateSize report 0 asynchronously.
AbstractLevelDOWN.prototype.approximateSize = function (start, end, callback) {
  if ( start == null
    || end == null
    || typeof start == 'function'
    || typeof end == 'function') {
    throw new Error('approximateSize() requires valid `start`, `end` and `callback` arguments')
  }
  if (typeof callback != 'function')
    throw new Error('approximateSize() requires a callback argument')
  if (!this._isBuffer(start))
    start = String(start)
  if (!this._isBuffer(end))
    end = String(end)
  if (typeof this._approximateSize == 'function')
    return this._approximateSize(start, end, callback)
  process.nextTick(function () {
    callback(null, 0)
  })
}
// Normalise iterator options: drop empty-Buffer range bounds, coerce
// `reverse` to a boolean, and derive the legacy `start`/`exclusiveStart`
// fields from the gt/gte/lt/lte range options.
AbstractLevelDOWN.prototype._setupIteratorOptions = function (options) {
  var self = this
  options = xtend(options) // work on a copy; never mutate caller options
  ;[ 'start', 'end', 'gt', 'gte', 'lt', 'lte' ].forEach(function (o) {
    if (options[o] && self._isBuffer(options[o]) && options[o].length === 0)
      delete options[o]
  })
  options.reverse = !!options.reverse
  // fix `start` so it takes into account gt, gte, lt, lte as appropriate
  if (options.reverse && options.lt)
    options.start = options.lt
  if (options.reverse && options.lte)
    options.start = options.lte
  if (!options.reverse && options.gt)
    options.start = options.gt
  if (!options.reverse && options.gte)
    options.start = options.gte
  if ((options.reverse && options.lt && !options.lte)
    || (!options.reverse && options.gt && !options.gte))
    options.exclusiveStart = true // start should *not* include matching key
  return options
}
// Create an iterator over the store, delegating to the subclass _iterator
// when available; otherwise return an inert AbstractIterator.
AbstractLevelDOWN.prototype.iterator = function (options) {
  if (typeof options != 'object') {
    options = {}
  }
  var prepared = this._setupIteratorOptions(options)
  if (typeof this._iterator == 'function') {
    return this._iterator(prepared)
  }
  return new AbstractIterator(this)
}

// Default chained-batch factory.
AbstractLevelDOWN.prototype._chainedBatch = function () {
  return new AbstractChainedBatch(this)
}

// Node environments: Buffers are the binary type.
AbstractLevelDOWN.prototype._isBuffer = function (obj) {
  return Buffer.isBuffer(obj)
}
AbstractLevelDOWN.prototype._checkKeyValue = f |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment