@calvinmetcalf
Created September 29, 2014 20:08
<!doctype html>
<html>
<head>
<meta charset="utf-8">
</head>
<body>
<pre style="width: 100%; height: 100%;" id="output"></pre>
<script src="pouchdb.js"></script>
<script src="pouchdb.redblack.js"></script>
<script src="performance-bundle.js"></script>
</body>
</html>
// PouchDB 3.0.7-prerelease
//
// (c) 2012-2014 Dale Harvey and the PouchDB team
// PouchDB may be freely distributed under the Apache license, version 2.0.
// For all details and documentation:
// http://pouchdb.com
!function(e){if("object"==typeof exports)module.exports=e();else if("function"==typeof define&&define.amd)define(e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.PouchDB=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(_dereq_,module,exports){
"use strict";
var utils = _dereq_('./utils');
var merge = _dereq_('./merge');
var errors = _dereq_('./deps/errors');
var EventEmitter = _dereq_('events').EventEmitter;
var upsert = _dereq_('./deps/upsert');
var Changes = _dereq_('./changes');
var Promise = utils.Promise;
/*
* A generic pouch adapter
*/
// returns first element of arr satisfying callback predicate
function arrayFirst(arr, callback) {
for (var i = 0; i < arr.length; i++) {
if (callback(arr[i], i) === true) {
return arr[i];
}
}
return false;
}
// Wrapper for functions that call the bulkdocs api with a single doc,
// if the first result is an error, return an error
function yankError(callback) {
return function (err, results) {
if (err || results[0].error) {
callback(err || results[0]);
} else {
callback(null, results[0]);
}
};
}
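// Illustrative sketch (not part of the PouchDB source, commented out so it
// never executes): how yankError adapts a bulkDocs-style result array for a
// single-document caller. `db` here is a hypothetical database instance.
//
//   db.bulkDocs({docs: [{_id: 'mydoc'}]}, {}, yankError(function (err, result) {
//     // success: result is results[0], e.g. {ok: true, id: 'mydoc', rev: '1-...'}
//     // failure: err is the bulkDocs error, or results[0] when it carries .error
//   }));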
// for every node in a revision tree computes its distance from the closest
// leaf
function computeHeight(revs) {
var height = {};
var edges = [];
merge.traverseRevTree(revs, function (isLeaf, pos, id, prnt) {
var rev = pos + "-" + id;
if (isLeaf) {
height[rev] = 0;
}
if (prnt !== undefined) {
edges.push({from: prnt, to: rev});
}
return rev;
});
edges.reverse();
edges.forEach(function (edge) {
if (height[edge.from] === undefined) {
height[edge.from] = 1 + height[edge.to];
} else {
height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]);
}
});
return height;
}
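// Illustrative sketch (not part of the PouchDB source): for a linear revision
// tree 1-a -> 2-b -> 3-c, where 3-c is the only leaf, computeHeight would yield
//
//   { '3-c': 0, '2-b': 1, '1-a': 2 }
//
// i.e. each revision's distance from the closest leaf, which compactDocument
// later compares against max_height.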
function allDocsKeysQuery(api, opts, callback) {
var keys = ('limit' in opts) ?
opts.keys.slice(opts.skip, opts.limit + opts.skip) :
(opts.skip > 0) ? opts.keys.slice(opts.skip) : opts.keys;
if (opts.descending) {
keys.reverse();
}
if (!keys.length) {
return api._allDocs({limit: 0}, callback);
}
var finalResults = {
offset: opts.skip
};
return Promise.all(keys.map(function (key, i) {
var subOpts = utils.extend(true, {key: key, deleted: 'ok'}, opts);
['limit', 'skip', 'keys'].forEach(function (optKey) {
delete subOpts[optKey];
});
return new Promise(function (resolve, reject) {
api._allDocs(subOpts, function (err, res) {
if (err) {
return reject(err);
}
finalResults.total_rows = res.total_rows;
resolve(res.rows[0] || {key: key, error: 'not_found'});
});
});
})).then(function (results) {
finalResults.rows = results;
return finalResults;
});
}
utils.inherits(AbstractPouchDB, EventEmitter);
module.exports = AbstractPouchDB;
function AbstractPouchDB() {
var self = this;
EventEmitter.call(this);
self.autoCompact = function (callback) {
// http doesn't have auto-compaction
if (!self.auto_compaction || self.type() === 'http') {
return callback;
}
return function (err, res) {
if (err) {
callback(err);
} else {
var count = res.length;
var decCount = function () {
count--;
if (!count) {
callback(null, res);
}
};
if (!res.length) {
return callback(null, res);
}
res.forEach(function (doc) {
if (doc.ok && doc.id) { // if no id, then it was a local doc
// TODO: we need better error handling
self.compactDocument(doc.id, 1, decCount);
} else {
decCount();
}
});
}
};
};
var listeners = 0, changes;
var eventNames = ['change', 'delete', 'create', 'update'];
this.on('newListener', function (eventName) {
if (~eventNames.indexOf(eventName)) {
if (listeners) {
listeners++;
return;
} else {
listeners++;
}
} else {
return;
}
var lastChange = 0;
changes = this.changes({
conflicts: true,
include_docs: true,
continuous: true,
since: 'now',
onChange: function (change) {
if (change.seq <= lastChange) {
return;
}
lastChange = change.seq;
self.emit('change', change);
if (change.doc._deleted) {
self.emit('delete', change);
} else if (change.doc._rev.split('-')[0] === '1') {
self.emit('create', change);
} else {
self.emit('update', change);
}
}
});
});
this.on('removeListener', function (eventName) {
if (~eventNames.indexOf(eventName)) {
listeners--;
if (listeners) {
return;
}
} else {
return;
}
changes.cancel();
});
}
AbstractPouchDB.prototype.post =
utils.adapterFun('post', function (doc, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
if (typeof doc !== 'object' || Array.isArray(doc)) {
return callback(errors.NOT_AN_OBJECT);
}
this.bulkDocs({docs: [doc]}, opts,
this.autoCompact(yankError(callback)));
});
AbstractPouchDB.prototype.put =
utils.adapterFun('put', utils.getArguments(function (args) {
var temp, temptype, opts, callback;
var doc = args.shift();
var id = '_id' in doc;
if (typeof doc !== 'object' || Array.isArray(doc)) {
callback = args.pop();
return callback(errors.NOT_AN_OBJECT);
}
doc = utils.clone(doc);
while (true) {
temp = args.shift();
temptype = typeof temp;
if (temptype === "string" && !id) {
doc._id = temp;
id = true;
} else if (temptype === "string" && id && !('_rev' in doc)) {
doc._rev = temp;
} else if (temptype === "object") {
opts = temp;
} else if (temptype === "function") {
callback = temp;
}
if (!args.length) {
break;
}
}
opts = opts || {};
var error = utils.invalidIdError(doc._id);
if (error) {
return callback(error);
}
if (utils.isLocalId(doc._id) && typeof this._putLocal === 'function') {
if (doc._deleted) {
return this._removeLocal(doc, callback);
} else {
return this._putLocal(doc, callback);
}
}
this.bulkDocs({docs: [doc]}, opts,
this.autoCompact(yankError(callback)));
}));
AbstractPouchDB.prototype.putAttachment =
utils.adapterFun('putAttachment', function (docId, attachmentId, rev,
blob, type, callback) {
var api = this;
if (typeof type === 'function') {
callback = type;
type = blob;
blob = rev;
rev = null;
}
if (typeof type === 'undefined') {
type = blob;
blob = rev;
rev = null;
}
function createAttachment(doc) {
doc._attachments = doc._attachments || {};
doc._attachments[attachmentId] = {
content_type: type,
data: blob
};
return api.put(doc);
}
return api.get(docId).then(function (doc) {
if (doc._rev !== rev) {
throw errors.REV_CONFLICT;
}
return createAttachment(doc);
}, function (err) {
// create new doc
if (err.error === errors.MISSING_DOC.error) {
return createAttachment({_id: docId});
} else {
throw err;
}
});
});
AbstractPouchDB.prototype.removeAttachment =
utils.adapterFun('removeAttachment', function (docId, attachmentId, rev,
callback) {
var self = this;
self.get(docId, function (err, obj) {
if (err) {
callback(err);
return;
}
if (obj._rev !== rev) {
callback(errors.REV_CONFLICT);
return;
}
if (!obj._attachments) {
return callback();
}
delete obj._attachments[attachmentId];
if (Object.keys(obj._attachments).length === 0) {
delete obj._attachments;
}
self.put(obj, callback);
});
});
AbstractPouchDB.prototype.remove =
utils.adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
var doc;
if (typeof optsOrRev === 'string') {
// id, rev, opts, callback style
doc = {
_id: docOrId,
_rev: optsOrRev
};
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
} else {
// doc, opts, callback style
doc = docOrId;
if (typeof optsOrRev === 'function') {
callback = optsOrRev;
opts = {};
} else {
callback = opts;
opts = optsOrRev;
}
}
opts = utils.clone(opts || {});
opts.was_delete = true;
var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)};
newDoc._deleted = true;
if (utils.isLocalId(newDoc._id) && typeof this._removeLocal === 'function') {
return this._removeLocal(doc, callback);
}
this.bulkDocs({docs: [newDoc]}, opts, yankError(callback));
});
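// Illustrative sketch (not part of the PouchDB source, commented out so it
// never executes): the two calling conventions handled above, using a
// hypothetical `db` instance.
//
//   db.remove('mydoc', '2-abc', function (err, res) { /* id, rev style */ });
//   db.remove({_id: 'mydoc', _rev: '2-abc'}, function (err, res) { /* doc style */ });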
AbstractPouchDB.prototype.revsDiff =
utils.adapterFun('revsDiff', function (req, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
var ids = Object.keys(req);
if (!ids.length) {
return callback(null, {});
}
var count = 0;
var missing = new utils.Map();
function addToMissing(id, revId) {
if (!missing.has(id)) {
missing.set(id, {missing: []});
}
missing.get(id).missing.push(revId);
}
function processDoc(id, rev_tree) {
// Is this fast enough? Maybe we should switch to a set simulated by a map
var missingForId = req[id].slice(0);
merge.traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx,
opts) {
var rev = pos + '-' + revHash;
var idx = missingForId.indexOf(rev);
if (idx === -1) {
return;
}
missingForId.splice(idx, 1);
if (opts.status !== 'available') {
addToMissing(id, rev);
}
});
// Traversing the tree is synchronous, so now `missingForId` contains
// revisions that were not found in the tree
missingForId.forEach(function (rev) {
addToMissing(id, rev);
});
}
ids.map(function (id) {
this._getRevisionTree(id, function (err, rev_tree) {
if (err && err.status === 404 && err.message === 'missing') {
missing.set(id, {missing: req[id]});
} else if (err) {
return callback(err);
} else {
processDoc(id, rev_tree);
}
if (++count === ids.length) {
// convert LazyMap to object
var missingObj = {};
missing.forEach(function (value, key) {
missingObj[key] = value;
});
return callback(null, missingObj);
}
});
}, this);
});
// compact one document and fire callback
// by compacting we mean removing all revisions which
// are further from the leaf in revision tree than max_height
AbstractPouchDB.prototype.compactDocument =
utils.adapterFun('compactDocument', function (docId, max_height, callback) {
var self = this;
this._getRevisionTree(docId, function (err, rev_tree) {
if (err) {
return callback(err);
}
var height = computeHeight(rev_tree);
var candidates = [];
var revs = [];
Object.keys(height).forEach(function (rev) {
if (height[rev] > max_height) {
candidates.push(rev);
}
});
merge.traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx, opts) {
var rev = pos + '-' + revHash;
if (opts.status === 'available' && candidates.indexOf(rev) !== -1) {
opts.status = 'missing';
revs.push(rev);
}
});
self._doCompaction(docId, rev_tree, revs, callback);
});
});
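// Illustrative sketch (not part of the PouchDB source): with the heights from
// the computeHeight example above ({'3-c': 0, '2-b': 1, '1-a': 2}) and
// max_height = 1, only '1-a' exceeds the threshold, so its status is flipped
// from 'available' to 'missing' and it is handed to _doCompaction for removal.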
// compact the whole database using single document
// compaction
AbstractPouchDB.prototype.compact =
utils.adapterFun('compact', function (opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
var self = this;
opts = utils.clone(opts || {});
self.get('_local/compaction')["catch"](function () {
return false;
}).then(function (doc) {
if (typeof self._compact === 'function') {
if (doc && doc.last_seq) {
opts.last_seq = doc.last_seq;
}
return self._compact(opts, callback);
}
});
});
AbstractPouchDB.prototype._compact = function (opts, callback) {
var done = false;
var started = 0;
var copts = {
returnDocs: false
};
var self = this;
var lastSeq;
function finish() {
self.get('_local/compaction')["catch"](function () {
return false;
}).then(function (doc) {
doc = doc || {_id: '_local/compaction'};
doc.last_seq = lastSeq;
return self.put(doc);
}).then(function () {
callback();
}, callback);
}
if (opts.last_seq) {
copts.since = opts.last_seq;
}
function afterCompact() {
started--;
if (!started && done) {
finish();
}
}
function onChange(row) {
started++;
self.compactDocument(row.id, 0).then(afterCompact, callback);
}
self.changes(copts).on('change', onChange).on('complete', function (resp) {
done = true;
lastSeq = resp.last_seq;
if (!started) {
finish();
}
}).on('error', callback);
};
/* Begin api wrappers. Specific functionality to storage belongs in the
_[method] */
AbstractPouchDB.prototype.get =
utils.adapterFun('get', function (id, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
if (typeof id !== 'string') {
return callback(errors.INVALID_ID);
}
if (utils.isLocalId(id) && typeof this._getLocal === 'function') {
return this._getLocal(id, callback);
}
var leaves = [], self = this;
function finishOpenRevs() {
var result = [];
var count = leaves.length;
if (!count) {
return callback(null, result);
}
// order with open_revs is unspecified
leaves.forEach(function (leaf) {
self.get(id,
{rev: leaf, revs: opts.revs, attachments: opts.attachments},
function (err, doc) {
if (!err) {
result.push({ok: doc});
} else {
result.push({missing: leaf});
}
count--;
if (!count) {
callback(null, result);
}
});
});
}
if (opts.open_revs) {
if (opts.open_revs === "all") {
this._getRevisionTree(id, function (err, rev_tree) {
if (err) {
// if there's no such document we should treat this
// situation the same way as if revision tree was empty
rev_tree = [];
}
leaves = merge.collectLeaves(rev_tree).map(function (leaf) {
return leaf.rev;
});
finishOpenRevs();
});
} else {
if (Array.isArray(opts.open_revs)) {
leaves = opts.open_revs;
for (var i = 0; i < leaves.length; i++) {
var l = leaves[i];
// looks like it's the only thing couchdb checks
if (!(typeof(l) === "string" && /^\d+-/.test(l))) {
return callback(errors.error(errors.BAD_REQUEST,
"Invalid rev format"));
}
}
finishOpenRevs();
} else {
return callback(errors.error(errors.UNKNOWN_ERROR,
'function_clause'));
}
}
return; // open_revs does not like other options
}
return this._get(id, opts, function (err, result) {
opts = utils.clone(opts);
if (err) {
return callback(err);
}
var doc = result.doc;
if (!doc) {
// a smoke test for something being very wrong
return callback(new Error('no doc!'));
}
var metadata = result.metadata;
var ctx = result.ctx;
if (opts.conflicts) {
var conflicts = merge.collectConflicts(metadata);
if (conflicts.length) {
doc._conflicts = conflicts;
}
}
if (opts.revs || opts.revs_info) {
var paths = merge.rootToLeaf(metadata.rev_tree);
var path = arrayFirst(paths, function (arr) {
return arr.ids.map(function (x) { return x.id; })
.indexOf(doc._rev.split('-')[1]) !== -1;
});
var indexOfRev = path.ids.map(function (x) {return x.id; })
.indexOf(doc._rev.split('-')[1]) + 1;
var howMany = path.ids.length - indexOfRev;
path.ids.splice(indexOfRev, howMany);
path.ids.reverse();
if (opts.revs) {
doc._revisions = {
start: (path.pos + path.ids.length) - 1,
ids: path.ids.map(function (rev) {
return rev.id;
})
};
}
if (opts.revs_info) {
var pos = path.pos + path.ids.length;
doc._revs_info = path.ids.map(function (rev) {
pos--;
return {
rev: pos + '-' + rev.id,
status: rev.opts.status
};
});
}
}
if (opts.local_seq) {
doc._local_seq = result.metadata.seq;
}
if (opts.attachments && doc._attachments) {
var attachments = doc._attachments;
var count = Object.keys(attachments).length;
if (count === 0) {
return callback(null, doc);
}
Object.keys(attachments).forEach(function (key) {
this._getAttachment(attachments[key],
{encode: true, ctx: ctx}, function (err, data) {
var att = doc._attachments[key];
att.data = data;
delete att.stub;
if (!--count) {
callback(null, doc);
}
});
}, self);
} else {
if (doc._attachments) {
for (var key in doc._attachments) {
if (doc._attachments.hasOwnProperty(key)) {
doc._attachments[key].stub = true;
}
}
}
callback(null, doc);
}
});
});
AbstractPouchDB.prototype.getAttachment =
utils.adapterFun('getAttachment', function (docId, attachmentId, opts,
callback) {
var self = this;
if (opts instanceof Function) {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
this._get(docId, opts, function (err, res) {
if (err) {
return callback(err);
}
if (res.doc._attachments && res.doc._attachments[attachmentId]) {
opts.ctx = res.ctx;
self._getAttachment(res.doc._attachments[attachmentId], opts, callback);
} else {
return callback(errors.MISSING_DOC);
}
});
});
AbstractPouchDB.prototype.allDocs =
utils.adapterFun('allDocs', function (opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0;
if ('keys' in opts) {
if (!Array.isArray(opts.keys)) {
return callback(new TypeError('options.keys must be an array'));
}
var incompatibleOpt =
['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) {
return incompatibleOpt in opts;
})[0];
if (incompatibleOpt) {
callback(errors.error(errors.QUERY_PARSE_ERROR,
'Query parameter `' + incompatibleOpt +
'` is not compatible with multi-get'
));
return;
}
if (this.type() !== 'http') {
return allDocsKeysQuery(this, opts, callback);
}
}
return this._allDocs(opts, callback);
});
AbstractPouchDB.prototype.changes = function (opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
return new Changes(this, opts, callback);
};
AbstractPouchDB.prototype.close =
utils.adapterFun('close', function (callback) {
this._closed = true;
return this._close(callback);
});
AbstractPouchDB.prototype.info = utils.adapterFun('info', function (callback) {
var self = this;
this._info(function (err, info) {
if (err) {
return callback(err);
}
// assume we know better than the adapter, unless it informs us
info.db_name = info.db_name || self._db_name;
info.auto_compaction = !!(self._auto_compaction && self.type() !== 'http');
callback(null, info);
});
});
AbstractPouchDB.prototype.id = utils.adapterFun('id', function (callback) {
return this._id(callback);
});
AbstractPouchDB.prototype.type = function () {
return (typeof this._type === 'function') ? this._type() : this.adapter;
};
AbstractPouchDB.prototype.bulkDocs =
utils.adapterFun('bulkDocs', function (req, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
if (Array.isArray(req)) {
req = {
docs: req
};
}
if (!req || !req.docs || !Array.isArray(req.docs)) {
return callback(errors.MISSING_BULK_DOCS);
}
for (var i = 0; i < req.docs.length; ++i) {
if (typeof req.docs[i] !== 'object' || Array.isArray(req.docs[i])) {
return callback(errors.NOT_AN_OBJECT);
}
}
req = utils.clone(req);
if (!('new_edits' in opts)) {
if ('new_edits' in req) {
opts.new_edits = req.new_edits;
} else {
opts.new_edits = true;
}
}
return this._bulkDocs(req, opts, this.autoCompact(callback));
});
AbstractPouchDB.prototype.registerDependentDatabase =
utils.adapterFun('registerDependentDatabase', function (dependentDb,
callback) {
var depDB = new this.constructor(dependentDb, {adapter: this._adapter});
function diffFun(doc) {
doc.dependentDbs = doc.dependentDbs || {};
if (doc.dependentDbs[dependentDb]) {
return false; // no update required
}
doc.dependentDbs[dependentDb] = true;
return doc;
}
upsert(this, '_local/_pouch_dependentDbs', diffFun, function (err) {
if (err) {
return callback(err);
}
return callback(null, {db: depDB});
});
});
},{"./changes":6,"./deps/errors":11,"./deps/upsert":13,"./merge":18,"./utils":23,"events":27}],2:[function(_dereq_,module,exports){
(function (process){
"use strict";
var CHANGES_BATCH_SIZE = 25;
var utils = _dereq_('../utils');
var errors = _dereq_('../deps/errors');
// parseUri 1.2.2
// (c) Steven Levithan <stevenlevithan.com>
// MIT License
function parseUri(str) {
var o = parseUri.options;
var m = o.parser[o.strictMode ? "strict" : "loose"].exec(str);
var uri = {};
var i = 14;
while (i--) {
uri[o.key[i]] = m[i] || "";
}
uri[o.q.name] = {};
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) {
if ($1) {
uri[o.q.name][$1] = $2;
}
});
return uri;
}
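// Illustrative sketch (not part of the parseUri source): the shape of the
// object produced for a typical CouchDB URL, based on the key list in
// parseUri.options below. Exact values are an assumption for illustration.
//
//   parseUri('http://admin:secret@localhost:5984/mydb');
//   // => { protocol: 'http', user: 'admin', password: 'secret',
//   //      host: 'localhost', port: '5984', path: '/mydb',
//   //      queryKey: {}, ... }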
function encodeDocId(id) {
if (/^_(design|local)/.test(id)) {
return id;
}
return encodeURIComponent(id);
}
function preprocessAttachments(doc) {
  if (!doc._attachments || !Object.keys(doc._attachments).length) {
return utils.Promise.resolve();
}
return utils.Promise.all(Object.keys(doc._attachments).map(function (key) {
var attachment = doc._attachments[key];
if (attachment.data && typeof attachment.data !== 'string') {
      if (typeof process === 'undefined' || process.browser) {
return new utils.Promise(function (resolve) {
var reader = new FileReader();
reader.onloadend = function (e) {
attachment.data = utils.btoa(
utils.arrayBufferToBinaryString(e.target.result));
resolve();
};
reader.readAsArrayBuffer(attachment.data);
});
} else {
attachment.data = attachment.data.toString('base64');
}
}
}));
}
parseUri.options = {
strictMode: false,
key: ["source", "protocol", "authority", "userInfo", "user", "password",
"host", "port", "relative", "path", "directory", "file", "query",
"anchor"],
q: {
name: "queryKey",
parser: /(?:^|&)([^&=]*)=?([^&]*)/g
},
parser: {
/* jshint maxlen: false */
strict: /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/,
loose: /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/
}
};
// Get all the information you possibly can about the URI given by name and
// return it as a suitable object.
function getHost(name, opts) {
// If the given name contains "http:"
if (/http(s?):/.test(name)) {
    // Parse the URI into all its little bits
var uri = parseUri(name);
// Store the fact that it is a remote URI
uri.remote = true;
// Store the user and password as a separate auth object
if (uri.user || uri.password) {
uri.auth = {username: uri.user, password: uri.password};
}
// Split the path part of the URI into parts using '/' as the delimiter
// after removing any leading '/' and any trailing '/'
var parts = uri.path.replace(/(^\/|\/$)/g, '').split('/');
// Store the first part as the database name and remove it from the parts
// array
uri.db = parts.pop();
// Restore the path by joining all the remaining parts (all the parts
// except for the database name) with '/'s
uri.path = parts.join('/');
opts = opts || {};
opts = utils.clone(opts);
uri.headers = opts.headers || {};
if (opts.auth || uri.auth) {
var nAuth = opts.auth || uri.auth;
var token = utils.btoa(nAuth.username + ':' + nAuth.password);
uri.headers.Authorization = 'Basic ' + token;
}
if (opts.headers) {
uri.headers = opts.headers;
}
return uri;
}
// If the given name does not contain 'http:' then return a very basic object
// with no host, the current path, the given name as the database name and no
// username/password
return {host: '', path: '/', db: name, auth: false};
}
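// Illustrative sketch (not part of the PouchDB source): what getHost adds on
// top of parseUri for a remote name. Values are assumptions for illustration.
//
//   getHost('http://admin:secret@localhost:5984/mydb');
//   // => { remote: true, db: 'mydb', path: '',
//   //      auth: {username: 'admin', password: 'secret'},
//   //      headers: {Authorization: 'Basic ...'}, ... }
//
//   getHost('local-db');
//   // => { host: '', path: '/', db: 'local-db', auth: false }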
// Generate a URL with the host data given by opts and the given path
function genDBUrl(opts, path) {
return genUrl(opts, opts.db + '/' + path);
}
// Generate a URL with the host data given by opts and the given path
function genUrl(opts, path) {
if (opts.remote) {
// If the host already has a path, then we need to have a path delimiter
// Otherwise, the path delimiter is the empty string
var pathDel = !opts.path ? '' : '/';
return opts.protocol + '://' + opts.host + ':' + opts.port + '/' +
opts.path + pathDel + path;
}
return '/' + path;
}
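// Illustrative sketch (not part of the PouchDB source): how genUrl and
// genDBUrl combine the pieces above, assuming the remote host object from the
// getHost example.
//
//   genUrl(host, '');           // => 'http://localhost:5984/'
//   genDBUrl(host, '_changes'); // => 'http://localhost:5984/mydb/_changes'
//
// For a non-remote host the result is simply '/' + path.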
// Implements the PouchDB API for dealing with CouchDB instances over HTTP
function HttpPouch(opts, callback) {
// The functions that will be publicly available for HttpPouch
var api = this;
api.getHost = opts.getHost ? opts.getHost : getHost;
// Parse the URI given by opts.name into an easy-to-use object
var host = api.getHost(opts.name, opts);
// Generate the database URL based on the host
var dbUrl = genDBUrl(host, '');
api.getUrl = function () {return dbUrl; };
api.getHeaders = function () {return utils.clone(host.headers); };
var ajaxOpts = opts.ajax || {};
opts = utils.clone(opts);
function ajax(options, callback) {
return utils.ajax(utils.extend({}, ajaxOpts, options), callback);
}
// Create a new CouchDB database based on the given opts
var createDB = function () {
ajax({headers: host.headers, method: 'PUT', url: dbUrl},
function (err, ret) {
// If we get an "Unauthorized" error
if (err && err.status === 401) {
// Test if the database already exists
ajax({headers: host.headers, method: 'HEAD', url: dbUrl},
function (err, ret) {
// If there is still an error
if (err) {
// Give the error to the callback to deal with
callback(err);
} else {
// Continue as if there had been no errors
callback(null, api);
}
});
// If there were no errros or if the only error is "Precondition Failed"
// (note: "Precondition Failed" occurs when we try to create a database
// that already exists)
} else if (!err || err.status === 412) {
// Continue as if there had been no errors
callback(null, api);
} else {
callback(err);
}
});
};
if (!opts.skipSetup) {
ajax({headers: host.headers, method: 'GET', url: dbUrl},
function (err, ret) {
//check if the db exists
if (err) {
if (err.status === 404) {
//if it doesn't, create it
createDB();
} else {
callback(err);
}
} else {
//go do stuff with the db
callback(null, api);
}
});
}
api.type = function () {
return 'http';
};
api.id = utils.adapterFun('id', function (callback) {
ajax({
headers: host.headers,
method: 'GET',
url: genUrl(host, '')
}, function (err, result) {
var uuid = (result && result.uuid) ?
result.uuid + host.db : genDBUrl(host, '');
callback(null, uuid);
});
});
api.request = utils.adapterFun('request', function (options, callback) {
options.headers = host.headers;
options.url = genDBUrl(host, options.url);
ajax(options, callback);
});
// Sends a POST request to the host calling the couchdb _compact function
// version: The version of CouchDB it is running
api.compact = utils.adapterFun('compact', function (opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
ajax({
headers: host.headers,
url: genDBUrl(host, '_compact'),
method: 'POST'
}, function () {
function ping() {
api.info(function (err, res) {
if (!res.compact_running) {
callback();
} else {
setTimeout(ping, opts.interval || 200);
}
});
}
      // Poll the server to see whether compaction has finished
if (typeof callback === "function") {
ping();
}
});
});
// Calls GET on the host, which gets back a JSON string containing
// couchdb: A welcome string
// version: The version of CouchDB it is running
api._info = function (callback) {
ajax({
headers: host.headers,
method: 'GET',
url: genDBUrl(host, '')
}, function (err, res) {
if (err) {
callback(err);
} else {
res.host = genDBUrl(host, '');
callback(null, res);
}
});
};
// Get the document with the given id from the database given by host.
// The id could be solely the _id in the database, or it may be a
// _design/ID or _local/ID path
api.get = utils.adapterFun('get', function (id, opts, callback) {
// If no options were given, set the callback to the second parameter
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
if (opts.auto_encode === undefined) {
opts.auto_encode = true;
}
// List of parameters to add to the GET request
var params = [];
// If it exists, add the opts.revs value to the list of parameters.
// If revs=true then the resulting JSON will include a field
// _revisions containing an array of the revision IDs.
if (opts.revs) {
params.push('revs=true');
}
// If it exists, add the opts.revs_info value to the list of parameters.
// If revs_info=true then the resulting JSON will include the field
// _revs_info containing an array of objects in which each object
    // represents an available revision.
if (opts.revs_info) {
params.push('revs_info=true');
}
if (opts.local_seq) {
params.push('local_seq=true');
}
// If it exists, add the opts.open_revs value to the list of parameters.
// If open_revs=all then the resulting JSON will include all the leaf
// revisions. If open_revs=["rev1", "rev2",...] then the resulting JSON
// will contain an array of objects containing data of all revisions
if (opts.open_revs) {
if (opts.open_revs !== "all") {
opts.open_revs = JSON.stringify(opts.open_revs);
}
params.push('open_revs=' + opts.open_revs);
}
// If it exists, add the opts.attachments value to the list of parameters.
// If attachments=true the resulting JSON will include the base64-encoded
// contents in the "data" property of each attachment.
if (opts.attachments) {
params.push('attachments=true');
}
// If it exists, add the opts.rev value to the list of parameters.
// If rev is given a revision number then get the specified revision.
if (opts.rev) {
params.push('rev=' + opts.rev);
}
// If it exists, add the opts.conflicts value to the list of parameters.
// If conflicts=true then the resulting JSON will include the field
// _conflicts containing all the conflicting revisions.
if (opts.conflicts) {
params.push('conflicts=' + opts.conflicts);
}
// Format the list of parameters into a valid URI query string
params = params.join('&');
params = params === '' ? '' : '?' + params;
if (opts.auto_encode) {
id = encodeDocId(id);
}
// Set the options for the ajax call
var options = {
headers: host.headers,
method: 'GET',
url: genDBUrl(host, id + params)
};
// If the given id contains at least one '/' and the part before the '/'
// is NOT "_design" and is NOT "_local"
// OR
// If the given id contains at least two '/' and the part before the first
// '/' is "_design".
// TODO This second condition seems strange since if parts[0] === '_design'
// then we already know that parts[0] !== '_local'.
var parts = id.split('/');
if ((parts.length > 1 && parts[0] !== '_design' && parts[0] !== '_local') ||
(parts.length > 2 && parts[0] === '_design' && parts[0] !== '_local')) {
// Binary is expected back from the server
options.binary = true;
}
// Get the document
ajax(options, function (err, doc, xhr) {
// If the document does not exist, send an error to the callback
if (err) {
return callback(err);
}
// Send the document to the callback
callback(null, doc, xhr);
});
});
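// Illustrative sketch (not part of the PouchDB source, commented out so it
// never executes): how the option flags above end up as query parameters on
// the GET request, using a hypothetical remote `db`.
//
//   db.get('mydoc', {revs: true, conflicts: true, rev: '2-abc'}, cb);
//   // => GET /mydb/mydoc?revs=true&rev=2-abc&conflicts=true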
// Delete the document given by doc from the database given by host.
api.remove = utils.adapterFun('remove', function (docOrId, optsOrRev, opts, callback) {
var doc;
if (typeof optsOrRev === 'string') {
// id, rev, opts, callback style
doc = {
_id: docOrId,
_rev: optsOrRev
};
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
} else {
// doc, opts, callback style
doc = docOrId;
if (typeof optsOrRev === 'function') {
callback = optsOrRev;
opts = {};
} else {
callback = opts;
opts = optsOrRev;
}
}
var rev = (doc._rev || opts.rev);
// Delete the document
ajax({
headers: host.headers,
method: 'DELETE',
url: genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev
}, callback);
});
// Get the attachment
api.getAttachment =
utils.adapterFun('getAttachment', function (docId, attachmentId, opts,
callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
if (opts.auto_encode === undefined) {
opts.auto_encode = true;
}
if (opts.auto_encode) {
docId = encodeDocId(docId);
}
opts.auto_encode = false;
api.get(docId + '/' + attachmentId, opts, callback);
});
// Remove the attachment given by the id and rev
api.removeAttachment =
utils.adapterFun('removeAttachment', function (docId, attachmentId, rev,
callback) {
ajax({
headers: host.headers,
method: 'DELETE',
url: genDBUrl(host, encodeDocId(docId) + '/' + attachmentId) + '?rev=' +
rev
}, callback);
});
// Add the attachment given by blob and its contentType property
// to the document with the given id, the revision given by rev, and
// add it to the database given by host.
api.putAttachment =
utils.adapterFun('putAttachment', function (docId, attachmentId, rev, blob,
type, callback) {
if (typeof type === 'function') {
callback = type;
type = blob;
blob = rev;
rev = null;
}
if (typeof type === 'undefined') {
type = blob;
blob = rev;
rev = null;
}
var id = encodeDocId(docId) + '/' + attachmentId;
var url = genDBUrl(host, id);
if (rev) {
url += '?rev=' + rev;
}
var opts = {
headers: utils.clone(host.headers),
method: 'PUT',
url: url,
processData: false,
body: blob,
timeout: 60000
};
opts.headers['Content-Type'] = type;
// Add the attachment
ajax(opts, callback);
});
// Add the document given by doc (in JSON string format) to the database
// given by host. This fails if the doc has no _id field.
api.put = utils.adapterFun('put', utils.getArguments(function (args) {
var temp, temptype, opts;
var doc = args.shift();
var id = '_id' in doc;
var callback = args.pop();
if (typeof doc !== 'object' || Array.isArray(doc)) {
return callback(errors.NOT_AN_OBJECT);
}
doc = utils.clone(doc);
preprocessAttachments(doc).then(function () {
while (true) {
temp = args.shift();
temptype = typeof temp;
if (temptype === "string" && !id) {
doc._id = temp;
id = true;
} else if (temptype === "string" && id && !('_rev' in doc)) {
doc._rev = temp;
} else if (temptype === "object") {
opts = utils.clone(temp);
}
if (!args.length) {
break;
}
}
opts = opts || {};
var error = utils.invalidIdError(doc._id);
if (error) {
throw error;
}
// List of parameter to add to the PUT request
var params = [];
// If it exists, add the opts.new_edits value to the list of parameters.
// If new_edits = false then the database will NOT assign this document a
// new revision number
if (opts && typeof opts.new_edits !== 'undefined') {
params.push('new_edits=' + opts.new_edits);
}
// Format the list of parameters into a valid URI query string
params = params.join('&');
if (params !== '') {
params = '?' + params;
}
// Add the document
ajax({
headers: host.headers,
method: 'PUT',
url: genDBUrl(host, encodeDocId(doc._id)) + params,
body: doc
}, function (err, res) {
if (err) {
return callback(err);
}
res.ok = true;
callback(null, res);
});
})["catch"](callback);
}));
// Add the document given by doc (in JSON string format) to the database
// given by host. This does not assume that doc is a new document
// (i.e. does not have a _id or a _rev field.)
api.post = utils.adapterFun('post', function (doc, opts, callback) {
// If no options were given, set the callback to be the second parameter
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
if (typeof doc !== 'object') {
return callback(errors.NOT_AN_OBJECT);
}
if (! ("_id" in doc)) {
doc._id = utils.uuid();
}
api.put(doc, opts, function (err, res) {
if (err) {
return callback(err);
}
res.ok = true;
callback(null, res);
});
});
// Update/create multiple documents given by req in the database
// given by host.
api._bulkDocs = function (req, opts, callback) {
// If opts.new_edits exists add it to the document data to be
    // sent to the database.
// If new_edits=false then it prevents the database from creating
// new revision numbers for the documents. Instead it just uses
// the old ones. This is used in database replication.
if (typeof opts.new_edits !== 'undefined') {
req.new_edits = opts.new_edits;
}
utils.Promise.all(req.docs.map(preprocessAttachments)).then(function () {
// Update/create the documents
ajax({
headers: host.headers,
method: 'POST',
url: genDBUrl(host, '_bulk_docs'),
body: req
}, function (err, results) {
if (err) {
return callback(err);
}
results.forEach(function (result) {
        result.ok = true; // smooths out Cloudant not adding this
});
callback(null, results);
});
})["catch"](callback);
};
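// Illustrative sketch (not part of the PouchDB source): the shape of the
// _bulk_docs request body when a replicator passes new_edits: false so the
// target keeps the source's revision identifiers. Values are assumptions for
// illustration.
//
//   POST /mydb/_bulk_docs
//   { "docs": [{"_id": "mydoc", "_rev": "2-abc"}], "new_edits": false }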
// Get a listing of the documents in the database given
// by host and ordered by increasing id.
api.allDocs = utils.adapterFun('allDocs', function (opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
// List of parameters to add to the GET request
var params = [];
var body;
var method = 'GET';
// TODO I don't see conflicts as a valid parameter for a
// _all_docs request
// (see http://wiki.apache.org/couchdb/HTTP_Document_API#all_docs)
if (opts.conflicts) {
params.push('conflicts=true');
}
// If opts.descending is truthy add it to params
if (opts.descending) {
params.push('descending=true');
}
// If opts.include_docs exists, add the include_docs value to the
// list of parameters.
// If include_docs=true then include the associated document with each
// result.
if (opts.include_docs) {
params.push('include_docs=true');
}
if (opts.key) {
params.push('key=' + encodeURIComponent(JSON.stringify(opts.key)));
}
// If opts.startkey exists, add the startkey value to the list of
// parameters.
// If startkey is given then the returned list of documents will
// start with the document whose id is startkey.
if (opts.startkey) {
params.push('startkey=' +
encodeURIComponent(JSON.stringify(opts.startkey)));
}
// If opts.endkey exists, add the endkey value to the list of parameters.
    // If endkey is given then the returned list of documents will
// end with the document whose id is endkey.
if (opts.endkey) {
params.push('endkey=' + encodeURIComponent(JSON.stringify(opts.endkey)));
}
if (typeof opts.inclusive_end !== 'undefined') {
params.push('inclusive_end=' + !!opts.inclusive_end);
}
// If opts.limit exists, add the limit value to the parameter list.
if (typeof opts.limit !== 'undefined') {
params.push('limit=' + opts.limit);
}
if (typeof opts.skip !== 'undefined') {
params.push('skip=' + opts.skip);
}
// Format the list of parameters into a valid URI query string
params = params.join('&');
if (params !== '') {
params = '?' + params;
}
if (typeof opts.keys !== 'undefined') {
var MAX_URL_LENGTH = 2000;
// according to http://stackoverflow.com/a/417184/680742,
      // the de facto URL length limit is 2000 characters
var keysAsString =
'keys=' + encodeURIComponent(JSON.stringify(opts.keys));
if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
// If the keys are short enough, do a GET. we do this to work around
// Safari not understanding 304s on POSTs (see issue #1239)
params += (params.indexOf('?') !== -1 ? '&' : '?') + keysAsString;
} else {
// If keys are too long, issue a POST request to circumvent GET
// query string limits
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
method = 'POST';
body = JSON.stringify({keys: opts.keys});
}
}
// Get the document listing
ajax({
headers: host.headers,
method: method,
url: genDBUrl(host, '_all_docs' + params),
body: body
}, callback);
});
// Get a list of changes made to documents in the database given by host.
// TODO According to the README, there should be two other methods here,
// api.changes.addListener and api.changes.removeListener.
api._changes = function (opts) {
// We internally page the results of a changes request, this means
// if there is a large set of changes to be returned we can start
// processing them quicker instead of waiting on the entire
// set of changes to return and attempting to process them at once
var batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE;
opts = utils.clone(opts);
opts.timeout = opts.timeout || 30 * 1000;
// We give a 5 second buffer for CouchDB changes to respond with
// an ok timeout
var params = { timeout: opts.timeout - (5 * 1000) };
var limit = (typeof opts.limit !== 'undefined') ? opts.limit : false;
if (limit === 0) {
limit = 1;
}
var returnDocs;
if ('returnDocs' in opts) {
returnDocs = opts.returnDocs;
} else {
returnDocs = true;
}
//
var leftToFetch = limit;
if (opts.style) {
params.style = opts.style;
}
if (opts.include_docs || opts.filter && typeof opts.filter === 'function') {
params.include_docs = true;
}
if (opts.continuous) {
params.feed = 'longpoll';
}
if (opts.conflicts) {
params.conflicts = true;
}
if (opts.descending) {
params.descending = true;
}
if (opts.filter && typeof opts.filter === 'string') {
params.filter = opts.filter;
if (opts.filter === '_view' &&
opts.view &&
typeof opts.view === 'string') {
params.view = opts.view;
}
}
// If opts.query_params exists, pass it through to the changes request.
// These parameters may be used by the filter on the source database.
if (opts.query_params && typeof opts.query_params === 'object') {
for (var param_name in opts.query_params) {
if (opts.query_params.hasOwnProperty(param_name)) {
params[param_name] = opts.query_params[param_name];
}
}
}
var xhr;
var lastFetchedSeq;
    // Get all the changes starting with the one immediately after the
// sequence number given by since.
var fetch = function (since, callback) {
if (opts.aborted) {
return;
}
params.since = since;
if (opts.descending) {
if (limit) {
params.limit = leftToFetch;
}
} else {
params.limit = (!limit || leftToFetch > batchSize) ?
batchSize : leftToFetch;
}
var paramStr = '?' + Object.keys(params).map(function (k) {
return k + '=' + params[k];
}).join('&');
// Set the options for the ajax call
var xhrOpts = {
headers: host.headers,
method: 'GET',
url: genDBUrl(host, '_changes' + paramStr),
// _changes can take a long time to generate, especially when filtered
timeout: opts.timeout
};
lastFetchedSeq = since;
if (opts.aborted) {
return;
}
// Get the changes
xhr = ajax(xhrOpts, callback);
};
// If opts.since exists, get all the changes from the sequence
// number given by opts.since. Otherwise, get all the changes
// from the sequence number 0.
var fetchTimeout = 10;
var fetchRetryCount = 0;
var results = {results: []};
var fetched = function (err, res) {
if (opts.aborted) {
return;
}
var raw_results_length = 0;
// If the result of the ajax call (res) contains changes (res.results)
if (res && res.results) {
raw_results_length = res.results.length;
results.last_seq = res.last_seq;
// For each change
var req = {};
req.query = opts.query_params;
res.results = res.results.filter(function (c) {
leftToFetch--;
var ret = utils.filterChange(opts)(c);
if (ret) {
if (returnDocs) {
results.results.push(c);
}
utils.call(opts.onChange, c);
}
return ret;
});
} else if (err) {
// In case of an error, stop listening for changes and call
// opts.complete
opts.aborted = true;
utils.call(opts.complete, err);
return;
}
// The changes feed may have timed out with no results
// if so reuse last update sequence
if (res && res.last_seq) {
lastFetchedSeq = res.last_seq;
}
var finished = (limit && leftToFetch <= 0) ||
(res && raw_results_length < batchSize) ||
(opts.descending);
if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) {
// Increase retry delay exponentially as long as errors persist
if (err) {
fetchRetryCount += 1;
} else {
fetchRetryCount = 0;
}
var timeoutMultiplier = 1 << fetchRetryCount;
var retryWait = fetchTimeout * timeoutMultiplier;
var maximumWait = opts.maximumWait || 30000;
if (retryWait > maximumWait) {
utils.call(opts.complete, err || errors.UNKNOWN_ERROR);
return;
}
// Queue a call to fetch again with the newest sequence number
setTimeout(function () { fetch(lastFetchedSeq, fetched); }, retryWait);
} else {
// We're done, call the callback
utils.call(opts.complete, null, results);
}
};
fetch(opts.since || 0, fetched);
// Return a method to cancel this method from processing any more
return {
cancel: function () {
opts.aborted = true;
if (xhr) {
xhr.abort();
}
}
};
};
  // Given a set of document/revision IDs (given by req), returns the subset of
// those that do NOT correspond to revisions stored in the database.
// See http://wiki.apache.org/couchdb/HttpPostRevsDiff
api.revsDiff = utils.adapterFun('revsDiff', function (req, opts, callback) {
// If no options were given, set the callback to be the second parameter
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
// Get the missing document/revision IDs
ajax({
headers: host.headers,
method: 'POST',
url: genDBUrl(host, '_revs_diff'),
body: JSON.stringify(req)
}, callback);
});
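// Illustrative sketch (not part of the PouchDB source): the _revs_diff
// request/response shapes referenced above. Values are assumptions for
// illustration.
//
//   POST /mydb/_revs_diff
//   { "mydoc": ["1-aaa", "2-bbb"] }
//   // response: { "mydoc": { "missing": ["2-bbb"] } }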
api._close = function (callback) {
callback();
};
api.destroy = utils.adapterFun('destroy', function (callback) {
ajax({
url: genDBUrl(host, ''),
method: 'DELETE',
headers: host.headers
}, function (err, resp) {
if (err) {
api.emit('error', err);
callback(err);
} else {
api.emit('destroyed');
callback(null, resp);
}
});
});
}
// Delete the HttpPouch specified by the given name.
HttpPouch.destroy = utils.toPromise(function (name, opts, callback) {
var host = getHost(name, opts);
opts = opts || {};
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.clone(opts);
opts.headers = host.headers;
opts.method = 'DELETE';
opts.url = genDBUrl(host, '');
var ajaxOpts = opts.ajax || {};
opts = utils.extend({}, opts, ajaxOpts);
utils.ajax(opts, callback);
});
// HttpPouch is a valid adapter.
HttpPouch.valid = function () {
return true;
};
module.exports = HttpPouch;
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"../deps/errors":11,"../utils":23,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28}],3:[function(_dereq_,module,exports){
(function (process,global){
'use strict';
var utils = _dereq_('../utils');
var merge = _dereq_('../merge');
var errors = _dereq_('../deps/errors');
var vuvuzela = _dereq_('vuvuzela');
var cachedDBs = {};
var taskQueue = {
running: false,
queue: []
};
function tryCode(fun, that, args) {
try {
fun.apply(that, args);
} catch (err) { // shouldn't happen
if (window.PouchDB) {
window.PouchDB.emit('error', err);
}
}
}
function applyNext() {
if (taskQueue.running || !taskQueue.queue.length) {
return;
}
taskQueue.running = true;
var item = taskQueue.queue.shift();
item.action(function (err, res) {
tryCode(item.callback, this, [err, res]);
taskQueue.running = false;
process.nextTick(applyNext);
});
}
function idbError(callback) {
return function (event) {
var message = (event.target && event.target.error &&
event.target.error.name) || event.target;
callback(errors.error(errors.IDB_ERROR, message, event.type));
};
}
function isModernIdb() {
// check for outdated implementations of IDB
// that rely on the setVersion method instead of onupgradeneeded (issue #1207)
// cache based on appVersion, in case the browser is updated
var cacheKey = "_pouch__checkModernIdb_" +
(global.navigator && global.navigator.appVersion);
var cached = utils.hasLocalStorage() && global.localStorage[cacheKey];
if (cached) {
return JSON.parse(cached);
}
var dbName = '_pouch__checkModernIdb';
var result = global.indexedDB.open(dbName, 1).onupgradeneeded === null;
if (global.indexedDB.deleteDatabase) {
global.indexedDB.deleteDatabase(dbName); // db no longer needed
}
if (utils.hasLocalStorage()) {
global.localStorage[cacheKey] = JSON.stringify(result); // cache
}
return result;
}
// Unfortunately, the metadata has to be stringified
// when it is put into the database, because otherwise
// IndexedDB can throw errors for deeply-nested objects.
// Originally we just used JSON.parse/JSON.stringify; now
// we use this custom vuvuzela library that avoids recursion.
// If we could do it all over again, we'd probably use a
// format for the revision trees other than JSON.
function encodeMetadata(metadata, winningRev, deleted) {
var storedObject = {data: vuvuzela.stringify(metadata)};
storedObject.winningRev = winningRev;
storedObject.deletedOrLocal = deleted ? '1' : '0';
storedObject.id = metadata.id;
return storedObject;
}
function decodeMetadata(storedObject) {
if (!storedObject) {
return null;
}
if (!storedObject.data) {
// old format, when we didn't store it stringified
return storedObject;
}
var metadata = vuvuzela.parse(storedObject.data);
metadata.winningRev = storedObject.winningRev;
metadata.deletedOrLocal = storedObject.deletedOrLocal === '1';
return metadata;
}
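// Illustrative sketch (not part of the PouchDB source): the stored shape
// produced by encodeMetadata and reversed by decodeMetadata, assuming a
// metadata object with id 'mydoc'.
//
//   encodeMetadata({id: 'mydoc', rev_tree: [/* ... */]}, '2-abc', false);
//   // => { id: 'mydoc', winningRev: '2-abc', deletedOrLocal: '0',
//   //      data: '<vuvuzela-stringified metadata>' }
//
//   decodeMetadata(storedObject);
//   // => the parsed metadata with winningRev and deletedOrLocal (as a
//   //    boolean) copied back onto it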
function IdbPouch(opts, callback) {
var api = this;
taskQueue.queue.push({
action: function (thisCallback) {
init(api, opts, thisCallback);
},
callback: callback
});
applyNext();
}
function init(api, opts, callback) {
// IndexedDB requires a versioned database structure, so we use the
// version here to manage migrations.
var ADAPTER_VERSION = 3;
// The object stores created for each database
// DOC_STORE stores the document meta data, its revision history and state
// Keyed by document id
var DOC_STORE = 'document-store';
// BY_SEQ_STORE stores a particular version of a document, keyed by its
// sequence id
var BY_SEQ_STORE = 'by-sequence';
// Where we store attachments
var ATTACH_STORE = 'attach-store';
// Where we store database-wide meta data in a single record
// keyed by id: META_STORE
var META_STORE = 'meta-store';
// Where we store local documents
var LOCAL_STORE = 'local-store';
// Where we detect blob support
var DETECT_BLOB_SUPPORT_STORE = 'detect-blob-support';
var name = opts.name;
var blobSupport = null;
var instanceId = null;
var idStored = false;
var idb = null;
var docCount = -1;
function createSchema(db) {
db.createObjectStore(DOC_STORE, {keyPath : 'id'})
.createIndex('seq', 'seq', {unique: true});
db.createObjectStore(BY_SEQ_STORE, {autoIncrement: true})
.createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'});
db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false});
db.createObjectStore(DETECT_BLOB_SUPPORT_STORE);
}
// migration to version 2
// unfortunately "deletedOrLocal" is a misnomer now that we no longer
// store local docs in the main doc-store, but whaddyagonnado
function addDeletedOrLocalIndex(e, callback) {
var transaction = e.currentTarget.transaction;
var docStore = transaction.objectStore(DOC_STORE);
docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false});
docStore.openCursor().onsuccess = function (event) {
var cursor = event.target.result;
if (cursor) {
var metadata = cursor.value;
var deleted = utils.isDeleted(metadata);
metadata.deletedOrLocal = deleted ? "1" : "0";
docStore.put(metadata);
cursor["continue"]();
} else {
callback(transaction);
}
};
}
// migrations to get to version 3
function createLocalStoreSchema(db) {
db.createObjectStore(LOCAL_STORE, {keyPath: '_id'})
.createIndex('_doc_id_rev', '_doc_id_rev', {unique: true});
}
function migrateLocalStore(e, tx) {
tx = tx || e.currentTarget.transaction;
var localStore = tx.objectStore(LOCAL_STORE);
var docStore = tx.objectStore(DOC_STORE);
var seqStore = tx.objectStore(BY_SEQ_STORE);
var cursor = docStore.openCursor();
cursor.onsuccess = function (event) {
var cursor = event.target.result;
if (cursor) {
var metadata = cursor.value;
var docId = metadata.id;
var local = utils.isLocalId(docId);
var rev = merge.winningRev(metadata);
if (local) {
var docIdRev = docId + "::" + rev;
// remove all seq entries
// associated with this docId
var start = docId + "::";
var end = docId + "::~";
var index = seqStore.index('_doc_id_rev');
var range = global.IDBKeyRange.bound(start, end, false, false);
var seqCursor = index.openCursor(range);
seqCursor.onsuccess = function (e) {
seqCursor = e.target.result;
if (!seqCursor) {
// done
docStore["delete"](cursor.primaryKey);
cursor["continue"]();
} else {
var data = seqCursor.value;
if (data._doc_id_rev === docIdRev) {
localStore.put(data);
}
seqStore["delete"](seqCursor.primaryKey);
seqCursor["continue"]();
}
};
} else {
cursor["continue"]();
}
}
};
}
api.type = function () {
return 'idb';
};
api._id = utils.toPromise(function (callback) {
callback(null, instanceId);
});
api._bulkDocs = function idb_bulkDocs(req, opts, callback) {
var newEdits = opts.new_edits;
var userDocs = req.docs;
// Parse the docs, give them a sequence number for the result
var docInfos = userDocs.map(function (doc, i) {
if (doc._id && utils.isLocalId(doc._id)) {
return doc;
}
var newDoc = utils.parseDoc(doc, newEdits);
newDoc._bulk_seq = i;
return newDoc;
});
var docInfoErrors = docInfos.filter(function (docInfo) {
return docInfo.error;
});
if (docInfoErrors.length) {
return callback(docInfoErrors[0]);
}
var results = new Array(docInfos.length);
var fetchedDocs = new utils.Map();
var updateSeq = 0;
var numDocsWritten = 0;
function writeMetaData(e) {
var meta = e.target.result;
meta.updateSeq = (meta.updateSeq || 0) + updateSeq;
txn.objectStore(META_STORE).put(meta);
}
function checkDoneWritingDocs() {
if (++numDocsWritten === docInfos.length) {
txn.objectStore(META_STORE).get(META_STORE).onsuccess = writeMetaData;
}
}
function processDocs() {
if (!docInfos.length) {
return;
}
var idsToDocs = new utils.Map();
docInfos.forEach(function (currentDoc, resultsIdx) {
if (currentDoc._id && utils.isLocalId(currentDoc._id)) {
api[currentDoc._deleted ? '_removeLocal' : '_putLocal'](
currentDoc, {ctx: txn}, function (err, resp) {
if (err) {
results[resultsIdx] = err;
} else {
results[resultsIdx] = {};
}
checkDoneWritingDocs();
});
return;
}
var id = currentDoc.metadata.id;
if (idsToDocs.has(id)) {
idsToDocs.get(id).push([currentDoc, resultsIdx]);
} else {
idsToDocs.set(id, [[currentDoc, resultsIdx]]);
}
});
// in the case of new_edits, the user can provide multiple docs
// with the same id. these need to be processed sequentially
idsToDocs.forEach(function (docs, id) {
var numDone = 0;
function docWritten() {
checkDoneWritingDocs();
if (++numDone < docs.length) {
nextDoc();
}
}
function nextDoc() {
var value = docs[numDone];
var currentDoc = value[0];
var resultsIdx = value[1];
if (fetchedDocs.has(id)) {
updateDoc(fetchedDocs.get(id), currentDoc, resultsIdx, docWritten);
} else {
insertDoc(currentDoc, resultsIdx, docWritten);
}
}
nextDoc();
});
}
function fetchExistingDocs(callback) {
if (!docInfos.length) {
return callback();
}
var numFetched = 0;
function checkDone() {
if (++numFetched === docInfos.length) {
callback();
}
}
docInfos.forEach(function (docInfo) {
if (docInfo._id && utils.isLocalId(docInfo._id)) {
return checkDone(); // skip local docs
}
var id = docInfo.metadata.id;
var req = txn.objectStore(DOC_STORE).get(id);
req.onsuccess = function process_docRead(event) {
var metadata = decodeMetadata(event.target.result);
if (metadata) {
fetchedDocs.set(id, metadata);
}
checkDone();
};
});
}
function complete() {
var aresults = results.map(function (result) {
if (result._bulk_seq) {
delete result._bulk_seq;
} else if (!Object.keys(result).length) {
return {
ok: true
};
}
if (result.error) {
return result;
}
var metadata = result.metadata;
var rev = merge.winningRev(metadata);
return {
ok: true,
id: metadata.id,
rev: rev
};
});
IdbPouch.Changes.notify(name);
docCount = -1; // invalidate
callback(null, aresults);
}
function preprocessAttachment(att, finish) {
if (att.stub) {
return finish();
}
if (typeof att.data === 'string') {
var data;
try {
data = atob(att.data);
} catch (e) {
var err = errors.error(errors.BAD_ARG,
"Attachments need to be base64 encoded");
return callback(err);
}
if (blobSupport) {
var type = att.content_type;
data = utils.fixBinary(data);
att.data = utils.createBlob([data], {type: type});
}
utils.MD5(data).then(function (result) {
att.digest = 'md5-' + result;
finish();
});
return;
}
var reader = new FileReader();
reader.onloadend = function (e) {
var binary = utils.arrayBufferToBinaryString(this.result || '');
if (!blobSupport) {
att.data = btoa(binary);
}
utils.MD5(binary).then(function (result) {
att.digest = 'md5-' + result;
finish();
});
};
reader.readAsArrayBuffer(att.data);
}
function preprocessAttachments(callback) {
if (!docInfos.length) {
return callback();
}
var docv = 0;
docInfos.forEach(function (docInfo) {
var attachments = docInfo.data && docInfo.data._attachments ?
Object.keys(docInfo.data._attachments) : [];
if (!attachments.length) {
return done();
}
var recv = 0;
function attachmentProcessed() {
recv++;
if (recv === attachments.length) {
done();
}
}
for (var key in docInfo.data._attachments) {
if (docInfo.data._attachments.hasOwnProperty(key)) {
preprocessAttachment(docInfo.data._attachments[key],
attachmentProcessed);
}
}
});
function done() {
docv++;
if (docInfos.length === docv) {
callback();
}
}
}
function writeDoc(docInfo, winningRev, deleted, callback, resultsIdx) {
var err = null;
var recv = 0;
docInfo.data._id = docInfo.metadata.id;
docInfo.data._rev = docInfo.metadata.rev;
if (deleted) {
docInfo.data._deleted = true;
}
var attachments = docInfo.data._attachments ?
Object.keys(docInfo.data._attachments) : [];
function collectResults(attachmentErr) {
if (!err) {
if (attachmentErr) {
err = attachmentErr;
callback(err);
} else if (recv === attachments.length) {
finish();
}
}
}
function attachmentSaved(err) {
recv++;
collectResults(err);
}
for (var key in docInfo.data._attachments) {
if (!docInfo.data._attachments[key].stub) {
var data = docInfo.data._attachments[key].data;
delete docInfo.data._attachments[key].data;
var digest = docInfo.data._attachments[key].digest;
saveAttachment(docInfo, digest, data, attachmentSaved);
} else {
recv++;
collectResults();
}
}
function finish() {
updateSeq++;
docInfo.data._doc_id_rev = docInfo.data._id + "::" + docInfo.data._rev;
var seqStore = txn.objectStore(BY_SEQ_STORE);
var index = seqStore.index('_doc_id_rev');
function afterPut(e) {
var metadata = docInfo.metadata;
metadata.seq = e.target.result;
// Current _rev is calculated from _rev_tree on read
delete metadata.rev;
var metadataToStore = encodeMetadata(metadata, winningRev, deleted);
var metaDataReq = txn.objectStore(DOC_STORE).put(metadataToStore);
metaDataReq.onsuccess = function () {
delete metadata.deletedOrLocal;
delete metadata.winningRev;
results[resultsIdx] = docInfo;
fetchedDocs.set(docInfo.metadata.id, docInfo.metadata);
utils.call(callback);
};
}
var putReq = seqStore.put(docInfo.data);
putReq.onsuccess = afterPut;
putReq.onerror = function (e) {
// ConstraintError, need to update, not put (see #1638 for details)
e.preventDefault(); // avoid transaction abort
e.stopPropagation(); // avoid transaction onerror
var getKeyReq = index.getKey(docInfo.data._doc_id_rev);
getKeyReq.onsuccess = function (e) {
var putReq = seqStore.put(docInfo.data, e.target.result);
updateSeq--; // discount, since it's an update, not a new seq
putReq.onsuccess = afterPut;
};
};
}
if (!attachments.length) {
finish();
}
}
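// Merge the incoming revision into the stored rev tree; under new_edits,
// anything that isn't a clean new leaf (or a re-delete of a deleted doc) is
// reported as a conflict, otherwise the winning rev is recomputed and written.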
function updateDoc(oldDoc, docInfo, resultsIdx, callback) {
var merged =
merge.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000);
var wasPreviouslyDeleted = utils.isDeleted(oldDoc);
var deleted = utils.isDeleted(docInfo.metadata);
var inConflict = (wasPreviouslyDeleted && deleted && newEdits) ||
(!wasPreviouslyDeleted && newEdits && merged.conflicts !== 'new_leaf');
if (inConflict) {
results[resultsIdx] = makeErr(errors.REV_CONFLICT, docInfo._bulk_seq);
return callback();
}
docInfo.metadata.rev_tree = merged.tree;
// recalculate
var winningRev = merge.winningRev(docInfo.metadata);
deleted = utils.isDeleted(docInfo.metadata, winningRev);
writeDoc(docInfo, winningRev, deleted, callback, resultsIdx);
}
function insertDoc(docInfo, resultsIdx, callback) {
var winningRev = merge.winningRev(docInfo.metadata);
var deleted = utils.isDeleted(docInfo.metadata, winningRev);
// Can't insert new deleted documents
if ('was_delete' in opts && deleted) {
results[resultsIdx] = errors.MISSING_DOC;
return callback();
}
writeDoc(docInfo, winningRev, deleted, callback, resultsIdx);
}
// Insert sequence number into the error so we can sort later
function makeErr(err, seq) {
err._bulk_seq = seq;
return err;
}
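// Store the attachment body keyed by digest, merging in a back-reference
// ('id@rev') from the doc that uses it.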
function saveAttachment(docInfo, digest, data, callback) {
var objectStore = txn.objectStore(ATTACH_STORE);
objectStore.get(digest).onsuccess = function (e) {
var originalRefs = e.target.result && e.target.result.refs || {};
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@');
var newAtt = {
digest: digest,
body: data,
refs: originalRefs
};
newAtt.refs[ref] = true;
objectStore.put(newAtt).onsuccess = function (e) {
utils.call(callback);
};
};
}
var txn;
preprocessAttachments(function () {
var stores = [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE, META_STORE,
LOCAL_STORE];
txn = idb.transaction(stores, 'readwrite');
txn.onerror = idbError(callback);
txn.ontimeout = idbError(callback);
txn.oncomplete = complete;
fetchExistingDocs(processDocs);
});
};
// First we look up the metadata in the ids database, then we fetch the
// current revision(s) from the by sequence store
api._get = function idb_get(id, opts, callback) {
var doc;
var metadata;
var err;
var txn;
opts = utils.clone(opts);
if (opts.ctx) {
txn = opts.ctx;
} else {
txn =
idb.transaction([DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
}
function finish() {
callback(err, {doc: doc, metadata: metadata, ctx: txn});
}
txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) {
metadata = decodeMetadata(e.target.result);
// we can determine the result here if:
// 1. there is no such document
// 2. the document is deleted and we don't ask about specific rev
// When we ask with opts.rev we expect the answer to be either
// doc (possibly with _deleted=true) or missing error
if (!metadata) {
err = errors.MISSING_DOC;
return finish();
}
if (utils.isDeleted(metadata) && !opts.rev) {
err = errors.error(errors.MISSING_DOC, "deleted");
return finish();
}
var objectStore = txn.objectStore(BY_SEQ_STORE);
// metadata.winningRev was added later, so older DBs might not have it
var rev = opts.rev || metadata.winningRev || merge.winningRev(metadata);
var key = metadata.id + '::' + rev;
objectStore.index('_doc_id_rev').get(key).onsuccess = function (e) {
doc = e.target.result;
if (doc && doc._doc_id_rev) {
delete(doc._doc_id_rev);
}
if (!doc) {
err = errors.MISSING_DOC;
return finish();
}
finish();
};
};
};
api._getAttachment = function (attachment, opts, callback) {
var txn;
opts = utils.clone(opts);
if (opts.ctx) {
txn = opts.ctx;
} else {
txn =
idb.transaction([DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly');
}
var digest = attachment.digest;
var type = attachment.content_type;
txn.objectStore(ATTACH_STORE).get(digest).onsuccess = function (e) {
var data = e.target.result.body;
if (opts.encode) {
if (!data) {
callback(null, '');
} else if (typeof data !== 'string') { // we have blob support
var reader = new FileReader();
reader.onloadend = function (e) {
var binary = utils.arrayBufferToBinaryString(this.result || '');
callback(null, btoa(binary));
};
reader.readAsArrayBuffer(data);
} else { // no blob support
callback(null, data);
}
} else {
if (!data) {
callback(null, utils.createBlob([''], {type: type}));
} else if (typeof data !== 'string') { // we have blob support
callback(null, data);
} else { // no blob support
data = utils.fixBinary(atob(data));
callback(null, utils.createBlob([data], {type: type}));
}
}
};
};
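// Core of _allDocs: build an IDBKeyRange from startkey/endkey/key, walk the
// doc store with a cursor, and apply the skip, limit, descending, include_docs
// and conflicts options.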
function allDocsQuery(totalRows, opts, callback) {
var start = 'startkey' in opts ? opts.startkey : false;
var end = 'endkey' in opts ? opts.endkey : false;
var key = 'key' in opts ? opts.key : false;
var skip = opts.skip || 0;
var limit = typeof opts.limit === 'number' ? opts.limit : -1;
var inclusiveEnd = opts.inclusive_end !== false;
var descending = 'descending' in opts && opts.descending ? 'prev' : null;
var manualDescEnd = false;
if (descending && start && end) {
// unfortunately IDB has a quirk where IDBKeyRange.bound is invalid if the
// start is less than the end, even in descending mode. Best bet
// is just to handle it manually in that case.
manualDescEnd = end;
end = false;
}
var keyRange = null;
try {
if (start && end) {
keyRange = global.IDBKeyRange.bound(start, end, false, !inclusiveEnd);
} else if (start) {
if (descending) {
keyRange = global.IDBKeyRange.upperBound(start);
} else {
keyRange = global.IDBKeyRange.lowerBound(start);
}
} else if (end) {
if (descending) {
keyRange = global.IDBKeyRange.lowerBound(end, !inclusiveEnd);
} else {
keyRange = global.IDBKeyRange.upperBound(end, !inclusiveEnd);
}
} else if (key) {
keyRange = global.IDBKeyRange.only(key);
}
} catch (e) {
if (e.name === "DataError" && e.code === 0) {
// data error, start is less than end
return callback(null, {
total_rows : totalRows,
offset : opts.skip,
rows : []
});
} else {
return callback(errors.error(errors.IDB_ERROR, e.name, e.message));
}
}
var transaction = idb.transaction([DOC_STORE, BY_SEQ_STORE], 'readonly');
transaction.oncomplete = function () {
callback(null, {
total_rows: totalRows,
offset: opts.skip,
rows: results
});
};
var oStore = transaction.objectStore(DOC_STORE);
var oCursor = descending ? oStore.openCursor(keyRange, descending)
: oStore.openCursor(keyRange);
var results = [];
oCursor.onsuccess = function (e) {
if (!e.target.result) {
return;
}
var cursor = e.target.result;
var metadata = decodeMetadata(cursor.value);
// metadata.winningRev added later, some dbs might be missing it
var winningRev = metadata.winningRev || merge.winningRev(metadata);
function allDocsInner(metadata, data) {
var doc = {
id: metadata.id,
key: metadata.id,
value: {
rev: winningRev
}
};
if (opts.include_docs) {
doc.doc = data;
doc.doc._rev = winningRev;
if (doc.doc._doc_id_rev) {
delete(doc.doc._doc_id_rev);
}
if (opts.conflicts) {
doc.doc._conflicts = merge.collectConflicts(metadata);
}
for (var att in doc.doc._attachments) {
if (doc.doc._attachments.hasOwnProperty(att)) {
doc.doc._attachments[att].stub = true;
}
}
}
var deleted = utils.isDeleted(metadata, winningRev);
if (opts.deleted === 'ok') {
// deleted docs are okay when 'keys' was requested
if (deleted) {
doc.value.deleted = true;
doc.doc = null;
}
results.push(doc);
} else if (!deleted && skip-- <= 0) {
if (manualDescEnd) {
if (inclusiveEnd && doc.key < manualDescEnd) {
return;
} else if (!inclusiveEnd && doc.key <= manualDescEnd) {
return;
}
}
results.push(doc);
if (--limit === 0) {
return;
}
}
cursor["continue"]();
}
if (!opts.include_docs) {
allDocsInner(metadata);
} else {
var index = transaction.objectStore(BY_SEQ_STORE).index('_doc_id_rev');
var key = metadata.id + "::" + winningRev;
index.get(key).onsuccess = function (event) {
allDocsInner(decodeMetadata(cursor.value), event.target.result);
};
}
};
}
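// Count non-deleted, non-local docs via the 'deletedOrLocal' index and cache
// the result in docCount.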
function countDocs(callback) {
if (docCount !== -1) {
return callback(null, docCount);
}
var count;
var txn = idb.transaction([DOC_STORE], 'readonly');
var index = txn.objectStore(DOC_STORE).index('deletedOrLocal');
index.count(global.IDBKeyRange.only("0")).onsuccess = function (e) {
count = e.target.result;
};
txn.onerror = idbError(callback);
txn.oncomplete = function () {
docCount = count;
callback(null, docCount);
};
}
api._allDocs = function idb_allDocs(opts, callback) {
// first count the total_rows
countDocs(function (err, totalRows) {
if (err) {
return callback(err);
}
if (opts.limit === 0) {
return callback(null, {
total_rows : totalRows,
offset : opts.skip,
rows : []
});
}
allDocsQuery(totalRows, opts, callback);
});
};
api._info = function idb_info(callback) {
countDocs(function (err, count) {
if (err) {
return callback(err);
}
if (idb === null) {
var error = new Error('db isn\'t open');
error.id = 'idbNull';
return callback(error);
}
var updateSeq = 0;
var txn = idb.transaction([META_STORE], 'readonly');
txn.objectStore(META_STORE).get(META_STORE).onsuccess = function (e) {
updateSeq = e.target.result && e.target.result.updateSeq || 0;
};
txn.oncomplete = function () {
callback(null, {
doc_count: count,
update_seq: updateSeq
});
};
});
};
api._changes = function (opts) {
opts = utils.clone(opts);
if (opts.continuous) {
var id = name + ':' + utils.uuid();
IdbPouch.Changes.addListener(name, id, api, opts);
IdbPouch.Changes.notify(name);
return {
cancel: function () {
IdbPouch.Changes.removeListener(name, id);
}
};
}
var descending = opts.descending ? 'prev' : null;
var lastSeq = 0;
// Ignore the `since` parameter when `descending` is true
opts.since = opts.since && !descending ? opts.since : 0;
var limit = 'limit' in opts ? opts.limit : -1;
if (limit === 0) {
limit = 1; // per CouchDB _changes spec
}
var returnDocs;
if ('returnDocs' in opts) {
returnDocs = opts.returnDocs;
} else {
returnDocs = true;
}
var results = [];
var numResults = 0;
var filter = utils.filterChange(opts);
var txn;
function fetchChanges() {
txn = idb.transaction([DOC_STORE, BY_SEQ_STORE], 'readonly');
txn.oncomplete = onTxnComplete;
var req;
if (descending) {
req = txn.objectStore(BY_SEQ_STORE)
.openCursor(global.IDBKeyRange.lowerBound(opts.since, true),
descending);
} else {
req = txn.objectStore(BY_SEQ_STORE)
.openCursor(global.IDBKeyRange.lowerBound(opts.since, true));
}
req.onsuccess = onsuccess;
req.onerror = onerror;
}
fetchChanges();
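// For each row in the by-sequence store, load its metadata and only report the
// change if this revision is the current winning rev.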
function onsuccess(event) {
var cursor = event.target.result;
if (!cursor) {
return;
}
var doc = cursor.value;
if (opts.doc_ids && opts.doc_ids.indexOf(doc._id) === -1) {
return cursor["continue"]();
}
var index = txn.objectStore(DOC_STORE);
index.get(doc._id).onsuccess = function (event) {
var metadata = decodeMetadata(event.target.result);
if (lastSeq < metadata.seq) {
lastSeq = metadata.seq;
}
// metadata.winningRev was only added later
var winningRev = metadata.winningRev || merge.winningRev(metadata);
if (doc._rev !== winningRev) {
return cursor["continue"]();
}
delete doc['_doc_id_rev'];
var change = opts.processChange(doc, metadata, opts);
change.seq = cursor.key;
if (filter(change)) {
numResults++;
if (returnDocs) {
results.push(change);
}
opts.onChange(change);
}
if (numResults !== limit) {
cursor["continue"]();
}
};
}
function onTxnComplete() {
if (!opts.continuous) {
opts.complete(null, {
results: results,
last_seq: lastSeq
});
}
}
};
api._close = function (callback) {
if (idb === null) {
return callback(errors.NOT_OPEN);
}
// https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#close
// "Returns immediately and closes the connection in a separate thread..."
idb.close();
delete cachedDBs[name];
idb = null;
callback();
};
api._getRevisionTree = function (docId, callback) {
var txn = idb.transaction([DOC_STORE], 'readonly');
var req = txn.objectStore(DOC_STORE).get(docId);
req.onsuccess = function (event) {
var doc = decodeMetadata(event.target.result);
if (!doc) {
callback(errors.MISSING_DOC);
} else {
callback(null, doc.rev_tree);
}
};
};
// This function removes the revisions of document docId that are
// listed in revs and sets the document's revision tree to rev_tree
api._doCompaction = function (docId, rev_tree, revs, callback) {
var txn = idb.transaction([DOC_STORE, BY_SEQ_STORE], 'readwrite');
var index = txn.objectStore(DOC_STORE);
index.get(docId).onsuccess = function (event) {
var metadata = decodeMetadata(event.target.result);
metadata.rev_tree = rev_tree;
var count = revs.length;
revs.forEach(function (rev) {
var index = txn.objectStore(BY_SEQ_STORE).index('_doc_id_rev');
var key = docId + "::" + rev;
index.getKey(key).onsuccess = function (e) {
var seq = e.target.result;
if (!seq) {
return;
}
txn.objectStore(BY_SEQ_STORE)["delete"](seq);
count--;
if (!count) {
// winningRev is not guaranteed to be there, since it's
// not formally migrated. deletedOrLocal is a
// now-unfortunate name that really just means "deleted"
var winningRev = metadata.winningRev ||
merge.winningRev(metadata);
var deleted = metadata.deletedOrLocal;
txn.objectStore(DOC_STORE).put(
encodeMetadata(metadata, winningRev, deleted));
}
};
});
};
txn.oncomplete = function () {
utils.call(callback);
};
};
api._getLocal = function (id, callback) {
var tx = idb.transaction([LOCAL_STORE], 'readonly');
var req = tx.objectStore(LOCAL_STORE).get(id);
req.onerror = idbError(callback);
req.onsuccess = function (e) {
var doc = e.target.result;
if (!doc) {
callback(errors.MISSING_DOC);
} else {
delete doc['_doc_id_rev'];
callback(null, doc);
}
};
};
api._putLocal = function (doc, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
delete doc._revisions; // ignore this, trust the rev
var oldRev = doc._rev;
var id = doc._id;
if (!oldRev) {
doc._rev = '0-1';
} else {
doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
}
doc._doc_id_rev = id + '::' + doc._rev;
var tx = opts.ctx;
var ret;
if (!tx) {
tx = idb.transaction([LOCAL_STORE], 'readwrite');
tx.onerror = idbError(callback);
tx.oncomplete = function () {
if (ret) {
callback(null, ret);
}
};
}
var oStore = tx.objectStore(LOCAL_STORE);
var req;
if (oldRev) {
var index = oStore.index('_doc_id_rev');
var docIdRev = id + '::' + oldRev;
req = index.get(docIdRev);
req.onsuccess = function (e) {
if (!e.target.result) {
callback(errors.REV_CONFLICT);
} else { // update
var req = oStore.put(doc);
req.onsuccess = function () {
ret = {ok: true, id: doc._id, rev: doc._rev};
if (opts.ctx) { // return immediately
callback(null, ret);
}
};
}
};
} else { // new doc
req = oStore.get(id);
req.onsuccess = function (e) {
if (e.target.result) { // already exists
callback(errors.REV_CONFLICT);
} else { // insert
var req = oStore.put(doc);
req.onsuccess = function () {
ret = {ok: true, id: doc._id, rev: doc._rev};
if (opts.ctx) { // return immediately
callback(null, ret);
}
};
}
};
}
};
api._removeLocal = function (doc, callback) {
var tx = idb.transaction([LOCAL_STORE], 'readwrite');
var ret;
tx.oncomplete = function () {
if (ret) {
callback(null, ret);
}
};
var docIdRev = doc._id + '::' + doc._rev;
var oStore = tx.objectStore(LOCAL_STORE);
var index = oStore.index('_doc_id_rev');
var req = index.get(docIdRev);
req.onerror = idbError(callback);
req.onsuccess = function (e) {
var doc = e.target.result;
if (!doc) {
callback(errors.MISSING_DOC);
} else {
var req = index.getKey(docIdRev);
req.onsuccess = function (e) {
var key = e.target.result;
oStore["delete"](key);
ret = {ok: true, id: doc._id, rev: '0-0'};
};
}
};
};
var cached = cachedDBs[name];
if (cached) {
idb = cached.idb;
blobSupport = cached.blobSupport;
instanceId = cached.instanceId;
idStored = cached.idStored;
process.nextTick(function () {
callback(null, api);
});
return;
}
var req = global.indexedDB.open(name, ADAPTER_VERSION);
if (!('openReqList' in IdbPouch)) {
IdbPouch.openReqList = {};
}
IdbPouch.openReqList[name] = req;
req.onupgradeneeded = function (e) {
var db = e.target.result;
if (e.oldVersion < 1) {
// initial schema
createSchema(db);
}
if (e.oldVersion < 3) {
createLocalStoreSchema(db);
if (e.oldVersion < 2) {
// version 2 adds the deletedOrLocal index
addDeletedOrLocalIndex(e, function (transaction) {
migrateLocalStore(e, transaction);
});
} else {
migrateLocalStore(e);
}
}
};
req.onsuccess = function (e) {
idb = e.target.result;
idb.onversionchange = function () {
idb.close();
delete cachedDBs[name];
};
idb.onabort = function () {
idb.close();
delete cachedDBs[name];
};
var txn = idb.transaction([META_STORE, DETECT_BLOB_SUPPORT_STORE],
'readwrite');
var req = txn.objectStore(META_STORE).get(META_STORE);
req.onsuccess = function (e) {
var checkSetupComplete = function () {
if (blobSupport === null || !idStored) {
return;
} else {
cachedDBs[name] = {
idb: idb,
blobSupport: blobSupport,
instanceId: instanceId,
idStored: idStored,
loaded: true
};
callback(null, api);
}
};
var meta = e.target.result || {id: META_STORE};
if (name + '_id' in meta) {
instanceId = meta[name + '_id'];
idStored = true;
checkSetupComplete();
} else {
instanceId = utils.uuid();
meta[name + '_id'] = instanceId;
txn.objectStore(META_STORE).put(meta).onsuccess = function () {
idStored = true;
checkSetupComplete();
};
}
// Detect blob support. Chrome didn't support storing Blobs until
// version 38; version 37 had a broken implementation where PNGs (and
// possibly other binary types) weren't stored correctly.
try {
var blob = utils.createBlob([''], {type: 'image/png'});
txn.objectStore(DETECT_BLOB_SUPPORT_STORE).put(blob, 'key');
txn.oncomplete = function () {
// have to do it in a separate transaction, else the correct
// content type is always returned
txn = idb.transaction([META_STORE, DETECT_BLOB_SUPPORT_STORE],
'readwrite');
var getBlobReq = txn.objectStore(
DETECT_BLOB_SUPPORT_STORE).get('key');
getBlobReq.onsuccess = function (e) {
var storedBlob = e.target.result;
var url = URL.createObjectURL(storedBlob);
utils.ajax({
url: url,
cache: true,
binary: true
}, function (err, res) {
if (err && err.status === 405) {
// firefox won't let us do that. but firefox doesn't
// have the blob type bug that Chrome does, so that's ok
blobSupport = true;
} else {
blobSupport = !!(res && res.type === 'image/png');
}
checkSetupComplete();
});
};
};
} catch (err) {
blobSupport = false;
checkSetupComplete();
}
};
};
req.onerror = idbError(callback);
}
IdbPouch.valid = function () {
// Issue #2533: we finally gave up on bug detection in favor of
// browser sniffing. Safari brought us to our knees.
var isSafari = typeof openDatabase !== 'undefined' &&
/Safari/.test(navigator.userAgent) &&
!/Chrome/.test(navigator.userAgent);
return !isSafari && global.indexedDB && isModernIdb();
};
function destroy(name, opts, callback) {
if (!('openReqList' in IdbPouch)) {
IdbPouch.openReqList = {};
}
IdbPouch.Changes.removeAllListeners(name);
// Close any open request for the "name" database to fix an IE delay.
if (IdbPouch.openReqList[name] && IdbPouch.openReqList[name].result) {
IdbPouch.openReqList[name].result.close();
}
var req = global.indexedDB.deleteDatabase(name);
req.onsuccess = function () {
//Remove open request from the list.
if (IdbPouch.openReqList[name]) {
IdbPouch.openReqList[name] = null;
}
if (utils.hasLocalStorage() && (name in global.localStorage)) {
delete global.localStorage[name];
}
delete cachedDBs[name];
callback(null, { 'ok': true });
};
req.onerror = idbError(callback);
}
IdbPouch.destroy = utils.toPromise(function (name, opts, callback) {
taskQueue.queue.push({
action: function (thisCallback) {
destroy(name, opts, thisCallback);
},
callback: callback
});
applyNext();
});
IdbPouch.Changes = new utils.Changes();
module.exports = IdbPouch;
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"../deps/errors":11,"../merge":18,"../utils":23,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"vuvuzela":58}],4:[function(_dereq_,module,exports){
module.exports = ['idb', 'websql'];
},{}],5:[function(_dereq_,module,exports){
(function (global){
'use strict';
var utils = _dereq_('../utils');
var merge = _dereq_('../merge');
var errors = _dereq_('../deps/errors');
var vuvuzela = _dereq_('vuvuzela');
function quote(str) {
return "'" + str + "'";
}
var cachedDatabases = {};
var openDB = utils.getArguments(function (args) {
if (typeof global !== 'undefined') {
if (global.navigator && global.navigator.sqlitePlugin &&
global.navigator.sqlitePlugin.openDatabase) {
return navigator.sqlitePlugin.openDatabase
.apply(navigator.sqlitePlugin, args);
} else if (global.sqlitePlugin && global.sqlitePlugin.openDatabase) {
return global.sqlitePlugin.openDatabase
.apply(global.sqlitePlugin, args);
} else {
var db = cachedDatabases[args[0]];
if (!db) {
db = cachedDatabases[args[0]] =
global.openDatabase.apply(global, args);
}
return db;
}
}
});
var POUCH_VERSION = 1;
var ADAPTER_VERSION = 4; // used to manage migrations
// The tables created for each database
// DOC_STORE stores the document meta data, its revision history and state
var DOC_STORE = quote('document-store');
// BY_SEQ_STORE stores a particular version of a document, keyed by its
// sequence id
var BY_SEQ_STORE = quote('by-sequence');
// Where we store attachments
var ATTACH_STORE = quote('attach-store');
var LOCAL_STORE = quote('local-store');
var META_STORE = quote('metadata-store');
// these indexes cover the ground for most allDocs queries
var BY_SEQ_STORE_DELETED_INDEX_SQL =
'CREATE INDEX IF NOT EXISTS \'by-seq-deleted-idx\' ON ' +
BY_SEQ_STORE + ' (seq, deleted)';
var BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL =
'CREATE UNIQUE INDEX IF NOT EXISTS \'by-seq-doc-id-rev\' ON ' +
BY_SEQ_STORE + ' (doc_id, rev)';
var DOC_STORE_WINNINGSEQ_INDEX_SQL =
'CREATE INDEX IF NOT EXISTS \'doc-winningseq-idx\' ON ' +
DOC_STORE + ' (winningseq)';
var DOC_STORE_AND_BY_SEQ_JOINER = BY_SEQ_STORE +
'.seq = ' + DOC_STORE + '.winningseq';
var SELECT_DOCS = BY_SEQ_STORE + '.seq AS seq, ' +
BY_SEQ_STORE + '.deleted AS deleted, ' +
BY_SEQ_STORE + '.json AS data, ' +
BY_SEQ_STORE + '.rev AS rev, ' +
DOC_STORE + '.json AS metadata';
function select(selector, table, joiner, where, orderBy) {
return 'SELECT ' + selector + ' FROM ' +
(typeof table === 'string' ? table : table.join(' JOIN ')) +
(joiner ? (' ON ' + joiner) : '') +
(where ? (' WHERE ' +
(typeof where === 'string' ? where : where.join(' AND '))) : '') +
(orderBy ? (' ORDER BY ' + orderBy) : '');
}
function unknownError(callback) {
return function (event) {
// event may actually be a SQLError object, so report it as such
var errorNameMatch = event && event.constructor.toString()
.match(/function ([^\(]+)/);
var errorName = (errorNameMatch && errorNameMatch[1]) || event.type;
var errorReason = event.target || event.message;
callback(errors.error(errors.WSQ_ERROR, errorReason, errorName));
};
}
function decodeUtf8(str) {
return decodeURIComponent(window.escape(str));
}
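// Decode a hex string produced by SQLite's hex() back into a binary string,
// handling both UTF-8 and UTF-16 encoded databases.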
function parseHexString(str, encoding) {
var result = '';
var charWidth = encoding === 'UTF-8' ? 2 : 4;
for (var i = 0, len = str.length; i < len; i += charWidth) {
var substring = str.substring(i, i + charWidth);
if (charWidth === 4) { // UTF-16, twiddle the bits
substring = substring.substring(2, 4) + substring.substring(0, 2);
}
result += String.fromCharCode(parseInt(substring, 16));
}
result = encoding === 'UTF-8' ? decodeUtf8(result) : result;
return result;
}
function stringifyDoc(doc) {
// don't bother storing the id/rev. it uses lots of space,
// in persistent map/reduce especially
delete doc._id;
delete doc._rev;
return JSON.stringify(doc);
}
function unstringifyDoc(doc, id, rev) {
doc = JSON.parse(doc);
doc._id = id;
doc._rev = rev;
return doc;
}
function getSize(opts) {
if ('size' in opts) {
// triggers immediate popup in iOS, fixes #2347
// e.g. 5000001 asks for 5 MB, 10000001 asks for 10 MB, etc.
return opts.size * 1000000;
}
// In iOS, doesn't matter as long as it's <= 5000000.
// Except that if you request too much, our tests fail
// because of the native "do you accept?" popup.
// In Android <=4.3, this value is actually used as an
// honest-to-god ceiling for data, so we need to
// set it to a decently high number.
var isAndroid = /Android/.test(window.navigator.userAgent);
return isAndroid ? 5000000 : 1;
}
function WebSqlPouch(opts, callback) {
var api = this;
var instanceId = null;
var name = opts.name;
var size = getSize(opts);
var idRequests = [];
var docCount = -1; // cache sqlite count(*) for performance
var encoding;
var db = openDB(name, POUCH_VERSION, name, size);
if (!db) {
return callback(errors.UNKNOWN_ERROR);
} else if (typeof db.readTransaction !== 'function') {
// doesn't exist in sqlite plugin
db.readTransaction = db.transaction;
}
function dbCreated() {
// note the db name in case the browser upgrades to idb
if (utils.hasLocalStorage()) {
global.localStorage['_pouch__websqldb_' + name] = true;
}
callback(null, api);
}
// In this migration, we added the 'deleted' and 'local' columns to the
// by-seq and doc store tables.
// To preserve existing user data, we re-process all the existing JSON
// and add these values.
// Called migration2 because it corresponds to adapter version (db_version) #2
function runMigration2(tx, callback) {
// index used for the join in the allDocs query
tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL);
tx.executeSql('ALTER TABLE ' + BY_SEQ_STORE +
' ADD COLUMN deleted TINYINT(1) DEFAULT 0', [], function () {
tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL);
tx.executeSql('ALTER TABLE ' + DOC_STORE +
' ADD COLUMN local TINYINT(1) DEFAULT 0', [], function () {
tx.executeSql('CREATE INDEX IF NOT EXISTS \'doc-store-local-idx\' ON ' +
DOC_STORE + ' (local, id)');
var sql = 'SELECT ' + DOC_STORE + '.winningseq AS seq, ' + DOC_STORE +
'.json AS metadata FROM ' + BY_SEQ_STORE + ' JOIN ' + DOC_STORE +
' ON ' + BY_SEQ_STORE + '.seq = ' + DOC_STORE + '.winningseq';
tx.executeSql(sql, [], function (tx, result) {
var deleted = [];
var local = [];
for (var i = 0; i < result.rows.length; i++) {
var item = result.rows.item(i);
var seq = item.seq;
var metadata = JSON.parse(item.metadata);
if (utils.isDeleted(metadata)) {
deleted.push(seq);
}
if (utils.isLocalId(metadata.id)) {
local.push(metadata.id);
}
}
tx.executeSql('UPDATE ' + DOC_STORE + ' SET local = 1 WHERE id IN (' +
local.map(function () {
return '?';
}).join(',') + ')', local, function () {
tx.executeSql('UPDATE ' + BY_SEQ_STORE +
' SET deleted = 1 WHERE seq IN (' + deleted.map(function () {
return '?';
}).join(',') + ')', deleted, callback);
});
});
});
});
}
// in this migration, we make all the local docs unversioned
function runMigration3(tx, callback) {
var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE +
' (id UNIQUE, rev, json)';
tx.executeSql(local, [], function () {
var sql = 'SELECT ' + DOC_STORE + '.id AS id, ' +
BY_SEQ_STORE + '.json AS data ' +
'FROM ' + BY_SEQ_STORE + ' JOIN ' +
DOC_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' +
DOC_STORE + '.winningseq WHERE local = 1';
tx.executeSql(sql, [], function (tx, res) {
var rows = [];
for (var i = 0; i < res.rows.length; i++) {
rows.push(res.rows.item(i));
}
function doNext() {
if (!rows.length) {
return callback();
}
var row = rows.shift();
var rev = JSON.parse(row.data)._rev;
tx.executeSql('INSERT INTO ' + LOCAL_STORE +
' (id, rev, json) VALUES (?,?,?)',
[row.id, rev, row.data], function (tx) {
tx.executeSql('DELETE FROM ' + DOC_STORE + ' WHERE id=?',
[row.id], function (tx) {
tx.executeSql('DELETE FROM ' + BY_SEQ_STORE + ' WHERE seq=?',
[row.seq], function () {
doNext();
});
});
});
}
doNext();
});
});
}
// in this migration, we remove doc_id_rev and just use rev
function runMigration4(tx, callback) {
function updateRows(rows, encoding) {
function doNext() {
if (!rows.length) {
return callback();
}
var row = rows.shift();
var doc_id_rev = parseHexString(row.hex, encoding);
var idx = doc_id_rev.lastIndexOf('::');
var doc_id = doc_id_rev.substring(0, idx);
var rev = doc_id_rev.substring(idx + 2);
var sql = 'UPDATE ' + BY_SEQ_STORE +
' SET doc_id=?, rev=? WHERE doc_id_rev=?';
tx.executeSql(sql, [doc_id, rev, doc_id_rev], function () {
doNext();
});
}
doNext();
}
var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN doc_id';
tx.executeSql(sql, [], function (tx) {
var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN rev';
tx.executeSql(sql, [], function (tx) {
tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL, [], function (tx) {
var sql = 'SELECT hex(doc_id_rev) as hex FROM ' + BY_SEQ_STORE;
tx.executeSql(sql, [], function (tx, res) {
var rows = [];
for (var i = 0; i < res.rows.length; i++) {
rows.push(res.rows.item(i));
}
// it sucks, but we fetch the encoding twice
tx.executeSql(
'SELECT dbid, hex(dbid) AS hexId FROM ' + META_STORE, [],
function (tx, result) {
var id = result.rows.item(0).dbid;
var hexId = result.rows.item(0).hexId;
var encoding = (hexId.length === id.length * 2) ?
'UTF-8' : 'UTF-16';
updateRows(rows, encoding);
}
);
});
});
});
});
}
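// Flush any queued db.id() callbacks now that the instance id is known, then
// detect the database's text encoding.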
function onGetInstanceId(tx) {
while (idRequests.length > 0) {
var idCallback = idRequests.pop();
idCallback(null, instanceId);
}
checkDbEncoding(tx);
}
function checkDbEncoding(tx) {
// check db encoding - utf-8 (chrome, opera) or utf-16 (safari)?
tx.executeSql('SELECT dbid, hex(dbid) AS hexId FROM ' + META_STORE, [],
function (tx, result) {
var id = result.rows.item(0).dbid;
var hexId = result.rows.item(0).hexId;
encoding = (hexId.length === id.length * 2) ? 'UTF-8' : 'UTF-16';
}
);
}
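// With the stored db_version in hand, either create the initial schema for a
// brand-new database or run the remaining migrations in order before finishing
// setup.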
function onGetVersion(tx, dbVersion) {
if (dbVersion === 0) {
// initial schema
var meta = 'CREATE TABLE IF NOT EXISTS ' + META_STORE +
' (update_seq INTEGER, dbid, db_version INTEGER)';
var attach = 'CREATE TABLE IF NOT EXISTS ' + ATTACH_STORE +
' (digest, json, body BLOB)';
var doc = 'CREATE TABLE IF NOT EXISTS ' + DOC_STORE +
' (id unique, json, winningseq)';
var seq = 'CREATE TABLE IF NOT EXISTS ' + BY_SEQ_STORE +
' (seq INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, ' +
'json, deleted TINYINT(1), doc_id, rev)';
var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE +
' (id UNIQUE, rev, json)';
// creates
tx.executeSql(attach);
tx.executeSql(local);
tx.executeSql(doc, [], function () {
tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL);
tx.executeSql(seq, [], function () {
tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL);
tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL);
tx.executeSql(meta, [], function () {
// mark the update_seq, db version, and new dbid
var initSeq = 'INSERT INTO ' + META_STORE +
' (update_seq, db_version, dbid) VALUES (?, ?, ?)';
instanceId = utils.uuid();
var initSeqArgs = [0, ADAPTER_VERSION, instanceId];
tx.executeSql(initSeq, initSeqArgs, function (tx) {
onGetInstanceId(tx);
});
});
});
});
} else { // version > 0
var setupDone = function () {
var migrated = dbVersion < ADAPTER_VERSION;
if (migrated) {
// update the db version within this transaction
tx.executeSql('UPDATE ' + META_STORE + ' SET db_version = ' +
ADAPTER_VERSION);
}
// notify db.id() callers
var sql = 'SELECT dbid FROM ' + META_STORE;
tx.executeSql(sql, [], function (tx, result) {
instanceId = result.rows.item(0).dbid;
onGetInstanceId(tx);
});
};
// would love to use promises here, but then websql
// ends the transaction early
switch (dbVersion) {
case 1:
runMigration2(tx, function () {
runMigration3(tx, function () {
runMigration4(tx, setupDone);
});
});
break;
case 2:
runMigration3(tx, function () {
runMigration4(tx, setupDone);
});
break;
case 3:
runMigration4(tx, setupDone);
break;
default:
setupDone();
break;
}
}
}
function setup() {
db.transaction(function (tx) {
// first get the version
tx.executeSql('SELECT sql FROM sqlite_master WHERE tbl_name = ' +
META_STORE, [], function (tx, result) {
if (!result.rows.length) {
// database hasn't even been created yet (version 0)
onGetVersion(tx, 0);
} else if (!/db_version/.test(result.rows.item(0).sql)) {
// table was created, but without the new db_version column,
// so add it.
tx.executeSql('ALTER TABLE ' + META_STORE +
' ADD COLUMN db_version INTEGER', [], function () {
// before version 2, this column didn't even exist
onGetVersion(tx, 1);
});
} else { // column exists, we can safely get it
tx.executeSql('SELECT db_version FROM ' + META_STORE, [],
function (tx, result) {
var dbVersion = result.rows.item(0).db_version;
onGetVersion(tx, dbVersion);
});
}
});
}, unknownError(callback), dbCreated);
}
if (utils.isCordova() && typeof global !== 'undefined') {
// wait until the custom API is set up in pouch.adapters before doing setup
global.addEventListener(name + '_pouch', function cordova_init() {
global.removeEventListener(name + '_pouch', cordova_init, false);
setup();
}, false);
} else {
setup();
}
api.type = function () {
return 'websql';
};
api._id = utils.toPromise(function (callback) {
callback(null, instanceId);
});
api._info = function (callback) {
db.readTransaction(function (tx) {
countDocs(tx, function (docCount) {
var sql = 'SELECT update_seq FROM ' + META_STORE;
tx.executeSql(sql, [], function (tx, result) {
var updateSeq = result.rows.item(0).update_seq;
callback(null, {
doc_count: docCount,
update_seq: updateSeq
});
});
});
}, unknownError(callback));
};
api._bulkDocs = function (req, opts, callback) {
var newEdits = opts.new_edits;
var userDocs = req.docs;
// Parse the docs, give them a sequence number for the result
var docInfos = userDocs.map(function (doc, i) {
if (doc._id && utils.isLocalId(doc._id)) {
return doc;
}
var newDoc = utils.parseDoc(doc, newEdits);
newDoc._bulk_seq = i;
return newDoc;
});
var docInfoErrors = docInfos.filter(function (docInfo) {
return docInfo.error;
});
if (docInfoErrors.length) {
return callback(docInfoErrors[0]);
}
var tx;
var results = new Array(docInfos.length);
var updateSeq = 0;
var fetchedDocs = new utils.Map();
var numDocsWritten = 0;
function complete() {
var aresults = results.map(function (result) {
if (result._bulk_seq) {
delete result._bulk_seq;
} else if (!Object.keys(result).length) {
return {
ok: true
};
}
if (result.error) {
return result;
}
var metadata = result.metadata;
var rev = merge.winningRev(metadata);
return {
ok: true,
id: metadata.id,
rev: rev
};
});
WebSqlPouch.Changes.notify(name);
var updateseq = 'SELECT update_seq FROM ' + META_STORE;
tx.executeSql(updateseq, [], function (tx, result) {
var update_seq = result.rows.item(0).update_seq + updateSeq;
var sql = 'UPDATE ' + META_STORE + ' SET update_seq=?';
tx.executeSql(sql, [update_seq], function () {
callback(null, aresults);
});
});
}
function preprocessAttachment(att, finish) {
if (att.stub) {
return finish();
}
if (typeof att.data === 'string') {
try {
att.data = atob(att.data);
} catch (e) {
var err = errors.error(errors.BAD_ARG,
"Attachments need to be base64 encoded");
return callback(err);
}
var data = utils.fixBinary(att.data);
att.data = utils.createBlob([data], {type: att.content_type});
}
var reader = new FileReader();
reader.onloadend = function (e) {
var binary = utils.arrayBufferToBinaryString(this.result);
att.data = binary;
utils.MD5(binary).then(function (result) {
att.digest = 'md5-' + result;
finish();
});
};
reader.readAsArrayBuffer(att.data);
}
function preprocessAttachments(callback) {
if (!docInfos.length) {
return callback();
}
var docv = 0;
docInfos.forEach(function (docInfo) {
var attachments = docInfo.data && docInfo.data._attachments ?
Object.keys(docInfo.data._attachments) : [];
var recv = 0;
if (!attachments.length) {
return done();
}
function processedAttachment() {
recv++;
if (recv === attachments.length) {
done();
}
}
for (var key in docInfo.data._attachments) {
if (docInfo.data._attachments.hasOwnProperty(key)) {
preprocessAttachment(docInfo.data._attachments[key],
processedAttachment);
}
}
});
function done() {
docv++;
if (docInfos.length === docv) {
callback();
}
}
}
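// Save non-stub attachments, insert the doc body into the by-sequence table
// (falling back to an UPDATE on a constraint error, see #1638), then insert or
// update the doc-store row so it points at the winning sequence.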
function writeDoc(docInfo, winningRev, deleted, callback, isUpdate,
resultsIdx) {
function finish() {
updateSeq++;
var data = docInfo.data;
var deletedInt = deleted ? 1 : 0;
var id = data._id;
var rev = data._rev;
var json = stringifyDoc(data);
var sql = 'INSERT INTO ' + BY_SEQ_STORE +
' (doc_id, rev, json, deleted) VALUES (?, ?, ?, ?);';
var sqlArgs = [id, rev, json, deletedInt];
tx.executeSql(sql, sqlArgs, function (tx, result) {
dataWritten(tx, result.insertId);
}, function () {
// constraint error, recover by updating instead (see #1638)
var fetchSql = select('seq', BY_SEQ_STORE, null,
'doc_id=? AND rev=?');
tx.executeSql(fetchSql, [id, rev], function (tx, res) {
var seq = res.rows.item(0).seq;
var sql = 'UPDATE ' + BY_SEQ_STORE +
' SET json=?, deleted=? WHERE doc_id=? AND rev=?;';
var sqlArgs = [json, deletedInt, id, rev];
tx.executeSql(sql, sqlArgs, function (tx) {
updateSeq--; // discount, since it's an update, not a new seq
dataWritten(tx, seq);
});
});
return false; // ack that we've handled the error
});
}
function collectResults(attachmentErr) {
if (!err) {
if (attachmentErr) {
err = attachmentErr;
callback(err);
} else if (recv === attachments.length) {
finish();
}
}
}
var err = null;
var recv = 0;
docInfo.data._id = docInfo.metadata.id;
docInfo.data._rev = docInfo.metadata.rev;
if (deleted) {
docInfo.data._deleted = true;
}
var attachments = docInfo.data._attachments ?
Object.keys(docInfo.data._attachments) : [];
function attachmentSaved(err) {
recv++;
collectResults(err);
}
for (var key in docInfo.data._attachments) {
if (!docInfo.data._attachments[key].stub) {
var data = docInfo.data._attachments[key].data;
delete docInfo.data._attachments[key].data;
var digest = docInfo.data._attachments[key].digest;
saveAttachment(docInfo, digest, data, attachmentSaved);
} else {
recv++;
collectResults();
}
}
if (!attachments.length) {
finish();
}
function dataWritten(tx, seq) {
docInfo.metadata.seq = seq;
delete docInfo.metadata.rev;
var sql = isUpdate ?
'UPDATE ' + DOC_STORE +
' SET json=?, winningseq=(SELECT seq FROM ' + BY_SEQ_STORE +
' WHERE doc_id=' + DOC_STORE + '.id AND rev=?) WHERE id=?'
: 'INSERT INTO ' + DOC_STORE +
' (id, winningseq, json) VALUES (?, ?, ?);';
var metadataStr = vuvuzela.stringify(docInfo.metadata);
var id = docInfo.metadata.id;
var params = isUpdate ?
[metadataStr, winningRev, id] :
[id, seq, metadataStr];
tx.executeSql(sql, params, function () {
results[resultsIdx] = docInfo;
fetchedDocs.set(id, docInfo.metadata);
callback();
});
}
}
function updateDoc(oldDoc, docInfo, resultsIdx, callback) {
var merged =
merge.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000);
var deleted = utils.isDeleted(docInfo.metadata);
var oldDocDeleted = utils.isDeleted(oldDoc);
var inConflict = (oldDocDeleted && deleted && newEdits) ||
(!oldDocDeleted && newEdits && merged.conflicts !== 'new_leaf');
if (inConflict) {
results[resultsIdx] = makeErr(errors.REV_CONFLICT, docInfo._bulk_seq);
return callback();
}
docInfo.metadata.rev_tree = merged.tree;
// recalculate
var winningRev = merge.winningRev(docInfo.metadata);
deleted = utils.isDeleted(docInfo.metadata, winningRev);
writeDoc(docInfo, winningRev, deleted, callback, true, resultsIdx);
}
function insertDoc(docInfo, resultsIdx, callback) {
// Can't insert new deleted documents
var winningRev = merge.winningRev(docInfo.metadata);
var deleted = utils.isDeleted(docInfo.metadata, winningRev);
if ('was_delete' in opts && deleted) {
results[resultsIdx] = errors.MISSING_DOC;
return callback();
}
writeDoc(docInfo, winningRev, deleted, callback, false, resultsIdx);
}
function checkDoneWritingDocs() {
if (++numDocsWritten === docInfos.length) {
complete();
}
}
function processDocs() {
if (!docInfos.length) {
return complete();
}
var idsToDocs = new utils.Map();
docInfos.forEach(function (currentDoc, resultsIdx) {
if (currentDoc._id && utils.isLocalId(currentDoc._id)) {
api[currentDoc._deleted ? '_removeLocal' : '_putLocal'](
currentDoc, {ctx: tx}, function (err, resp) {
if (err) {
results[resultsIdx] = err;
} else {
results[resultsIdx] = {};
}
checkDoneWritingDocs();
});
return;
}
var id = currentDoc.metadata.id;
if (idsToDocs.has(id)) {
idsToDocs.get(id).push([currentDoc, resultsIdx]);
} else {
idsToDocs.set(id, [[currentDoc, resultsIdx]]);
}
});
// in the case of new_edits, the user can provide multiple docs
// with the same id. these need to be processed sequentially
idsToDocs.forEach(function (docs, id) {
var numDone = 0;
function docWritten() {
checkDoneWritingDocs();
if (++numDone < docs.length) {
nextDoc();
}
}
function nextDoc() {
var value = docs[numDone];
var currentDoc = value[0];
var resultsIdx = value[1];
if (fetchedDocs.has(id)) {
updateDoc(fetchedDocs.get(id), currentDoc, resultsIdx, docWritten);
} else {
insertDoc(currentDoc, resultsIdx, docWritten);
}
}
nextDoc();
});
}
function fetchExistingDocs(callback) {
if (!docInfos.length) {
return callback();
}
var numFetched = 0;
function checkDone() {
if (++numFetched === docInfos.length) {
callback();
}
}
docInfos.forEach(function (docInfo) {
if (docInfo._id && utils.isLocalId(docInfo._id)) {
return checkDone(); // skip local docs
}
var id = docInfo.metadata.id;
tx.executeSql('SELECT json FROM ' + DOC_STORE +
' WHERE id = ?', [id], function (tx, result) {
if (result.rows.length) {
var metadata = vuvuzela.parse(result.rows.item(0).json);
fetchedDocs.set(id, metadata);
}
checkDone();
});
});
}
// Insert sequence number into the error so we can sort later
function makeErr(err, seq) {
err._bulk_seq = seq;
return err;
}
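// Insert the attachment row keyed by digest, or merge a new 'id@rev' reference
// into an existing row's refs.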
function saveAttachment(docInfo, digest, data, callback) {
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@');
var newAtt = {digest: digest};
var sql = 'SELECT digest, json FROM ' + ATTACH_STORE + ' WHERE digest=?';
tx.executeSql(sql, [digest], function (tx, result) {
if (!result.rows.length) {
newAtt.refs = {};
newAtt.refs[ref] = true;
sql = 'INSERT INTO ' + ATTACH_STORE +
'(digest, json, body) VALUES (?, ?, ?)';
tx.executeSql(sql, [digest, JSON.stringify(newAtt), data],
function () {
callback();
});
} else {
newAtt.refs = JSON.parse(result.rows.item(0).json).refs;
sql = 'UPDATE ' + ATTACH_STORE + ' SET json=?, body=? WHERE digest=?';
tx.executeSql(sql, [JSON.stringify(newAtt), data, digest],
function () {
callback();
});
}
});
}
preprocessAttachments(function () {
db.transaction(function (txn) {
tx = txn;
fetchExistingDocs(processDocs);
}, unknownError(callback), function () {
docCount = -1;
});
});
};
api._get = function (id, opts, callback) {
opts = utils.clone(opts);
var doc;
var metadata;
var err;
if (!opts.ctx) {
db.readTransaction(function (txn) {
opts.ctx = txn;
api._get(id, opts, callback);
});
return;
}
var tx = opts.ctx;
function finish() {
callback(err, {doc: doc, metadata: metadata, ctx: tx});
}
var sql;
var sqlArgs;
if (opts.rev) {
sql = select(
SELECT_DOCS,
[DOC_STORE, BY_SEQ_STORE],
DOC_STORE + '.id=' + BY_SEQ_STORE + '.doc_id',
[BY_SEQ_STORE + '.doc_id=?', BY_SEQ_STORE + '.rev=?']);
sqlArgs = [id, opts.rev];
} else {
sql = select(
SELECT_DOCS,
[DOC_STORE, BY_SEQ_STORE],
DOC_STORE_AND_BY_SEQ_JOINER,
DOC_STORE + '.id=?');
sqlArgs = [id];
}
tx.executeSql(sql, sqlArgs, function (a, results) {
if (!results.rows.length) {
err = errors.MISSING_DOC;
return finish();
}
var item = results.rows.item(0);
metadata = vuvuzela.parse(item.metadata);
if (item.deleted && !opts.rev) {
err = errors.error(errors.MISSING_DOC, 'deleted');
return finish();
}
doc = unstringifyDoc(item.data, metadata.id, item.rev);
finish();
});
};
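// Count winning, non-deleted revisions and cache the result in docCount.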
function countDocs(tx, callback) {
if (docCount !== -1) {
return callback(docCount);
}
// count the total rows
var sql = select(
'COUNT(' + DOC_STORE + '.id) AS \'num\'',
[DOC_STORE, BY_SEQ_STORE],
DOC_STORE_AND_BY_SEQ_JOINER,
BY_SEQ_STORE + '.deleted=0');
tx.executeSql(sql, [], function (tx, result) {
docCount = result.rows.item(0).num;
callback(docCount);
});
}
api._allDocs = function (opts, callback) {
var results = [];
var totalRows;
var start = 'startkey' in opts ? opts.startkey : false;
var end = 'endkey' in opts ? opts.endkey : false;
var key = 'key' in opts ? opts.key : false;
var descending = 'descending' in opts ? opts.descending : false;
var limit = 'limit' in opts ? opts.limit : -1;
var offset = 'skip' in opts ? opts.skip : 0;
var inclusiveEnd = opts.inclusive_end !== false;
var sqlArgs = [];
var criteria = [];
if (key !== false) {
criteria.push(DOC_STORE + '.id = ?');
sqlArgs.push(key);
} else if (start !== false || end !== false) {
if (start !== false) {
criteria.push(DOC_STORE + '.id ' + (descending ? '<=' : '>=') + ' ?');
sqlArgs.push(start);
}
if (end !== false) {
var comparator = descending ? '>' : '<';
if (inclusiveEnd) {
comparator += '=';
}
criteria.push(DOC_STORE + '.id ' + comparator + ' ?');
sqlArgs.push(end);
}
if (key !== false) {
criteria.push(DOC_STORE + '.id = ?');
sqlArgs.push(key);
}
}
if (opts.deleted !== 'ok') {
// report deleted if keys are specified
criteria.push(BY_SEQ_STORE + '.deleted = 0');
}
db.readTransaction(function (tx) {
// first count up the total rows
countDocs(tx, function (count) {
totalRows = count;
if (limit === 0) {
return;
}
// then actually fetch the documents
var sql = select(
SELECT_DOCS,
[DOC_STORE, BY_SEQ_STORE],
DOC_STORE_AND_BY_SEQ_JOINER,
criteria,
DOC_STORE + '.id ' + (descending ? 'DESC' : 'ASC')
);
sql += ' LIMIT ' + limit + ' OFFSET ' + offset;
tx.executeSql(sql, sqlArgs, function (tx, result) {
for (var i = 0, l = result.rows.length; i < l; i++) {
var item = result.rows.item(i);
var metadata = vuvuzela.parse(item.metadata);
var data = unstringifyDoc(item.data, metadata.id, item.rev);
var winningRev = data._rev;
var doc = {
id: metadata.id,
key: metadata.id,
value: {rev: winningRev}
};
if (opts.include_docs) {
doc.doc = data;
doc.doc._rev = winningRev;
if (opts.conflicts) {
doc.doc._conflicts = merge.collectConflicts(metadata);
}
for (var att in doc.doc._attachments) {
if (doc.doc._attachments.hasOwnProperty(att)) {
doc.doc._attachments[att].stub = true;
}
}
}
if (item.deleted) {
if (opts.deleted === 'ok') {
doc.value.deleted = true;
doc.doc = null;
} else {
continue;
}
}
results.push(doc);
}
});
});
}, unknownError(callback), function () {
callback(null, {
total_rows: totalRows,
offset: opts.skip,
rows: results
});
});
};
api._changes = function (opts) {
opts = utils.clone(opts);
if (opts.continuous) {
var id = name + ':' + utils.uuid();
WebSqlPouch.Changes.addListener(name, id, api, opts);
WebSqlPouch.Changes.notify(name);
return {
cancel: function () {
WebSqlPouch.Changes.removeListener(name, id);
}
};
}
var descending = opts.descending;
// Ignore the `since` parameter when `descending` is true
opts.since = opts.since && !descending ? opts.since : 0;
var limit = 'limit' in opts ? opts.limit : -1;
if (limit === 0) {
limit = 1; // per CouchDB _changes spec
}
var returnDocs;
if ('returnDocs' in opts) {
returnDocs = opts.returnDocs;
} else {
returnDocs = true;
}
var results = [];
var numResults = 0;
function fetchChanges() {
var criteria = [
DOC_STORE + '.winningseq > ' + opts.since
];
var sqlArgs = [];
if (opts.doc_ids) {
criteria.push(DOC_STORE + '.id IN (' + opts.doc_ids.map(function () {
return '?';
}).join(',') + ')');
sqlArgs = opts.doc_ids;
}
var sql = select(SELECT_DOCS, [DOC_STORE, BY_SEQ_STORE],
DOC_STORE_AND_BY_SEQ_JOINER, criteria,
DOC_STORE + '.winningseq ' + (descending ? 'DESC' : 'ASC'));
var filter = utils.filterChange(opts);
if (!opts.view && !opts.filter) {
// we can just limit in the query
sql += ' LIMIT ' + limit;
}
db.readTransaction(function (tx) {
tx.executeSql(sql, sqlArgs, function (tx, result) {
var lastSeq = 0;
for (var i = 0, l = result.rows.length; i < l; i++) {
var res = result.rows.item(i);
var metadata = vuvuzela.parse(res.metadata);
if (lastSeq < res.seq) {
lastSeq = res.seq;
}
var doc = unstringifyDoc(res.data, metadata.id, res.rev);
var change = opts.processChange(doc, metadata, opts);
change.seq = res.seq;
if (filter(change)) {
numResults++;
if (returnDocs) {
results.push(change);
}
opts.onChange(change);
}
if (numResults === limit) {
break;
}
}
if (!opts.continuous) {
opts.complete(null, {
results: results,
last_seq: lastSeq
});
}
});
});
}
fetchChanges();
};
api._close = function (callback) {
//WebSQL databases do not need to be closed
callback();
};
api._getAttachment = function (attachment, opts, callback) {
var res;
var tx = opts.ctx;
var digest = attachment.digest;
var type = attachment.content_type;
var sql = 'SELECT hex(body) as body FROM ' + ATTACH_STORE +
' WHERE digest=?';
tx.executeSql(sql, [digest], function (tx, result) {
// sqlite normally stores data as utf8, so even the hex() function
// "encodes" the binary data in utf8/16 before returning it. yet hex()
// is the only way to get the full data, so we do this.
var data = parseHexString(result.rows.item(0).body, encoding);
if (opts.encode) {
res = btoa(data);
} else {
data = utils.fixBinary(data);
res = utils.createBlob([data], {type: type});
}
callback(null, res);
});
};
api._getRevisionTree = function (docId, callback) {
db.readTransaction(function (tx) {
var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?';
tx.executeSql(sql, [docId], function (tx, result) {
if (!result.rows.length) {
callback(errors.MISSING_DOC);
} else {
var data = vuvuzela.parse(result.rows.item(0).metadata);
callback(null, data.rev_tree);
}
});
});
};
api._doCompaction = function (docId, rev_tree, revs, callback) {
if (!revs.length) {
return callback();
}
db.transaction(function (tx) {
var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?';
tx.executeSql(sql, [docId], function (tx, result) {
if (!result.rows.length) {
return utils.call(callback);
}
var metadata = vuvuzela.parse(result.rows.item(0).metadata);
metadata.rev_tree = rev_tree;
var numDone = 0;
revs.forEach(function (rev) {
var sql = 'DELETE FROM ' + BY_SEQ_STORE + ' WHERE doc_id=? AND rev=?';
tx.executeSql(sql, [docId, rev], function (tx) {
if (++numDone === revs.length) {
var sql = 'UPDATE ' + DOC_STORE + ' SET json = ? WHERE id = ?';
tx.executeSql(sql, [vuvuzela.stringify(metadata), docId],
function () {
callback();
});
}
});
});
});
});
};
api._getLocal = function (id, callback) {
db.readTransaction(function (tx) {
var sql = 'SELECT json, rev FROM ' + LOCAL_STORE + ' WHERE id=?';
tx.executeSql(sql, [id], function (tx, res) {
if (res.rows.length) {
var item = res.rows.item(0);
var doc = unstringifyDoc(item.json, id, item.rev);
callback(null, doc);
} else {
callback(errors.MISSING_DOC);
}
});
});
};
api._putLocal = function (doc, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
delete doc._revisions; // ignore this, trust the rev
var oldRev = doc._rev;
var id = doc._id;
var newRev;
if (!oldRev) {
newRev = doc._rev = '0-1';
} else {
newRev = doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1);
}
var json = stringifyDoc(doc);
var ret;
function putLocal(tx) {
var sql;
var values;
if (oldRev) {
sql = 'UPDATE ' + LOCAL_STORE + ' SET rev=?, json=? ' +
'WHERE id=? AND rev=?';
values = [newRev, json, id, oldRev];
} else {
sql = 'INSERT INTO ' + LOCAL_STORE + ' (id, rev, json) VALUES (?,?,?)';
values = [id, newRev, json];
}
tx.executeSql(sql, values, function (tx, res) {
if (res.rowsAffected) {
ret = {ok: true, id: id, rev: newRev};
if (opts.ctx) { // return immediately
callback(null, ret);
}
} else {
callback(errors.REV_CONFLICT);
}
}, function () {
callback(errors.REV_CONFLICT);
return false; // ack that we handled the error
});
}
if (opts.ctx) {
putLocal(opts.ctx);
} else {
db.transaction(function (tx) {
putLocal(tx);
}, unknownError(callback), function () {
if (ret) {
callback(null, ret);
}
});
}
};
api._removeLocal = function (doc, callback) {
var ret;
db.transaction(function (tx) {
var sql = 'DELETE FROM ' + LOCAL_STORE + ' WHERE id=? AND rev=?';
var params = [doc._id, doc._rev];
tx.executeSql(sql, params, function (tx, res) {
if (!res.rowsAffected) {
return callback(errors.REV_CONFLICT);
}
ret = {ok: true, id: doc._id, rev: '0-0'};
});
}, unknownError(callback), function () {
callback(null, ret);
});
};
}
WebSqlPouch.valid = function () {
if (typeof global !== 'undefined') {
if (global.navigator &&
global.navigator.sqlitePlugin &&
global.navigator.sqlitePlugin.openDatabase) {
return true;
} else if (global.sqlitePlugin && global.sqlitePlugin.openDatabase) {
return true;
} else if (global.openDatabase) {
return true;
}
}
return false;
};
WebSqlPouch.destroy = utils.toPromise(function (name, opts, callback) {
WebSqlPouch.Changes.removeAllListeners(name);
var size = getSize(opts);
var db = openDB(name, POUCH_VERSION, name, size);
db.transaction(function (tx) {
var stores = [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE, META_STORE,
LOCAL_STORE];
stores.forEach(function (store) {
tx.executeSql('DROP TABLE IF EXISTS ' + store, []);
});
}, unknownError(callback), function () {
if (utils.hasLocalStorage()) {
delete global.localStorage['_pouch__websqldb_' + name];
delete global.localStorage[name];
}
callback(null, {'ok': true});
});
});
WebSqlPouch.Changes = new utils.Changes();
module.exports = WebSqlPouch;
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"../deps/errors":11,"../merge":18,"../utils":23,"vuvuzela":58}],6:[function(_dereq_,module,exports){
'use strict';
var utils = _dereq_('./utils');
var merge = _dereq_('./merge');
var errors = _dereq_('./deps/errors');
var EE = _dereq_('events').EventEmitter;
var evalFilter = _dereq_('./evalFilter');
var evalView = _dereq_('./evalView');
module.exports = Changes;
utils.inherits(Changes, EE);
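// Changes wraps an adapter's _changes feed in an EventEmitter/promise hybrid:
// it re-emits 'change', 'create', 'update', 'delete', 'complete' and 'error'
// events and supports cancel().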
function Changes(db, opts, callback) {
EE.call(this);
var self = this;
this.db = db;
opts = opts ? utils.clone(opts) : {};
var oldComplete = callback || opts.complete || function () {};
var complete = opts.complete = utils.once(function (err, resp) {
if (err) {
self.emit('error', err);
} else {
self.emit('complete', resp);
}
self.removeAllListeners();
db.removeListener('destroyed', onDestroy);
});
if (oldComplete) {
self.on('complete', function (resp) {
oldComplete(null, resp);
});
self.on('error', function (err) {
oldComplete(err);
});
}
var oldOnChange = opts.onChange;
if (oldOnChange) {
self.on('change', oldOnChange);
}
function onDestroy() {
self.cancel();
}
db.once('destroyed', onDestroy);
opts.onChange = function (change) {
if (opts.isCancelled) {
return;
}
self.emit('change', change);
if (self.startSeq && self.startSeq <= change.seq) {
self.emit('uptodate');
self.startSeq = false;
}
if (change.deleted) {
self.emit('delete', change);
} else if (change.changes.length === 1 &&
change.changes[0].rev.slice(0, 2) === '1-') {
self.emit('create', change);
} else {
self.emit('update', change);
}
};
var promise = new utils.Promise(function (fulfill, reject) {
opts.complete = function (err, res) {
if (err) {
reject(err);
} else {
fulfill(res);
}
};
});
self.once('cancel', function () {
if (oldOnChange) {
self.removeListener('change', oldOnChange);
}
opts.complete(null, {status: 'cancelled'});
});
this.then = promise.then.bind(promise);
this['catch'] = promise['catch'].bind(promise);
this.then(function (result) {
complete(null, result);
}, complete);
if (!db.taskqueue.isReady) {
db.taskqueue.addTask(function () {
if (self.isCancelled) {
self.emit('cancel');
} else {
self.doChanges(opts);
}
});
} else {
self.doChanges(opts);
}
}
Changes.prototype.cancel = function () {
this.isCancelled = true;
if (this.db.taskqueue.isReady) {
this.emit('cancel');
}
};
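// Shape a raw doc + metadata pair into a change object: the id, the list of
// changed revs (all leaves when style is 'all_docs'), the doc itself, plus
// deleted and _conflicts fields when applicable.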
function processChange(doc, metadata, opts) {
var changeList = [{rev: doc._rev}];
if (opts.style === 'all_docs') {
changeList = merge.collectLeaves(metadata.rev_tree)
.map(function (x) { return {rev: x.rev}; });
}
var change = {
id: metadata.id,
changes: changeList,
doc: doc
};
if (utils.isDeleted(metadata, doc._rev)) {
change.deleted = true;
}
if (opts.conflicts) {
change.doc._conflicts = merge.collectConflicts(metadata);
if (!change.doc._conflicts.length) {
delete change.doc._conflicts;
}
}
return change;
}
Changes.prototype.doChanges = function (opts) {
var self = this;
var callback = opts.complete;
opts = utils.clone(opts);
if ('live' in opts && !('continuous' in opts)) {
opts.continuous = opts.live;
}
opts.processChange = processChange;
if (opts.since === 'latest') {
opts.since = 'now';
}
if (!opts.since) {
opts.since = 0;
}
if (opts.since === 'now') {
this.db.info().then(function (info) {
if (self.isCancelled) {
callback(null, {status: 'cancelled'});
return;
}
opts.since = info.update_seq - 1;
self.doChanges(opts);
}, callback);
return;
}
if (opts.continuous && opts.since !== 'now') {
this.db.info().then(function (info) {
self.startSeq = info.update_seq - 1;
}, function (err) {
if (err.id === 'idbNull') {
//db closed before this returned
        //that's ok
return;
}
throw err;
});
}
if (this.db.type() !== 'http' &&
opts.filter && typeof opts.filter === 'string') {
return this.filterChanges(opts);
}
if (!('descending' in opts)) {
opts.descending = false;
}
// 0 and 1 should return 1 document
opts.limit = opts.limit === 0 ? 1 : opts.limit;
opts.complete = callback;
var newPromise = this.db._changes(opts);
if (newPromise && typeof newPromise.cancel === 'function') {
var cancel = self.cancel;
self.cancel = utils.getArguments(function (args) {
newPromise.cancel();
cancel.apply(this, args);
});
}
};
Changes.prototype.filterChanges = function (opts) {
var self = this;
var callback = opts.complete;
if (opts.filter === '_view') {
if (!opts.view || typeof opts.view !== 'string') {
var err = new Error('`view` filter parameter is not provided.');
err.status = errors.BAD_REQUEST.status;
err.name = errors.BAD_REQUEST.name;
err.error = true;
callback(err);
return;
}
// fetch a view from a design doc, make it behave like a filter
var viewName = opts.view.split('/');
this.db.get('_design/' + viewName[0], function (err, ddoc) {
if (self.isCancelled) {
callback(null, {status: 'cancelled'});
return;
}
if (err) {
callback(err);
return;
}
if (ddoc && ddoc.views && ddoc.views[viewName[1]]) {
var filter = evalView(ddoc.views[viewName[1]].map);
opts.filter = filter;
self.doChanges(opts);
return;
}
var msg = ddoc.views ? 'missing json key: ' + viewName[1] :
'missing json key: views';
if (!err) {
err = new Error(msg);
err.status = errors.MISSING_DOC.status;
err.name = errors.MISSING_DOC.name;
err.error = true;
}
callback(err);
return;
});
} else {
// fetch a filter from a design doc
var filterName = opts.filter.split('/');
this.db.get('_design/' + filterName[0], function (err, ddoc) {
if (self.isCancelled) {
callback(null, {status: 'cancelled'});
return;
}
if (err) {
callback(err);
return;
}
if (ddoc && ddoc.filters && ddoc.filters[filterName[1]]) {
var filter = evalFilter(ddoc.filters[filterName[1]]);
opts.filter = filter;
self.doChanges(opts);
return;
} else {
var msg = (ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1]
: 'missing json key: filters';
if (!err) {
err = new Error(msg);
err.status = errors.MISSING_DOC.status;
err.name = errors.MISSING_DOC.name;
err.error = true;
}
callback(err);
return;
}
});
}
};
},{"./deps/errors":11,"./evalFilter":15,"./evalView":16,"./merge":18,"./utils":23,"events":27}],7:[function(_dereq_,module,exports){
(function (global){
/*globals cordova */
"use strict";
var Adapter = _dereq_('./adapter');
var utils = _dereq_('./utils');
var TaskQueue = _dereq_('./taskqueue');
var Promise = utils.Promise;
function defaultCallback(err) {
if (err && global.debug) {
console.error(err);
}
}
utils.inherits(PouchDB, Adapter);
function PouchDB(name, opts, callback) {
if (!(this instanceof PouchDB)) {
return new PouchDB(name, opts, callback);
}
var self = this;
if (typeof opts === 'function' || typeof opts === 'undefined') {
callback = opts;
opts = {};
}
if (name && typeof name === 'object') {
opts = name;
name = undefined;
}
if (typeof callback === 'undefined') {
callback = defaultCallback;
}
opts = opts || {};
var oldCB = callback;
self.auto_compaction = opts.auto_compaction;
self.prefix = PouchDB.prefix;
Adapter.call(self);
self.taskqueue = new TaskQueue();
var promise = new Promise(function (fulfill, reject) {
callback = function (err, resp) {
if (err) {
return reject(err);
}
delete resp.then;
fulfill(resp);
};
opts = utils.clone(opts);
var originalName = opts.name || name;
var backend, error;
(function () {
try {
if (typeof originalName !== 'string') {
error = new Error('Missing/invalid DB name');
error.code = 400;
throw error;
}
backend = PouchDB.parseAdapter(originalName, opts);
opts.originalName = originalName;
opts.name = backend.name;
if (opts.prefix && backend.adapter !== 'http' &&
backend.adapter !== 'https') {
opts.name = opts.prefix + opts.name;
}
opts.adapter = opts.adapter || backend.adapter;
self._adapter = opts.adapter;
self._db_name = originalName;
if (!PouchDB.adapters[opts.adapter]) {
error = new Error('Adapter is missing');
error.code = 404;
throw error;
}
if (!PouchDB.adapters[opts.adapter].valid()) {
error = new Error('Invalid Adapter');
error.code = 404;
throw error;
}
} catch (err) {
self.taskqueue.fail(err);
self.changes = utils.toPromise(function (opts) {
if (opts.complete) {
opts.complete(err);
}
});
}
}());
if (error) {
return reject(error); // constructor error, see above
}
self.adapter = opts.adapter;
// needs access to PouchDB;
self.replicate = {};
self.replicate.from = function (url, opts, callback) {
return self.constructor.replicate(url, self, opts, callback);
};
self.replicate.to = function (url, opts, callback) {
return self.constructor.replicate(self, url, opts, callback);
};
self.sync = function (dbName, opts, callback) {
return self.constructor.sync(self, dbName, opts, callback);
};
self.replicate.sync = self.sync;
self.destroy = utils.adapterFun('destroy', function (callback) {
var self = this;
self.info(function (err, info) {
if (err) {
return callback(err);
}
self.constructor.destroy(info.db_name, callback);
});
});
PouchDB.adapters[opts.adapter].call(self, opts, function (err, db) {
if (err) {
if (callback) {
self.taskqueue.fail(err);
callback(err);
}
return;
}
function destructionListener(event) {
if (event === 'destroyed') {
self.emit('destroyed');
PouchDB.removeListener(originalName, destructionListener);
}
}
PouchDB.on(originalName, destructionListener);
self.emit('created', self);
PouchDB.emit('created', opts.originalName);
self.taskqueue.ready(self);
callback(null, self);
});
if (opts.skipSetup) {
self.taskqueue.ready(self);
}
if (utils.isCordova()) {
      //to inform the websql adapter that we can use the api
cordova.fireWindowEvent(opts.name + "_pouch", {});
}
});
promise.then(function (resp) {
oldCB(null, resp);
}, oldCB);
self.then = promise.then.bind(promise);
self["catch"] = promise["catch"].bind(promise);
}
module.exports = PouchDB;
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./adapter":1,"./taskqueue":22,"./utils":23}],8:[function(_dereq_,module,exports){
"use strict";
var createBlob = _dereq_('./blob.js');
var errors = _dereq_('./errors');
var utils = _dereq_("../utils");
var hasUpload;
function ajax(options, adapterCallback) {
var requestCompleted = false;
var callback = utils.getArguments(function (args) {
if (requestCompleted) {
return;
}
adapterCallback.apply(this, args);
requestCompleted = true;
});
if (typeof options === "function") {
callback = options;
options = {};
}
options = utils.clone(options);
var defaultOptions = {
method : "GET",
headers: {},
json: true,
processData: true,
timeout: 10000,
cache: false
};
options = utils.extend(true, defaultOptions, options);
// cache-buster, specifically designed to work around IE's aggressive caching
// see http://www.dashbay.com/2011/05/internet-explorer-caches-ajax/
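  // An illustrative sketch (not from the original source): with these defaults
  // a GET for '/db/doc' becomes '/db/doc?_nonce=<16 random chars>', while
  // '/db/_all_docs?limit=10' gets '&_nonce=<16 random chars>' appended instead.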
if (options.method === 'GET' && !options.cache) {
var hasArgs = options.url.indexOf('?') !== -1;
options.url += (hasArgs ? '&' : '?') + '_nonce=' + utils.uuid(16);
}
function onSuccess(obj, resp, cb) {
if (!options.binary && !options.json && options.processData &&
typeof obj !== 'string') {
obj = JSON.stringify(obj);
} else if (!options.binary && options.json && typeof obj === 'string') {
try {
obj = JSON.parse(obj);
} catch (e) {
// Probably a malformed JSON from server
return cb(e);
}
}
if (Array.isArray(obj)) {
obj = obj.map(function (v) {
var obj;
if (v.ok) {
return v;
} else if (v.error && v.error === 'conflict') {
obj = errors.REV_CONFLICT;
obj.id = v.id;
return obj;
} else if (v.error && v.error === 'forbidden') {
obj = errors.FORBIDDEN;
obj.id = v.id;
obj.reason = v.reason;
return obj;
} else if (v.missing) {
obj = errors.MISSING_DOC;
obj.missing = v.missing;
return obj;
} else {
return v;
}
});
}
cb(null, obj, resp);
}
function onError(err, cb) {
var errParsed, errObj, errType, key;
try {
errParsed = JSON.parse(err.responseText);
//would prefer not to have a try/catch clause
for (key in errors) {
if (errors.hasOwnProperty(key) &&
errors[key].name === errParsed.error) {
errType = errors[key];
break;
}
}
if (!errType) {
errType = errors.UNKNOWN_ERROR;
if (err.status) {
errType.status = err.status;
}
if (err.statusText) {
err.name = err.statusText;
}
}
errObj = errors.error(errType, errParsed.reason);
} catch (e) {
for (var key in errors) {
if (errors.hasOwnProperty(key) && errors[key].status === err.status) {
errType = errors[key];
break;
}
}
if (!errType) {
errType = errors.UNKNOWN_ERROR;
if (err.status) {
errType.status = err.status;
}
if (err.statusText) {
err.name = err.statusText;
}
}
errObj = errors.error(errType);
}
if (err.withCredentials && err.status === 0) {
// apparently this is what we get when the method
// is reported as not allowed by CORS. so fudge it
errObj.status = 405;
errObj.statusText = "Method Not Allowed";
}
cb(errObj);
}
var timer;
var xhr;
if (options.xhr) {
xhr = new options.xhr();
} else {
xhr = new XMLHttpRequest();
}
xhr.open(options.method, options.url);
xhr.withCredentials = true;
if (options.json) {
options.headers.Accept = 'application/json';
options.headers['Content-Type'] = options.headers['Content-Type'] ||
'application/json';
if (options.body &&
options.processData &&
typeof options.body !== "string") {
options.body = JSON.stringify(options.body);
}
}
if (options.binary) {
xhr.responseType = 'arraybuffer';
}
var createCookie = function (name, value, days) {
var expires = "";
if (days) {
var date = new Date();
date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000));
expires = "; expires=" + date.toGMTString();
}
document.cookie = name + "=" + value + expires + "; path=/";
};
for (var key in options.headers) {
if (key === 'Cookie') {
var cookie = options.headers[key].split('=');
createCookie(cookie[0], cookie[1], 10);
} else {
xhr.setRequestHeader(key, options.headers[key]);
}
}
if (!("body" in options)) {
options.body = null;
}
var abortReq = function () {
if (requestCompleted) {
return;
}
xhr.abort();
onError(xhr, callback);
};
xhr.onreadystatechange = function () {
if (xhr.readyState !== 4 || requestCompleted) {
return;
}
clearTimeout(timer);
if (xhr.status >= 200 && xhr.status < 300) {
var data;
if (options.binary) {
data = createBlob([xhr.response || ''], {
type: xhr.getResponseHeader('Content-Type')
});
} else {
data = xhr.responseText;
}
onSuccess(data, xhr, callback);
} else {
onError(xhr, callback);
}
};
if (options.timeout > 0) {
timer = setTimeout(abortReq, options.timeout);
xhr.onprogress = function () {
clearTimeout(timer);
timer = setTimeout(abortReq, options.timeout);
};
if (typeof hasUpload === 'undefined') {
// IE throws an error if you try to access it directly
hasUpload = Object.keys(xhr).indexOf('upload') !== -1;
}
if (hasUpload) { // does not exist in ie9
xhr.upload.onprogress = xhr.onprogress;
}
}
if (options.body && (options.body instanceof Blob)) {
var reader = new FileReader();
reader.onloadend = function (e) {
var binary = "";
var bytes = new Uint8Array(this.result);
var length = bytes.byteLength;
for (var i = 0; i < length; i++) {
binary += String.fromCharCode(bytes[i]);
}
binary = utils.fixBinary(binary);
xhr.send(binary);
};
reader.readAsArrayBuffer(options.body);
} else {
xhr.send(options.body);
}
return {abort: abortReq};
}
module.exports = ajax;
},{"../utils":23,"./blob.js":9,"./errors":11}],9:[function(_dereq_,module,exports){
(function (global){
"use strict";
//Abstracts constructing a Blob object, so it also works in older
//browsers that don't support the native Blob constructor. (i.e.
//old QtWebKit versions, at least).
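//A hedged usage sketch (illustrative, not part of the original source):
//  var blob = createBlob(['{"ok":true}'], {type: 'application/json'});
//  // blob.type === 'application/json'; on old WebKit this goes through
//  // BlobBuilder.append() + getBlob() instead of the Blob constructor.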
function createBlob(parts, properties) {
parts = parts || [];
properties = properties || {};
try {
return new Blob(parts, properties);
} catch (e) {
if (e.name !== "TypeError") {
throw e;
}
var BlobBuilder = global.BlobBuilder ||
global.MSBlobBuilder ||
global.MozBlobBuilder ||
global.WebKitBlobBuilder;
var builder = new BlobBuilder();
for (var i = 0; i < parts.length; i += 1) {
builder.append(parts[i]);
}
return builder.getBlob(properties.type);
}
}
module.exports = createBlob;
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],10:[function(_dereq_,module,exports){
'use strict';
exports.Map = LazyMap; // TODO: use ES6 map
exports.Set = LazySet; // TODO: use ES6 set
// based on https://github.com/montagejs/collections
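// A rough usage sketch (illustrative only): keys are mangled with a '$' prefix
// so names such as '__proto__' or 'constructor' cannot collide with properties
// inherited by the plain-object backing store, e.g.
//   var map = new LazyMap();
//   map.set('__proto__', 1);
//   map.get('__proto__'); // 1
//   map.has('toString');  // false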
function LazyMap() {
this.store = {};
}
LazyMap.prototype.mangle = function (key) {
if (typeof key !== "string") {
throw new TypeError("key must be a string but Got " + key);
}
return '$' + key;
};
LazyMap.prototype.unmangle = function (key) {
return key.substring(1);
};
LazyMap.prototype.get = function (key) {
var mangled = this.mangle(key);
if (mangled in this.store) {
return this.store[mangled];
} else {
return void 0;
}
};
LazyMap.prototype.set = function (key, value) {
var mangled = this.mangle(key);
this.store[mangled] = value;
return true;
};
LazyMap.prototype.has = function (key) {
var mangled = this.mangle(key);
return mangled in this.store;
};
LazyMap.prototype["delete"] = function (key) {
var mangled = this.mangle(key);
if (mangled in this.store) {
delete this.store[mangled];
return true;
}
return false;
};
LazyMap.prototype.forEach = function (cb) {
var self = this;
var keys = Object.keys(self.store);
keys.forEach(function (key) {
var value = self.store[key];
key = self.unmangle(key);
cb(value, key);
});
};
function LazySet() {
this.store = new LazyMap();
}
LazySet.prototype.add = function (key) {
return this.store.set(key, true);
};
LazySet.prototype.has = function (key) {
return this.store.has(key);
};
LazySet.prototype["delete"] = function (key) {
return this.store["delete"](key);
};
},{}],11:[function(_dereq_,module,exports){
"use strict";
function PouchError(opts) {
this.status = opts.status;
this.name = opts.error;
this.message = opts.reason;
this.error = true;
}
PouchError.prototype.__proto__ = Error.prototype;
PouchError.prototype.toString = function () {
return JSON.stringify({
status: this.status,
name: this.name,
message: this.message
});
};
exports.UNAUTHORIZED = new PouchError({
status: 401,
error: 'unauthorized',
reason: "Name or password is incorrect."
});
exports.MISSING_BULK_DOCS = new PouchError({
status: 400,
error: 'bad_request',
reason: "Missing JSON list of 'docs'"
});
exports.MISSING_DOC = new PouchError({
status: 404,
error: 'not_found',
reason: 'missing'
});
exports.REV_CONFLICT = new PouchError({
status: 409,
error: 'conflict',
reason: 'Document update conflict'
});
exports.INVALID_ID = new PouchError({
status: 400,
error: 'invalid_id',
reason: '_id field must contain a string'
});
exports.MISSING_ID = new PouchError({
status: 412,
error: 'missing_id',
reason: '_id is required for puts'
});
exports.RESERVED_ID = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Only reserved document ids may start with underscore.'
});
exports.NOT_OPEN = new PouchError({
status: 412,
error: 'precondition_failed',
reason: 'Database not open'
});
exports.UNKNOWN_ERROR = new PouchError({
status: 500,
error: 'unknown_error',
reason: 'Database encountered an unknown error'
});
exports.BAD_ARG = new PouchError({
status: 500,
error: 'badarg',
reason: 'Some query argument is invalid'
});
exports.INVALID_REQUEST = new PouchError({
status: 400,
error: 'invalid_request',
reason: 'Request was invalid'
});
exports.QUERY_PARSE_ERROR = new PouchError({
status: 400,
error: 'query_parse_error',
reason: 'Some query parameter is invalid'
});
exports.DOC_VALIDATION = new PouchError({
status: 500,
error: 'doc_validation',
reason: 'Bad special document member'
});
exports.BAD_REQUEST = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Something wrong with the request'
});
exports.NOT_AN_OBJECT = new PouchError({
status: 400,
error: 'bad_request',
reason: 'Document must be a JSON object'
});
exports.DB_MISSING = new PouchError({
status: 404,
error: 'not_found',
reason: 'Database not found'
});
exports.IDB_ERROR = new PouchError({
status: 500,
error: 'indexed_db_went_bad',
reason: 'unknown'
});
exports.WSQ_ERROR = new PouchError({
status: 500,
error: 'web_sql_went_bad',
reason: 'unknown'
});
exports.LDB_ERROR = new PouchError({
status: 500,
error: 'levelDB_went_went_bad',
reason: 'unknown'
});
exports.FORBIDDEN = new PouchError({
status: 403,
error: 'forbidden',
reason: 'Forbidden by design doc validate_doc_update function'
});
exports.error = function (error, reason, name) {
function CustomPouchError(msg) {
this.message = reason;
if (name) {
this.name = name;
}
}
CustomPouchError.prototype = error;
return new CustomPouchError(reason);
};
},{}],12:[function(_dereq_,module,exports){
(function (process,global){
'use strict';
var crypto = _dereq_('crypto');
var Md5 = _dereq_('spark-md5');
var setImmediateShim = global.setImmediate || global.setTimeout;
function sliceShim(arrayBuffer, begin, end) {
if (typeof arrayBuffer.slice === 'function') {
if (!begin) {
return arrayBuffer.slice();
} else if (!end) {
return arrayBuffer.slice(begin);
} else {
return arrayBuffer.slice(begin, end);
}
}
//
// shim for IE courtesy of http://stackoverflow.com/a/21440217
//
//If `begin`/`end` is unspecified, Chrome assumes 0, so we do the same
//Chrome also converts the values to integers via flooring
begin = Math.floor(begin || 0);
end = Math.floor(end || 0);
var len = arrayBuffer.byteLength;
//If either `begin` or `end` is negative, it refers to an
//index from the end of the array, as opposed to from the beginning.
//The range specified by the `begin` and `end` values is clamped to the
//valid index range for the current array.
begin = begin < 0 ? Math.max(begin + len, 0) : Math.min(len, begin);
end = end < 0 ? Math.max(end + len, 0) : Math.min(len, end);
//If the computed length of the new ArrayBuffer would be negative, it
//is clamped to zero.
if (end - begin <= 0) {
return new ArrayBuffer(0);
}
var result = new ArrayBuffer(end - begin);
var resultBytes = new Uint8Array(result);
var sourceBytes = new Uint8Array(arrayBuffer, begin, end - begin);
resultBytes.set(sourceBytes);
return result;
}
// convert a 32-bit int to a binary string
function intToString(int) {
var bytes = [
(int & 0xff),
((int >>> 8) & 0xff),
((int >>> 16) & 0xff),
((int >>> 24) & 0xff)
];
return bytes.map(function (byte) {
return String.fromCharCode(byte);
}).join('');
}
// convert an array of 32-bit ints into
// a base64-encoded string
function rawToBase64(raw) {
var res = '';
for (var i = 0; i < raw.length; i++) {
res += intToString(raw[i]);
}
return global.btoa(res);
}
module.exports = function (data, callback) {
if (!process.browser) {
var base64 = crypto.createHash('md5').update(data).digest('base64');
callback(null, base64);
return;
}
var inputIsString = typeof data === 'string';
var len = inputIsString ? data.length : data.byteLength;
var chunkSize = Math.min(524288, len);
var chunks = Math.ceil(len / chunkSize);
var currentChunk = 0;
var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer();
function append(buffer, data, start, end) {
if (inputIsString) {
buffer.appendBinary(data.substring(start, end));
} else {
buffer.append(sliceShim(data, start, end));
}
}
function loadNextChunk() {
var start = currentChunk * chunkSize;
var end = start + chunkSize;
if ((start + chunkSize) >= data.size) {
end = data.size;
}
currentChunk++;
if (currentChunk < chunks) {
append(buffer, data, start, end);
setImmediateShim(loadNextChunk);
} else {
append(buffer, data, start, end);
var raw = buffer.end(true);
var base64 = rawToBase64(raw);
callback(null, base64);
buffer.destroy();
}
}
loadNextChunk();
};
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"crypto":26,"spark-md5":57}],13:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('../utils').Promise;
// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
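// A hedged example of a diffFun (assumed, not from the original source):
// increment a counter, creating the doc on first use:
//   upsert(db, 'counter', function (doc) {
//     doc.count = (doc.count || 0) + 1;
//     return doc;
//   });
// Returning false from diffFun instead would skip the write entirely.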
function upsert(db, docId, diffFun) {
return new Promise(function (fulfill, reject) {
if (docId && typeof docId === 'object') {
docId = docId._id;
}
if (typeof docId !== 'string') {
return reject(new Error('doc id is required'));
}
db.get(docId, function (err, doc) {
if (err) {
if (err.status !== 404) {
return reject(err);
}
return fulfill(tryAndPut(db, diffFun({_id : docId}), diffFun));
}
var newDoc = diffFun(doc);
if (!newDoc) {
return fulfill(doc);
}
fulfill(tryAndPut(db, newDoc, diffFun));
});
});
}
function tryAndPut(db, doc, diffFun) {
return db.put(doc)["catch"](function (err) {
if (err.status !== 409) {
throw err;
}
return upsert(db, doc, diffFun);
});
}
module.exports = function (db, docId, diffFun, cb) {
if (typeof cb === 'function') {
upsert(db, docId, diffFun).then(function (resp) {
cb(null, resp);
}, cb);
} else {
return upsert(db, docId, diffFun);
}
};
},{"../utils":23}],14:[function(_dereq_,module,exports){
"use strict";
// BEGIN Math.uuid.js
/*!
Math.uuid.js (v1.4)
http://www.broofa.com
mailto:[email protected]
Copyright (c) 2010 Robert Kieffer
Dual licensed under the MIT and GPL licenses.
*/
/*
* Generate a random uuid.
*
* USAGE: Math.uuid(length, radix)
* length - the desired number of characters
* radix - the number of allowable values for each character.
*
* EXAMPLES:
* // No arguments - returns RFC4122, version 4 ID
* >>> Math.uuid()
* "92329D39-6F5C-4520-ABFC-AAB64544E172"
*
* // One argument - returns ID of the specified length
* >>> Math.uuid(15) // 15 character ID (default base=62)
* "VcydxgltxrVZSTV"
*
* // Two arguments - returns ID of the specified length, and radix.
* // (Radix must be <= 62)
* >>> Math.uuid(8, 2) // 8 character ID (base=2)
* "01001010"
* >>> Math.uuid(8, 10) // 8 character ID (base=10)
* "47473046"
* >>> Math.uuid(8, 16) // 8 character ID (base=16)
* "098F4D35"
*/
var chars = (
'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' +
'abcdefghijklmnopqrstuvwxyz'
).split('');
function getValue(radix) {
return 0 | Math.random() * radix;
}
function uuid(len, radix) {
radix = radix || chars.length;
var out = '';
var i = -1;
if (len) {
// Compact form
while (++i < len) {
out += chars[getValue(radix)];
}
return out;
}
// rfc4122, version 4 form
// Fill in random data. At i==19 set the high bits of clock sequence as
// per rfc4122, sec. 4.1.5
while (++i < 36) {
switch (i) {
case 8:
case 13:
case 18:
case 23:
out += '-';
break;
case 19:
out += chars[(getValue(16) & 0x3) | 0x8];
break;
default:
out += chars[getValue(16)];
}
}
return out;
}
module.exports = uuid;
},{}],15:[function(_dereq_,module,exports){
'use strict';
module.exports = evalFilter;
function evalFilter(input) {
/*jshint evil: true */
return eval([
'(function () { return ',
input,
' })()'
].join(''));
}
},{}],16:[function(_dereq_,module,exports){
'use strict';
module.exports = evalView;
function evalView(input) {
/*jshint evil: true */
return eval([
'(function () {',
' return function (doc) {',
' var emitted = false;',
' var emit = function (a, b) {',
' emitted = true;',
' };',
' var view = ' + input + ';',
' view(doc);',
' if (emitted) {',
' return true;',
' }',
' }',
'})()'
].join('\n'));
}
},{}],17:[function(_dereq_,module,exports){
(function (process){
"use strict";
var PouchDB = _dereq_('./setup');
module.exports = PouchDB;
PouchDB.ajax = _dereq_('./deps/ajax');
PouchDB.extend = _dereq_('pouchdb-extend');
PouchDB.utils = _dereq_('./utils');
PouchDB.Errors = _dereq_('./deps/errors');
PouchDB.replicate = _dereq_('./replicate').replicate;
PouchDB.sync = _dereq_('./sync');
PouchDB.version = _dereq_('./version');
var httpAdapter = _dereq_('./adapters/http');
PouchDB.adapter('http', httpAdapter);
PouchDB.adapter('https', httpAdapter);
PouchDB.adapter('idb', _dereq_('./adapters/idb'));
PouchDB.adapter('websql', _dereq_('./adapters/websql'));
PouchDB.plugin(_dereq_('pouchdb-mapreduce'));
if (!process.browser) {
var ldbAdapter = _dereq_('./adapters/leveldb');
PouchDB.adapter('ldb', ldbAdapter);
PouchDB.adapter('leveldb', ldbAdapter);
}
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"./adapters/http":2,"./adapters/idb":3,"./adapters/leveldb":26,"./adapters/websql":5,"./deps/ajax":8,"./deps/errors":11,"./replicate":19,"./setup":20,"./sync":21,"./utils":23,"./version":24,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"pouchdb-extend":48,"pouchdb-mapreduce":51}],18:[function(_dereq_,module,exports){
'use strict';
var extend = _dereq_('pouchdb-extend');
// for a better overview of what this is doing, read:
// https://github.com/apache/couchdb/blob/master/src/couchdb/couch_key_tree.erl
//
// But for a quick intro, CouchDB uses a revision tree to store a document's
// history: A -> B -> C. When a document has conflicts, that is a branch in the
// tree: A -> (B1 | B2 -> C). We store these as a nested array in the format
//
// KeyTree = [Path ... ]
// Path = {pos: position_from_root, ids: Tree}
// Tree = [Key, Opts, [Tree, ...]], in particular a single node: [Key, Opts, []]
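// As a concrete illustration (not from the original source), the conflicted
// history A -> (B1 | B2 -> C) starting at generation 1 could be stored as:
//   [{pos: 1, ids: ['A', {}, [
//     ['B1', {}, []],
//     ['B2', {}, [['C', {}, []]]]
//   ]]}]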
// Turn a path as a flat array into a tree with a single branch
function pathToTree(path) {
var doc = path.shift();
var root = [doc.id, doc.opts, []];
var leaf = root;
var nleaf;
while (path.length) {
doc = path.shift();
nleaf = [doc.id, doc.opts, []];
leaf[2].push(nleaf);
leaf = nleaf;
}
return root;
}
// Merge two trees together
// The roots of tree1 and tree2 must be the same revision
function mergeTree(in_tree1, in_tree2) {
var queue = [{tree1: in_tree1, tree2: in_tree2}];
var conflicts = false;
while (queue.length > 0) {
var item = queue.pop();
var tree1 = item.tree1;
var tree2 = item.tree2;
if (tree1[1].status || tree2[1].status) {
tree1[1].status =
(tree1[1].status === 'available' ||
tree2[1].status === 'available') ? 'available' : 'missing';
}
for (var i = 0; i < tree2[2].length; i++) {
if (!tree1[2][0]) {
conflicts = 'new_leaf';
tree1[2][0] = tree2[2][i];
continue;
}
var merged = false;
for (var j = 0; j < tree1[2].length; j++) {
if (tree1[2][j][0] === tree2[2][i][0]) {
queue.push({tree1: tree1[2][j], tree2: tree2[2][i]});
merged = true;
}
}
if (!merged) {
conflicts = 'new_branch';
tree1[2].push(tree2[2][i]);
tree1[2].sort();
}
}
}
return {conflicts: conflicts, tree: in_tree1};
}
function doMerge(tree, path, dontExpand) {
var restree = [];
var conflicts = false;
var merged = false;
var res;
if (!tree.length) {
return {tree: [path], conflicts: 'new_leaf'};
}
tree.forEach(function (branch) {
if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) {
      // Paths start at the same position and have the same root, so they need
      // to be merged
res = mergeTree(branch.ids, path.ids);
restree.push({pos: branch.pos, ids: res.tree});
conflicts = conflicts || res.conflicts;
merged = true;
} else if (dontExpand !== true) {
      // The paths start at a different position: take the earliest path and
      // traverse up until it is at the same distance from the root as the path
      // we want to merge. If the keys match we return the longer path with the
      // other merged into it. After stemming we don't want to expand the trees.
var t1 = branch.pos < path.pos ? branch : path;
var t2 = branch.pos < path.pos ? path : branch;
var diff = t2.pos - t1.pos;
var candidateParents = [];
var trees = [];
trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null});
while (trees.length > 0) {
var item = trees.pop();
if (item.diff === 0) {
if (item.ids[0] === t2.ids[0]) {
candidateParents.push(item);
}
continue;
}
if (!item.ids) {
continue;
}
/*jshint loopfunc:true */
item.ids[2].forEach(function (el, idx) {
trees.push(
{ids: el, diff: item.diff - 1, parent: item.ids, parentIdx: idx});
});
}
var el = candidateParents[0];
if (!el) {
restree.push(branch);
} else {
res = mergeTree(el.ids, t2.ids);
el.parent[2][el.parentIdx] = res.tree;
restree.push({pos: t1.pos, ids: t1.ids});
conflicts = conflicts || res.conflicts;
merged = true;
}
} else {
restree.push(branch);
}
});
  // We didn't find an existing branch to merge into, so add the path as a new one
if (!merged) {
restree.push(path);
}
restree.sort(function (a, b) {
return a.pos - b.pos;
});
return {
tree: restree,
conflicts: conflicts || 'internal_node'
};
}
// To ensure we don't grow the revision tree infinitely, we stem old revisions
function stem(tree, depth) {
  // First we break out the tree into a complete list of root-to-leaf paths,
  // cut off the start of each path, and generate a new set of flat trees
var stemmedPaths = PouchMerge.rootToLeaf(tree).map(function (path) {
var stemmed = path.ids.slice(-depth);
return {
pos: path.pos + (path.ids.length - stemmed.length),
ids: pathToTree(stemmed)
};
});
  // Then we remerge all those flat trees together, ensuring that we don't
  // connect trees that would go beyond the depth limit
return stemmedPaths.reduce(function (prev, current, i, arr) {
return doMerge(prev, current, true).tree;
}, [stemmedPaths.shift()]);
}
var PouchMerge = {};
PouchMerge.merge = function (tree, path, depth) {
// Ugh, nicer way to not modify arguments in place?
tree = extend(true, [], tree);
path = extend(true, {}, path);
var newTree = doMerge(tree, path);
return {
tree: stem(newTree.tree, depth),
conflicts: newTree.conflicts
};
};
// We fetch all leaves of the revision tree and sort them based on tree length
// and whether they were deleted; undeleted documents with the longest revision
// tree (most edits) win.
// The final sort algorithm is slightly documented in a sidebar here:
// http://guide.couchdb.org/draft/conflicts.html
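// For example (illustrative): given the leaves 3-aaa (deleted), 3-bbb and
// 2-ccc, the winner is '3-bbb'; deleted leaves sort last, longer branches beat
// shorter ones, and the lexicographically higher hash breaks any remaining tie.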
PouchMerge.winningRev = function (metadata) {
var leafs = [];
PouchMerge.traverseRevTree(metadata.rev_tree,
function (isLeaf, pos, id, something, opts) {
if (isLeaf) {
leafs.push({pos: pos, id: id, deleted: !!opts.deleted});
}
});
leafs.sort(function (a, b) {
if (a.deleted !== b.deleted) {
return a.deleted > b.deleted ? 1 : -1;
}
if (a.pos !== b.pos) {
return b.pos - a.pos;
}
return a.id < b.id ? 1 : -1;
});
return leafs[0].pos + '-' + leafs[0].id;
};
// Pretty much everything below can be combined into a higher-order function
// to traverse revisions.
// The return value from the callback will be passed as context to all
// children of that node
PouchMerge.traverseRevTree = function (revs, callback) {
var toVisit = revs.slice();
var node;
while ((node = toVisit.pop())) {
var pos = node.pos;
var tree = node.ids;
var branches = tree[2];
var newCtx =
callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]);
for (var i = 0, len = branches.length; i < len; i++) {
toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx});
}
}
};
PouchMerge.collectLeaves = function (revs) {
var leaves = [];
PouchMerge.traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) {
if (isLeaf) {
leaves.unshift({rev: pos + "-" + id, pos: pos, opts: opts});
}
});
leaves.sort(function (a, b) {
return b.pos - a.pos;
});
leaves.map(function (leaf) { delete leaf.pos; });
return leaves;
};
// Returns the revs of all conflicting leaves, i.e. leaves that
// 1. are not deleted and
// 2. are different from the winning revision
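// e.g. (illustrative): if the winning rev is '3-bbb' and the other leaves are
// '3-aaa' (deleted) and '2-ccc', this returns ['2-ccc'] only.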
PouchMerge.collectConflicts = function (metadata) {
var win = PouchMerge.winningRev(metadata);
var leaves = PouchMerge.collectLeaves(metadata.rev_tree);
var conflicts = [];
leaves.forEach(function (leaf) {
if (leaf.rev !== win && !leaf.opts.deleted) {
conflicts.push(leaf.rev);
}
});
return conflicts;
};
PouchMerge.rootToLeaf = function (tree) {
var paths = [];
PouchMerge.traverseRevTree(tree, function (isLeaf, pos, id, history, opts) {
history = history ? history.slice(0) : [];
history.push({id: id, opts: opts});
if (isLeaf) {
var rootPos = pos + 1 - history.length;
paths.unshift({pos: rootPos, ids: history});
}
return history;
});
return paths;
};
module.exports = PouchMerge;
},{"pouchdb-extend":48}],19:[function(_dereq_,module,exports){
'use strict';
var utils = _dereq_('./utils');
var EE = _dereq_('events').EventEmitter;
var MAX_SIMULTANEOUS_REVS = 50;
// We create a basic promise so the caller can cancel the replication possibly
// before we have actually started listening to changes etc
utils.inherits(Replication, EE);
function Replication(opts) {
EE.call(this);
this.cancelled = false;
var self = this;
var promise = new utils.Promise(function (fulfill, reject) {
self.once('complete', fulfill);
self.once('error', reject);
});
self.then = function (resolve, reject) {
return promise.then(resolve, reject);
};
self["catch"] = function (reject) {
return promise["catch"](reject);
};
// As we allow error handling via "error" event as well,
// put a stub in here so that rejecting never throws UnhandledError.
self["catch"](function (err) {});
}
Replication.prototype.cancel = function () {
this.cancelled = true;
this.emit('cancel');
};
Replication.prototype.ready = function (src, target) {
var self = this;
function onDestroy() {
self.cancel();
}
src.once('destroyed', onDestroy);
target.once('destroyed', onDestroy);
function cleanup() {
src.removeListener('destroyed', onDestroy);
target.removeListener('destroyed', onDestroy);
}
this.then(cleanup, cleanup);
};
// TODO: check CouchDB's replication id generation
// Generate a unique id particular to this replication
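// A hedged note on the shape of the result (inferred from the code below, not
// normative): '_local/' + an MD5 of src id + target id + filter source +
// query_params + doc_ids, base64-encoded with '/' replaced by '.' and '+' by
// '_' so the id stays URL-safe.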
function genReplicationId(src, target, opts) {
var filterFun = opts.filter ? opts.filter.toString() : '';
return src.id().then(function (src_id) {
return target.id().then(function (target_id) {
var queryData = src_id + target_id + filterFun +
JSON.stringify(opts.query_params) + opts.doc_ids;
return utils.MD5(queryData).then(function (md5) {
// can't use straight-up md5 alphabet, because
// the char '/' is interpreted as being for attachments,
// and + is also not url-safe
md5 = md5.replace(/\//g, '.').replace(/\+/g, '_');
return '_local/' + md5;
});
});
});
}
function updateCheckpoint(db, id, checkpoint, returnValue) {
return db.get(id)["catch"](function (err) {
if (err.status === 404) {
return {_id: id};
}
throw err;
}).then(function (doc) {
if (returnValue.cancelled) {
return;
}
doc.last_seq = checkpoint;
return db.put(doc);
});
}
function Checkpointer(src, target, id, returnValue) {
this.src = src;
this.target = target;
this.id = id;
this.returnValue = returnValue;
}
Checkpointer.prototype.writeCheckpoint = function (checkpoint) {
var self = this;
return this.updateTarget(checkpoint).then(function () {
return self.updateSource(checkpoint);
});
};
Checkpointer.prototype.updateTarget = function (checkpoint) {
return updateCheckpoint(this.target, this.id, checkpoint, this.returnValue);
};
Checkpointer.prototype.updateSource = function (checkpoint) {
var self = this;
if (this.readOnlySource) {
return utils.Promise.resolve(true);
}
return updateCheckpoint(this.src, this.id, checkpoint, this.returnValue)[
"catch"](function (err) {
var isForbidden = typeof err.status === 'number' &&
Math.floor(err.status / 100) === 4;
if (isForbidden) {
self.readOnlySource = true;
return true;
}
throw err;
});
};
Checkpointer.prototype.getCheckpoint = function () {
var self = this;
return self.target.get(self.id).then(function (targetDoc) {
return self.src.get(self.id).then(function (sourceDoc) {
if (targetDoc.last_seq === sourceDoc.last_seq) {
return sourceDoc.last_seq;
}
return 0;
}, function (err) {
if (err.status === 404 && targetDoc.last_seq) {
return self.src.put({
_id: self.id,
last_seq: 0
}).then(function () {
return 0;
}, function (err) {
if (err.status === 401) {
self.readOnlySource = true;
return targetDoc.last_seq;
}
return 0;
});
}
throw err;
});
})["catch"](function (err) {
if (err.status !== 404) {
throw err;
}
return 0;
});
};
function replicate(repId, src, target, opts, returnValue) {
var batches = []; // list of batches to be processed
var currentBatch; // the batch currently being processed
var pendingBatch = {
seq: 0,
changes: [],
docs: []
}; // next batch, not yet ready to be processed
var writingCheckpoint = false; // true while checkpoint is being written
var changesCompleted = false; // true when all changes received
var replicationCompleted = false; // true when replication has completed
var last_seq = 0;
var continuous = opts.continuous || opts.live || false;
var batch_size = opts.batch_size || 100;
var batches_limit = opts.batches_limit || 10;
var changesPending = false; // true while src.changes is running
var doc_ids = opts.doc_ids;
var checkpointer = new Checkpointer(src, target, repId, returnValue);
var result = {
ok: true,
start_time: new Date(),
docs_read: 0,
docs_written: 0,
doc_write_failures: 0,
errors: []
};
var changesOpts = {};
returnValue.ready(src, target);
function writeDocs() {
if (currentBatch.docs.length === 0) {
return;
}
var docs = currentBatch.docs;
return target.bulkDocs({
docs: docs
}, {
new_edits: false
}).then(function (res) {
if (returnValue.cancelled) {
completeReplication();
throw new Error('cancelled');
}
var errors = [];
res.forEach(function (res) {
if (res.error) {
result.doc_write_failures++;
var error = new Error(res.reason || res.message || 'Unknown reason');
error.name = res.name || res.error;
errors.push(error);
}
});
result.errors = result.errors.concat(errors);
result.docs_written += currentBatch.docs.length - errors.length;
var non403s = errors.filter(function (error) {
return error.name !== 'unauthorized' && error.name !== 'forbidden';
});
if (non403s.length > 0) {
var error = new Error('bulkDocs error');
error.other_errors = errors;
abortReplication('target.bulkDocs failed to write docs', error);
throw new Error('bulkWrite partial failure');
}
}, function (err) {
result.doc_write_failures += docs.length;
throw err;
});
}
function getNextDoc() {
var diffs = currentBatch.diffs;
var id = Object.keys(diffs)[0];
var allMissing = diffs[id].missing;
// avoid url too long error by batching
var missingBatches = [];
for (var i = 0; i < allMissing.length; i += MAX_SIMULTANEOUS_REVS) {
missingBatches.push(allMissing.slice(i, Math.min(allMissing.length,
i + MAX_SIMULTANEOUS_REVS)));
}
return utils.Promise.all(missingBatches.map(function (missing) {
return src.get(id, {revs: true, open_revs: missing, attachments: true})
.then(function (docs) {
docs.forEach(function (doc) {
if (returnValue.cancelled) {
return completeReplication();
}
if (doc.ok) {
result.docs_read++;
currentBatch.pendingRevs++;
currentBatch.docs.push(doc.ok);
delete diffs[doc.ok._id];
}
});
});
}));
}
function getAllDocs() {
if (Object.keys(currentBatch.diffs).length > 0) {
return getNextDoc().then(getAllDocs);
} else {
return utils.Promise.resolve();
}
}
function getRevisionOneDocs() {
    // filter out the generation 1 docs and get them via allDocs,
    // leaving the non-generation-1 docs to be fetched individually later
var ids = Object.keys(currentBatch.diffs).filter(function (id) {
var missing = currentBatch.diffs[id].missing;
return missing.length === 1 && missing[0].slice(0, 2) === '1-';
});
return src.allDocs({
keys: ids,
include_docs: true
}).then(function (res) {
if (returnValue.cancelled) {
completeReplication();
throw (new Error('cancelled'));
}
res.rows.forEach(function (row) {
if (row.doc && !row.deleted &&
row.value.rev.slice(0, 2) === '1-' && (
!row.doc._attachments ||
Object.keys(row.doc._attachments).length === 0
)
) {
result.docs_read++;
currentBatch.pendingRevs++;
currentBatch.docs.push(row.doc);
delete currentBatch.diffs[row.id];
}
});
});
}
function getDocs() {
return getRevisionOneDocs().then(getAllDocs);
}
function finishBatch() {
writingCheckpoint = true;
return checkpointer.writeCheckpoint(
currentBatch.seq
).then(function (res) {
writingCheckpoint = false;
if (returnValue.cancelled) {
completeReplication();
throw new Error('cancelled');
}
result.last_seq = last_seq = currentBatch.seq;
returnValue.emit('change', utils.clone(result));
currentBatch = undefined;
getChanges();
})["catch"](function (err) {
writingCheckpoint = false;
abortReplication('writeCheckpoint completed with error', err);
throw err;
});
}
function getDiffs() {
var diff = {};
currentBatch.changes.forEach(function (change) {
diff[change.id] = change.changes.map(function (x) {
return x.rev;
});
});
return target.revsDiff(diff).then(function (diffs) {
if (returnValue.cancelled) {
completeReplication();
throw new Error('cancelled');
}
// currentBatch.diffs elements are deleted as the documents are written
currentBatch.diffs = diffs;
currentBatch.pendingRevs = 0;
});
}
function startNextBatch() {
if (returnValue.cancelled || currentBatch) {
return;
}
if (batches.length === 0) {
processPendingBatch(true);
return;
}
currentBatch = batches.shift();
getDiffs()
.then(getDocs)
.then(writeDocs)
.then(finishBatch)
.then(startNextBatch)[
"catch"](function (err) {
abortReplication('batch processing terminated with error', err);
});
}
function processPendingBatch(immediate) {
if (pendingBatch.changes.length === 0) {
if (batches.length === 0 && !currentBatch) {
if ((continuous && changesOpts.live) || changesCompleted) {
returnValue.emit('uptodate', utils.clone(result));
}
if (changesCompleted) {
completeReplication();
}
}
return;
}
if (
immediate ||
changesCompleted ||
pendingBatch.changes.length >= batch_size
) {
batches.push(pendingBatch);
pendingBatch = {
seq: 0,
changes: [],
docs: []
};
startNextBatch();
}
}
function abortReplication(reason, err) {
if (replicationCompleted) {
return;
}
result.ok = false;
result.status = 'aborted';
result.errors.push(err);
batches = [];
pendingBatch = {
seq: 0,
changes: [],
docs: []
};
completeReplication();
}
function completeReplication() {
if (replicationCompleted) {
return;
}
if (returnValue.cancelled) {
result.status = 'cancelled';
if (writingCheckpoint) {
return;
}
}
result.status = result.status || 'complete';
result.end_time = new Date();
result.last_seq = last_seq;
replicationCompleted = returnValue.cancelled = true;
var non403s = result.errors.filter(function (error) {
return error.name !== 'unauthorized' && error.name !== 'forbidden';
});
if (non403s.length > 0) {
var error = result.errors.pop();
if (result.errors.length > 0) {
error.other_errors = result.errors;
}
error.result = result;
returnValue.emit('error', error);
} else {
returnValue.emit('complete', result);
}
returnValue.removeAllListeners();
}
function onChange(change) {
if (returnValue.cancelled) {
return completeReplication();
}
if (
pendingBatch.changes.length === 0 &&
batches.length === 0 &&
!currentBatch
) {
returnValue.emit('outofdate', utils.clone(result));
}
pendingBatch.seq = change.seq;
pendingBatch.changes.push(change);
processPendingBatch(batches.length === 0);
}
function onChangesComplete(changes) {
changesPending = false;
if (returnValue.cancelled) {
return completeReplication();
}
if (changesOpts.since < changes.last_seq) {
changesOpts.since = changes.last_seq;
getChanges();
} else {
if (continuous) {
changesOpts.live = true;
getChanges();
} else {
changesCompleted = true;
}
}
processPendingBatch(true);
}
function onChangesError(err) {
changesPending = false;
if (returnValue.cancelled) {
return completeReplication();
}
abortReplication('changes rejected', err);
}
function getChanges() {
if (!(
!changesPending &&
!changesCompleted &&
batches.length < batches_limit
)) {
return;
}
changesPending = true;
function abortChanges() {
changes.cancel();
}
function removeListener() {
returnValue.removeListener('cancel', abortChanges);
}
returnValue.once('cancel', abortChanges);
var changes = src.changes(changesOpts)
.on('change', onChange);
changes.then(removeListener, removeListener);
changes.then(onChangesComplete)[
"catch"](onChangesError);
}
function startChanges() {
checkpointer.getCheckpoint().then(function (checkpoint) {
last_seq = checkpoint;
changesOpts = {
since: last_seq,
limit: batch_size,
batch_size: batch_size,
style: 'all_docs',
doc_ids: doc_ids,
returnDocs: false
};
if (opts.filter) {
changesOpts.filter = opts.filter;
}
if (opts.query_params) {
changesOpts.query_params = opts.query_params;
}
getChanges();
})["catch"](function (err) {
abortReplication('getCheckpoint rejected with ', err);
});
}
returnValue.once('cancel', completeReplication);
if (typeof opts.onChange === 'function') {
returnValue.on('change', opts.onChange);
}
if (typeof opts.complete === 'function') {
returnValue.once('error', opts.complete);
returnValue.once('complete', function (result) {
opts.complete(null, result);
});
}
if (typeof opts.since === 'undefined') {
startChanges();
} else {
writingCheckpoint = true;
checkpointer.writeCheckpoint(opts.since).then(function (res) {
writingCheckpoint = false;
if (returnValue.cancelled) {
completeReplication();
return;
}
last_seq = opts.since;
startChanges();
})["catch"](function (err) {
writingCheckpoint = false;
abortReplication('writeCheckpoint completed with error', err);
throw err;
});
}
}
exports.toPouch = toPouch;
function toPouch(db, opts) {
var PouchConstructor = opts.PouchConstructor;
if (typeof db === 'string') {
return new PouchConstructor(db);
} else if (db.then) {
return db;
} else {
return utils.Promise.resolve(db);
}
}
exports.replicate = replicateWrapper;
function replicateWrapper(src, target, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
if (typeof opts === 'undefined') {
opts = {};
}
if (!opts.complete) {
opts.complete = callback || function () {};
}
opts = utils.clone(opts);
opts.continuous = opts.continuous || opts.live;
/*jshint validthis:true */
opts.PouchConstructor = opts.PouchConstructor || this;
var replicateRet = new Replication(opts);
toPouch(src, opts).then(function (src) {
return toPouch(target, opts).then(function (target) {
return genReplicationId(src, target, opts).then(function (repId) {
replicate(repId, src, target, opts, replicateRet);
});
});
})["catch"](function (err) {
replicateRet.emit('error', err);
opts.complete(err);
});
return replicateRet;
}
},{"./utils":23,"events":27}],20:[function(_dereq_,module,exports){
(function (global){
"use strict";
var PouchDB = _dereq_("./constructor");
var utils = _dereq_('./utils');
var Promise = utils.Promise;
var EventEmitter = _dereq_('events').EventEmitter;
PouchDB.adapters = {};
PouchDB.preferredAdapters = _dereq_('./adapters/preferredAdapters.js');
PouchDB.prefix = '_pouch_';
var eventEmitter = new EventEmitter();
var eventEmitterMethods = [
'on',
'addListener',
'emit',
'listeners',
'once',
'removeAllListeners',
'removeListener',
'setMaxListeners'
];
eventEmitterMethods.forEach(function (method) {
PouchDB[method] = eventEmitter[method].bind(eventEmitter);
});
PouchDB.setMaxListeners(0);
PouchDB.parseAdapter = function (name, opts) {
var match = name.match(/([a-z\-]*):\/\/(.*)/);
var adapter, adapterName;
if (match) {
// the http adapter expects the fully qualified name
name = /http(s?)/.test(match[1]) ? match[1] + '://' + match[2] : match[2];
adapter = match[1];
if (!PouchDB.adapters[adapter].valid()) {
throw 'Invalid adapter';
}
return {name: name, adapter: match[1]};
}
// check for browsers that have been upgraded from websql-only to websql+idb
var skipIdb = 'idb' in PouchDB.adapters && 'websql' in PouchDB.adapters &&
utils.hasLocalStorage() &&
global.localStorage['_pouch__websqldb_' + PouchDB.prefix + name];
if (typeof opts !== 'undefined' && opts.db) {
adapterName = 'leveldb';
} else {
for (var i = 0; i < PouchDB.preferredAdapters.length; ++i) {
adapterName = PouchDB.preferredAdapters[i];
if (adapterName in PouchDB.adapters) {
if (skipIdb && adapterName === 'idb') {
continue; // keep using websql to avoid user data loss
}
break;
}
}
}
adapter = PouchDB.adapters[adapterName];
if (adapterName && adapter) {
var use_prefix = 'use_prefix' in adapter ? adapter.use_prefix : true;
return {
name: use_prefix ? PouchDB.prefix + name : name,
adapter: adapterName
};
}
throw 'No valid adapter found';
};
PouchDB.destroy = utils.toPromise(function (name, opts, callback) {
if (typeof opts === 'function' || typeof opts === 'undefined') {
callback = opts;
opts = {};
}
if (name && typeof name === 'object') {
opts = name;
name = undefined;
}
var backend = PouchDB.parseAdapter(opts.name || name, opts);
var dbName = backend.name;
var adapter = PouchDB.adapters[backend.adapter];
var usePrefix = 'use_prefix' in adapter ? adapter.use_prefix : true;
var baseName = usePrefix ?
dbName.replace(new RegExp('^' + PouchDB.prefix), '') : dbName;
var fullName = (backend.adapter === 'http' || backend.adapter === 'https' ?
'' : (opts.prefix || '')) + dbName;
function destroyDb() {
    // call destroy method of the particular adapter
adapter.destroy(fullName, opts, function (err, resp) {
if (err) {
callback(err);
} else {
PouchDB.emit('destroyed', name);
//so we don't have to sift through all dbnames
PouchDB.emit(name, 'destroyed');
callback(null, resp || { 'ok': true });
}
});
}
var createOpts = utils.extend(true, {}, opts, {adapter : backend.adapter});
new PouchDB(baseName, createOpts, function (err, db) {
if (err) {
return callback(err);
}
db.get('_local/_pouch_dependentDbs', function (err, localDoc) {
if (err) {
if (err.status !== 404) {
return callback(err);
} else { // no dependencies
return destroyDb();
}
}
var dependentDbs = localDoc.dependentDbs;
var deletedMap = Object.keys(dependentDbs).map(function (name) {
var trueName = usePrefix ?
name.replace(new RegExp('^' + PouchDB.prefix), '') : name;
var subOpts = utils.extend(true, opts, {adapter: backend.adapter});
return PouchDB.destroy(trueName, subOpts);
});
Promise.all(deletedMap).then(destroyDb, function (error) {
callback(error);
});
});
});
});
PouchDB.allDbs = utils.toPromise(function (callback) {
var err = new Error('allDbs method removed');
  err.status = 400;
callback(err);
});
PouchDB.adapter = function (id, obj) {
if (obj.valid()) {
PouchDB.adapters[id] = obj;
}
};
PouchDB.plugin = function (obj) {
Object.keys(obj).forEach(function (id) {
PouchDB.prototype[id] = obj[id];
});
};
PouchDB.defaults = function (defaultOpts) {
function PouchAlt(name, opts, callback) {
if (typeof opts === 'function' || typeof opts === 'undefined') {
callback = opts;
opts = {};
}
if (name && typeof name === 'object') {
opts = name;
name = undefined;
}
opts = utils.extend(true, {}, defaultOpts, opts);
PouchDB.call(this, name, opts, callback);
}
utils.inherits(PouchAlt, PouchDB);
PouchAlt.destroy = utils.toPromise(function (name, opts, callback) {
if (typeof opts === 'function' || typeof opts === 'undefined') {
callback = opts;
opts = {};
}
if (name && typeof name === 'object') {
opts = name;
name = undefined;
}
opts = utils.extend(true, {}, defaultOpts, opts);
return PouchDB.destroy(name, opts, callback);
});
eventEmitterMethods.forEach(function (method) {
PouchAlt[method] = eventEmitter[method].bind(eventEmitter);
});
PouchAlt.setMaxListeners(0);
PouchAlt.preferredAdapters = PouchDB.preferredAdapters.slice();
Object.keys(PouchDB).forEach(function (key) {
if (!(key in PouchAlt)) {
PouchAlt[key] = PouchDB[key];
}
});
return PouchAlt;
};
module.exports = PouchDB;
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./adapters/preferredAdapters.js":4,"./constructor":7,"./utils":23,"events":27}],21:[function(_dereq_,module,exports){
'use strict';
var utils = _dereq_('./utils');
var replication = _dereq_('./replicate');
var replicate = replication.replicate;
var EE = _dereq_('events').EventEmitter;
utils.inherits(Sync, EE);
module.exports = sync;
function sync(src, target, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
if (typeof opts === 'undefined') {
opts = {};
}
opts = utils.clone(opts);
/*jshint validthis:true */
opts.PouchConstructor = opts.PouchConstructor || this;
src = replication.toPouch(src, opts);
target = replication.toPouch(target, opts);
return new Sync(src, target, opts, callback);
}
function Sync(src, target, opts, callback) {
var self = this;
this.canceled = false;
var onChange, complete;
if ('onChange' in opts) {
onChange = opts.onChange;
delete opts.onChange;
}
if (typeof callback === 'function' && !opts.complete) {
complete = callback;
} else if ('complete' in opts) {
complete = opts.complete;
delete opts.complete;
}
this.push = replicate(src, target, opts);
this.pull = replicate(target, src, opts);
var emittedCancel = false;
function onCancel(data) {
if (!emittedCancel) {
emittedCancel = true;
self.emit('cancel', data);
}
}
function pullChange(change) {
self.emit('change', {
direction: 'pull',
change: change
});
}
function pushChange(change) {
self.emit('change', {
direction: 'push',
change: change
});
}
var listeners = {};
var removed = {};
function removeAll(type) { // type is 'push' or 'pull'
return function (event, func) {
var isChange = event === 'change' &&
(func === pullChange || func === pushChange);
var isCancel = event === 'cancel' && func === onCancel;
var isOtherEvent = event in listeners && func === listeners[event];
if (isChange || isCancel || isOtherEvent) {
if (!(event in removed)) {
removed[event] = {};
}
removed[event][type] = true;
if (Object.keys(removed[event]).length === 2) {
// both push and pull have asked to be removed
self.removeAllListeners(event);
}
}
};
}
this.on('newListener', function (event) {
if (event === 'change') {
self.pull.on('change', pullChange);
self.push.on('change', pushChange);
} else if (event === 'cancel') {
self.pull.on('cancel', onCancel);
self.push.on('cancel', onCancel);
} else if (event !== 'error' &&
event !== 'removeListener' &&
event !== 'complete' && !(event in listeners)) {
listeners[event] = function (e) {
self.emit(event, e);
};
self.pull.on(event, listeners[event]);
self.push.on(event, listeners[event]);
}
});
this.on('removeListener', function (event) {
if (event === 'change') {
self.pull.removeListener('change', pullChange);
self.push.removeListener('change', pushChange);
} else if (event === 'cancel') {
self.pull.removeListener('cancel', onCancel);
self.push.removeListener('cancel', onCancel);
} else if (event in listeners) {
if (typeof listeners[event] === 'function') {
self.pull.removeListener(event, listeners[event]);
self.push.removeListener(event, listeners[event]);
delete listeners[event];
}
}
});
this.pull.on('removeListener', removeAll('pull'));
this.push.on('removeListener', removeAll('push'));
var promise = utils.Promise.all([
this.push,
this.pull
]).then(function (resp) {
var out = {
push: resp[0],
pull: resp[1]
};
self.emit('complete', out);
if (complete) {
complete(null, out);
}
self.removeAllListeners();
return out;
}, function (err) {
self.cancel();
self.emit('error', err);
if (complete) {
complete(err);
}
self.removeAllListeners();
throw err;
});
this.then = function (success, err) {
return promise.then(success, err);
};
this["catch"] = function (err) {
return promise["catch"](err);
};
}
Sync.prototype.cancel = function () {
if (!this.canceled) {
this.canceled = true;
this.push.cancel();
this.pull.cancel();
}
};
},{"./replicate":19,"./utils":23,"events":27}],22:[function(_dereq_,module,exports){
'use strict';
module.exports = TaskQueue;
function TaskQueue() {
this.isReady = false;
this.failed = false;
this.queue = [];
}
TaskQueue.prototype.execute = function () {
var d, func;
if (this.failed) {
while ((d = this.queue.shift())) {
if (typeof d === 'function') {
d(this.failed);
continue;
}
func = d.parameters[d.parameters.length - 1];
if (typeof func === 'function') {
func(this.failed);
} else if (d.name === 'changes' && typeof func.complete === 'function') {
func.complete(this.failed);
}
}
} else if (this.isReady) {
while ((d = this.queue.shift())) {
if (typeof d === 'function') {
d();
} else {
d.task = this.db[d.name].apply(this.db, d.parameters);
}
}
}
};
TaskQueue.prototype.fail = function (err) {
this.failed = err;
this.execute();
};
TaskQueue.prototype.ready = function (db) {
if (this.failed) {
return false;
} else if (arguments.length === 0) {
return this.isReady;
}
  this.isReady = !!db;
this.db = db;
this.execute();
};
TaskQueue.prototype.addTask = function (name, parameters) {
if (typeof name === 'function') {
this.queue.push(name);
if (this.failed) {
this.execute();
}
} else {
var task = { name: name, parameters: parameters };
this.queue.push(task);
if (this.failed) {
this.execute();
}
return task;
}
};
},{}],23:[function(_dereq_,module,exports){
(function (process,global){
/*jshint strict: false */
/*global chrome */
var merge = _dereq_('./merge');
exports.extend = _dereq_('pouchdb-extend');
exports.ajax = _dereq_('./deps/ajax');
exports.createBlob = _dereq_('./deps/blob');
exports.uuid = _dereq_('./deps/uuid');
exports.getArguments = _dereq_('argsarray');
var buffer = _dereq_('./deps/buffer');
var errors = _dereq_('./deps/errors');
var EventEmitter = _dereq_('events').EventEmitter;
var collections = _dereq_('./deps/collections');
exports.Map = collections.Map;
exports.Set = collections.Set;
if (typeof global.Promise === 'function') {
exports.Promise = global.Promise;
} else {
exports.Promise = _dereq_('bluebird');
}
var Promise = exports.Promise;
function toObject(array) {
var obj = {};
array.forEach(function (item) { obj[item] = true; });
return obj;
}
// List of top level reserved words for doc
var reservedWords = toObject([
'_id',
'_rev',
'_attachments',
'_deleted',
'_revisions',
'_revs_info',
'_conflicts',
'_deleted_conflicts',
'_local_seq',
'_rev_tree',
//replication documents
'_replication_id',
'_replication_state',
'_replication_state_time',
'_replication_state_reason',
'_replication_stats'
]);
// List of reserved words that should end up in the document
var dataWords = toObject([
'_attachments',
//replication documents
'_replication_id',
'_replication_state',
'_replication_state_time',
'_replication_state_reason',
'_replication_stats'
]);
exports.clone = function (obj) {
return exports.extend(true, {}, obj);
};
exports.inherits = _dereq_('inherits');
// Determine if an ID is valid
// - invalid IDs begin with an underscore that does not start with '_design' or
//   '_local'
// - any other string value is a valid ID
// Throws the specific error object for each case
exports.invalidIdError = function (id) {
var err;
if (!id) {
err = new TypeError(errors.MISSING_ID.message);
err.status = 412;
} else if (typeof id !== 'string') {
err = new TypeError(errors.INVALID_ID.message);
err.status = 400;
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) {
err = new TypeError(errors.RESERVED_ID.message);
err.status = 400;
}
if (err) {
throw err;
}
};
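// Behavior sketch for the checks above (illustrative only, kept as a comment):
/*
exports.invalidIdError('mydoc');        // ok (returns undefined)
exports.invalidIdError('_design/foo');  // ok, '_design/' and '_local/' are allowed
exports.invalidIdError('_private');     // throws RESERVED_ID (status 400)
exports.invalidIdError('');             // throws MISSING_ID (status 412)
exports.invalidIdError(12);             // throws INVALID_ID (status 400)
*/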
function isChromeApp() {
return (typeof chrome !== "undefined" &&
typeof chrome.storage !== "undefined" &&
typeof chrome.storage.local !== "undefined");
}
// Pretty dumb name for a function, just wraps callback calls so we don't have
// to write if (callback) callback() everywhere
exports.call = exports.getArguments(function (args) {
if (!args.length) {
return;
}
var fun = args.shift();
if (typeof fun === 'function') {
fun.apply(this, args);
}
});
exports.isLocalId = function (id) {
return (/^_local/).test(id);
};
// check if a specific revision of a doc has been deleted
// - metadata: the metadata object from the doc store
// - rev: (optional) the revision to check. defaults to winning revision
exports.isDeleted = function (metadata, rev) {
if (!rev) {
rev = merge.winningRev(metadata);
}
var dashIndex = rev.indexOf('-');
if (dashIndex !== -1) {
rev = rev.substring(dashIndex + 1);
}
var deleted = false;
merge.traverseRevTree(metadata.rev_tree,
function (isLeaf, pos, id, acc, opts) {
if (id === rev) {
deleted = !!opts.deleted;
}
});
return deleted;
};
exports.filterChange = function (opts) {
return function (change) {
var req = {};
var hasFilter = opts.filter && typeof opts.filter === 'function';
req.query = opts.query_params;
if (opts.filter && hasFilter && !opts.filter.call(this, change.doc, req)) {
return false;
}
if (opts.doc_ids && opts.doc_ids.indexOf(change.id) === -1) {
return false;
}
if (!opts.include_docs) {
delete change.doc;
} else {
for (var att in change.doc._attachments) {
if (change.doc._attachments.hasOwnProperty(att)) {
change.doc._attachments[att].stub = true;
}
}
}
return true;
};
};
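// Usage sketch (illustrative only): filterChange() builds a predicate that also
// strips or stubs change.doc depending on include_docs.
/*
var filter = exports.filterChange({
  include_docs: false,
  filter: function (doc) { return doc.type === 'post'; }
});
var change = { id: 'a', doc: { _id: 'a', type: 'post' } };
filter(change);   // => true, and change.doc has been removed (include_docs: false)
filter({ id: 'b', doc: { _id: 'b', type: 'comment' } });  // => false
*/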
// Preprocess documents, parse their revisions, assign an id and a
// revision for new writes that are missing them, etc
exports.parseDoc = function (doc, newEdits) {
var nRevNum;
var newRevId;
var revInfo;
var error;
var opts = {status: 'available'};
if (doc._deleted) {
opts.deleted = true;
}
if (newEdits) {
if (!doc._id) {
doc._id = exports.uuid();
}
newRevId = exports.uuid(32, 16).toLowerCase();
if (doc._rev) {
revInfo = /^(\d+)-(.+)$/.exec(doc._rev);
if (!revInfo) {
var err = new TypeError("invalid value for property '_rev'");
err.status = 400;
throw err;
}
doc._rev_tree = [{
pos: parseInt(revInfo[1], 10),
ids: [revInfo[2], {status: 'missing'}, [[newRevId, opts, []]]]
}];
nRevNum = parseInt(revInfo[1], 10) + 1;
} else {
doc._rev_tree = [{
pos: 1,
ids : [newRevId, opts, []]
}];
nRevNum = 1;
}
} else {
if (doc._revisions) {
doc._rev_tree = [{
pos: doc._revisions.start - doc._revisions.ids.length + 1,
ids: doc._revisions.ids.reduce(function (acc, x) {
if (acc === null) {
return [x, opts, []];
} else {
return [x, {status: 'missing'}, [acc]];
}
}, null)
}];
nRevNum = doc._revisions.start;
newRevId = doc._revisions.ids[0];
}
if (!doc._rev_tree) {
revInfo = /^(\d+)-(.+)$/.exec(doc._rev);
if (!revInfo) {
error = new TypeError(errors.BAD_ARG.message);
error.status = errors.BAD_ARG.status;
throw error;
}
nRevNum = parseInt(revInfo[1], 10);
newRevId = revInfo[2];
doc._rev_tree = [{
pos: parseInt(revInfo[1], 10),
ids: [revInfo[2], opts, []]
}];
}
}
exports.invalidIdError(doc._id);
doc._rev = [nRevNum, newRevId].join('-');
var result = {metadata : {}, data : {}};
for (var key in doc) {
if (doc.hasOwnProperty(key)) {
var specialKey = key[0] === '_';
if (specialKey && !reservedWords[key]) {
error = new Error(errors.DOC_VALIDATION.message + ': ' + key);
error.status = errors.DOC_VALIDATION.status;
throw error;
} else if (specialKey && !dataWords[key]) {
result.metadata[key.slice(1)] = doc[key];
} else {
result.data[key] = doc[key];
}
}
}
return result;
};
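// Example of the metadata/data split performed above (illustrative only; the
// generated revision id is shown as a placeholder):
/*
var result = exports.parseDoc({ _id: 'mydoc', title: 'hello' }, true);
// result.data     => { title: 'hello' }
// result.metadata => { id: 'mydoc',
//                      rev: '1-<generated 32-char id>',
//                      rev_tree: [{ pos: 1,
//                                   ids: ['<generated id>', {status: 'available'}, []] }] }
// _attachments and the _replication_* fields stay in result.data;
// any other leading-underscore field throws a DOC_VALIDATION error.
*/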
exports.isCordova = function () {
return (typeof cordova !== "undefined" ||
typeof PhoneGap !== "undefined" ||
typeof phonegap !== "undefined");
};
exports.hasLocalStorage = function () {
if (isChromeApp()) {
return false;
}
try {
return global.localStorage;
} catch (e) {
return false;
}
};
exports.Changes = Changes;
exports.inherits(Changes, EventEmitter);
function Changes() {
if (!(this instanceof Changes)) {
return new Changes();
}
var self = this;
EventEmitter.call(this);
this.isChrome = isChromeApp();
this.listeners = {};
this.hasLocal = false;
if (!this.isChrome) {
this.hasLocal = exports.hasLocalStorage();
}
if (this.isChrome) {
chrome.storage.onChanged.addListener(function (e) {
// make sure it's an event addressed to us
if (e.dbName != null) {
//the changed entry only has oldValue and newValue members
self.emit(e.dbName.newValue);
}
});
} else if (this.hasLocal) {
if (global.addEventListener) {
global.addEventListener("storage", function (e) {
self.emit(e.key);
});
} else {
global.attachEvent("storage", function (e) {
self.emit(e.key);
});
}
}
}
Changes.prototype.addListener = function (dbName, id, db, opts) {
if (this.listeners[id]) {
return;
}
function eventFunction() {
db.changes({
include_docs: opts.include_docs,
conflicts: opts.conflicts,
continuous: false,
descending: false,
filter: opts.filter,
view: opts.view,
since: opts.since,
query_params: opts.query_params,
onChange: function (c) {
if (c.seq > opts.since && !opts.cancelled) {
opts.since = c.seq;
exports.call(opts.onChange, c);
}
}
});
}
this.listeners[id] = eventFunction;
this.on(dbName, eventFunction);
};
Changes.prototype.removeListener = function (dbName, id) {
if (!(id in this.listeners)) {
return;
}
EventEmitter.prototype.removeListener.call(this, dbName,
this.listeners[id]);
};
Changes.prototype.notifyLocalWindows = function (dbName) {
//write a dummy value to storage so that the storage/onChanged
//listeners in other windows are triggered
if (this.isChrome) {
chrome.storage.local.set({dbName: dbName});
} else if (this.hasLocal) {
localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a";
}
};
Changes.prototype.notify = function (dbName) {
this.emit(dbName);
this.notifyLocalWindows(dbName);
};
if (!process.browser || !('atob' in global)) {
exports.atob = function (str) {
var base64 = new buffer(str, 'base64');
// Node.js will just skip the characters it can't decode instead of
// throwing an exception
if (base64.toString('base64') !== str) {
throw new Error("Cannot base64 encode full string");
}
return base64.toString('binary');
};
} else {
exports.atob = function (str) {
return atob(str);
};
}
if (!process.browser || !('btoa' in global)) {
exports.btoa = function (str) {
return new buffer(str, 'binary').toString('base64');
};
} else {
exports.btoa = function (str) {
return btoa(str);
};
}
// From http://stackoverflow.com/questions/14967647/ (continues on next line)
// encode-decode-image-with-base64-breaks-image (2013-04-21)
exports.fixBinary = function (bin) {
if (!process.browser) {
// don't need to do this in Node
return bin;
}
var length = bin.length;
var buf = new ArrayBuffer(length);
var arr = new Uint8Array(buf);
for (var i = 0; i < length; i++) {
arr[i] = bin.charCodeAt(i);
}
return buf;
};
exports.once = function (fun) {
var called = false;
return exports.getArguments(function (args) {
if (called) {
if (typeof console.trace === 'function') {
console.trace();
}
throw new Error('once called more than once');
} else {
called = true;
fun.apply(this, args);
}
});
};
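// Usage sketch (illustrative only): the wrapped function may be called at most once.
/*
var done = exports.once(function (err, res) { console.log('finished', err, res); });
done(null, 'ok');   // logs: finished null ok
done(null, 'ok');   // throws Error('once called more than once')
*/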
exports.toPromise = function (func) {
//create the function we will be returning
return exports.getArguments(function (args) {
var self = this;
var tempCB =
(typeof args[args.length - 1] === 'function') ? args.pop() : false;
// if the last argument is a function, assume it's a callback
var usedCB;
if (tempCB) {
// if it was a callback, create a new callback which calls it,
// but do so async so we don't trap any errors
usedCB = function (err, resp) {
process.nextTick(function () {
tempCB(err, resp);
});
};
}
var promise = new Promise(function (fulfill, reject) {
var resp;
try {
var callback = exports.once(function (err, mesg) {
if (err) {
reject(err);
} else {
fulfill(mesg);
}
});
// create a callback for this invocation
// apply the function in the orig context
args.push(callback);
resp = func.apply(self, args);
if (resp && typeof resp.then === 'function') {
fulfill(resp);
}
} catch (e) {
reject(e);
}
});
// if there is a callback, call it back
if (usedCB) {
promise.then(function (result) {
usedCB(null, result);
}, usedCB);
}
promise.cancel = function () {
return this;
};
return promise;
});
};
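// Usage sketch (illustrative only; readConfig is a made-up example function):
// toPromise() wraps a node-style function so callers can use either style.
/*
var readConfig = exports.toPromise(function (name, callback) {
  setTimeout(function () { callback(null, { name: name }); }, 0);
});
readConfig('db1').then(function (cfg) { console.log(cfg.name); });   // promise style
readConfig('db1', function (err, cfg) { console.log(cfg.name); });   // callback style
*/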
exports.adapterFun = function (name, callback) {
return exports.toPromise(exports.getArguments(function (args) {
if (this._closed) {
return Promise.reject(new Error('database is closed'));
}
var self = this;
if (!this.taskqueue.isReady) {
return new exports.Promise(function (fulfill, reject) {
self.taskqueue.addTask(function (failed) {
if (failed) {
reject(failed);
} else {
fulfill(self[name].apply(self, args));
}
});
});
}
return callback.apply(this, args);
}));
};
//Can't find original post, but this is close
//http://stackoverflow.com/questions/6965107/ (continues on next line)
//converting-between-strings-and-arraybuffers
exports.arrayBufferToBinaryString = function (buffer) {
var binary = "";
var bytes = new Uint8Array(buffer);
var length = bytes.byteLength;
for (var i = 0; i < length; i++) {
binary += String.fromCharCode(bytes[i]);
}
return binary;
};
exports.cancellableFun = function (fun, self, opts) {
opts = opts ? exports.clone(opts) : {};
var emitter = new EventEmitter();
var oldComplete = opts.complete || function () { };
var complete = opts.complete = exports.once(function (err, resp) {
if (err) {
oldComplete(err);
} else {
emitter.emit('end', resp);
oldComplete(null, resp);
}
emitter.removeAllListeners();
});
var oldOnChange = opts.onChange || function () {};
var lastChange = 0;
self.on('destroyed', function () {
emitter.removeAllListeners();
});
opts.onChange = function (change) {
oldOnChange(change);
if (change.seq <= lastChange) {
return;
}
lastChange = change.seq;
emitter.emit('change', change);
if (change.deleted) {
emitter.emit('delete', change);
} else if (change.changes.length === 1 &&
change.changes[0].rev.slice(0, 1) === '1-') {
emitter.emit('create', change);
} else {
emitter.emit('update', change);
}
};
var promise = new Promise(function (fulfill, reject) {
opts.complete = function (err, res) {
if (err) {
reject(err);
} else {
fulfill(res);
}
};
});
promise.then(function (result) {
complete(null, result);
}, complete);
// this needs to be overridden by the caller, don't fire complete until
// the task is ready
promise.cancel = function () {
promise.isCancelled = true;
if (self.taskqueue.isReady) {
opts.complete(null, {status: 'cancelled'});
}
};
if (!self.taskqueue.isReady) {
self.taskqueue.addTask(function () {
if (promise.isCancelled) {
opts.complete(null, {status: 'cancelled'});
} else {
fun(self, opts, promise);
}
});
} else {
fun(self, opts, promise);
}
promise.on = emitter.on.bind(emitter);
promise.once = emitter.once.bind(emitter);
promise.addListener = emitter.addListener.bind(emitter);
promise.removeListener = emitter.removeListener.bind(emitter);
promise.removeAllListeners = emitter.removeAllListeners.bind(emitter);
promise.setMaxListeners = emitter.setMaxListeners.bind(emitter);
promise.listeners = emitter.listeners.bind(emitter);
promise.emit = emitter.emit.bind(emitter);
return promise;
};
exports.MD5 = exports.toPromise(_dereq_('./deps/md5'));
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./deps/ajax":8,"./deps/blob":9,"./deps/buffer":26,"./deps/collections":10,"./deps/errors":11,"./deps/md5":12,"./deps/uuid":14,"./merge":18,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"argsarray":25,"bluebird":33,"events":27,"inherits":29,"pouchdb-extend":48}],24:[function(_dereq_,module,exports){
module.exports = "3.0.7-prerelease";
},{}],25:[function(_dereq_,module,exports){
'use strict';
module.exports = argsArray;
function argsArray(fun) {
return function () {
var len = arguments.length;
if (len) {
var args = [];
var i = -1;
while (++i < len) {
args[i] = arguments[i];
}
return fun.call(this, args);
} else {
return fun.call(this, []);
}
};
}
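// Usage sketch (illustrative only): the wrapped function always receives a real array.
/*
var count = argsArray(function (args) {
  return args.length;  // args is an Array, never an Arguments object
});
count(1, 2, 3);  // => 3
count();         // => 0
*/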
},{}],26:[function(_dereq_,module,exports){
},{}],27:[function(_dereq_,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
function EventEmitter() {
this._events = this._events || {};
this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
if (!isNumber(n) || n < 0 || isNaN(n))
throw TypeError('n must be a positive number');
this._maxListeners = n;
return this;
};
EventEmitter.prototype.emit = function(type) {
var er, handler, len, args, i, listeners;
if (!this._events)
this._events = {};
// If there is no 'error' event listener then throw.
if (type === 'error') {
if (!this._events.error ||
(isObject(this._events.error) && !this._events.error.length)) {
er = arguments[1];
if (er instanceof Error) {
throw er; // Unhandled 'error' event
}
throw TypeError('Uncaught, unspecified "error" event.');
}
}
handler = this._events[type];
if (isUndefined(handler))
return false;
if (isFunction(handler)) {
switch (arguments.length) {
// fast cases
case 1:
handler.call(this);
break;
case 2:
handler.call(this, arguments[1]);
break;
case 3:
handler.call(this, arguments[1], arguments[2]);
break;
// slower
default:
len = arguments.length;
args = new Array(len - 1);
for (i = 1; i < len; i++)
args[i - 1] = arguments[i];
handler.apply(this, args);
}
} else if (isObject(handler)) {
len = arguments.length;
args = new Array(len - 1);
for (i = 1; i < len; i++)
args[i - 1] = arguments[i];
listeners = handler.slice();
len = listeners.length;
for (i = 0; i < len; i++)
listeners[i].apply(this, args);
}
return true;
};
EventEmitter.prototype.addListener = function(type, listener) {
var m;
if (!isFunction(listener))
throw TypeError('listener must be a function');
if (!this._events)
this._events = {};
// To avoid recursion in the case that type === "newListener"! Before
// adding it to the listeners, first emit "newListener".
if (this._events.newListener)
this.emit('newListener', type,
isFunction(listener.listener) ?
listener.listener : listener);
if (!this._events[type])
// Optimize the case of one listener. Don't need the extra array object.
this._events[type] = listener;
else if (isObject(this._events[type]))
// If we've already got an array, just append.
this._events[type].push(listener);
else
// Adding the second element, need to change to array.
this._events[type] = [this._events[type], listener];
// Check for listener leak
if (isObject(this._events[type]) && !this._events[type].warned) {
if (!isUndefined(this._maxListeners)) {
m = this._maxListeners;
} else {
m = EventEmitter.defaultMaxListeners;
}
if (m && m > 0 && this._events[type].length > m) {
this._events[type].warned = true;
console.error('(node) warning: possible EventEmitter memory ' +
'leak detected. %d listeners added. ' +
'Use emitter.setMaxListeners() to increase limit.',
this._events[type].length);
if (typeof console.trace === 'function') {
// not supported in IE 10
console.trace();
}
}
}
return this;
};
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
EventEmitter.prototype.once = function(type, listener) {
if (!isFunction(listener))
throw TypeError('listener must be a function');
var fired = false;
function g() {
this.removeListener(type, g);
if (!fired) {
fired = true;
listener.apply(this, arguments);
}
}
g.listener = listener;
this.on(type, g);
return this;
};
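// Usage sketch (illustrative only): a once() listener is removed after its first call.
/*
var emitter = new EventEmitter();
emitter.once('ping', function (msg) { console.log('got', msg); });
emitter.emit('ping', 'first');   // logs: got first
emitter.emit('ping', 'second');  // listener already removed, nothing logged
*/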
// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
var list, position, length, i;
if (!isFunction(listener))
throw TypeError('listener must be a function');
if (!this._events || !this._events[type])
return this;
list = this._events[type];
length = list.length;
position = -1;
if (list === listener ||
(isFunction(list.listener) && list.listener === listener)) {
delete this._events[type];
if (this._events.removeListener)
this.emit('removeListener', type, listener);
} else if (isObject(list)) {
for (i = length; i-- > 0;) {
if (list[i] === listener ||
(list[i].listener && list[i].listener === listener)) {
position = i;
break;
}
}
if (position < 0)
return this;
if (list.length === 1) {
list.length = 0;
delete this._events[type];
} else {
list.splice(position, 1);
}
if (this._events.removeListener)
this.emit('removeListener', type, listener);
}
return this;
};
EventEmitter.prototype.removeAllListeners = function(type) {
var key, listeners;
if (!this._events)
return this;
// not listening for removeListener, no need to emit
if (!this._events.removeListener) {
if (arguments.length === 0)
this._events = {};
else if (this._events[type])
delete this._events[type];
return this;
}
// emit removeListener for all listeners on all events
if (arguments.length === 0) {
for (key in this._events) {
if (key === 'removeListener') continue;
this.removeAllListeners(key);
}
this.removeAllListeners('removeListener');
this._events = {};
return this;
}
listeners = this._events[type];
if (isFunction(listeners)) {
this.removeListener(type, listeners);
} else {
// LIFO order
while (listeners.length)
this.removeListener(type, listeners[listeners.length - 1]);
}
delete this._events[type];
return this;
};
EventEmitter.prototype.listeners = function(type) {
var ret;
if (!this._events || !this._events[type])
ret = [];
else if (isFunction(this._events[type]))
ret = [this._events[type]];
else
ret = this._events[type].slice();
return ret;
};
EventEmitter.listenerCount = function(emitter, type) {
var ret;
if (!emitter._events || !emitter._events[type])
ret = 0;
else if (isFunction(emitter._events[type]))
ret = 1;
else
ret = emitter._events[type].length;
return ret;
};
function isFunction(arg) {
return typeof arg === 'function';
}
function isNumber(arg) {
return typeof arg === 'number';
}
function isObject(arg) {
return typeof arg === 'object' && arg !== null;
}
function isUndefined(arg) {
return arg === void 0;
}
},{}],28:[function(_dereq_,module,exports){
// shim for using process in browser
var process = module.exports = {};
process.nextTick = (function () {
var canSetImmediate = typeof window !== 'undefined'
&& window.setImmediate;
var canPost = typeof window !== 'undefined'
&& window.postMessage && window.addEventListener
;
if (canSetImmediate) {
return function (f) { return window.setImmediate(f) };
}
if (canPost) {
var queue = [];
window.addEventListener('message', function (ev) {
var source = ev.source;
if ((source === window || source === null) && ev.data === 'process-tick') {
ev.stopPropagation();
if (queue.length > 0) {
var fn = queue.shift();
fn();
}
}
}, true);
return function nextTick(fn) {
queue.push(fn);
window.postMessage('process-tick', '*');
};
}
return function nextTick(fn) {
setTimeout(fn, 0);
};
})();
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.binding = function (name) {
throw new Error('process.binding is not supported');
}
// TODO(shtylman)
process.cwd = function () { return '/' };
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
},{}],29:[function(_dereq_,module,exports){
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor
ctor.prototype = Object.create(superCtor.prototype, {
constructor: {
value: ctor,
enumerable: false,
writable: true,
configurable: true
}
});
};
} else {
// old school shim for old browsers
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor
var TempCtor = function () {}
TempCtor.prototype = superCtor.prototype
ctor.prototype = new TempCtor()
ctor.prototype.constructor = ctor
}
}
},{}],30:[function(_dereq_,module,exports){
'use strict';
module.exports = INTERNAL;
function INTERNAL() {}
},{}],31:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('./promise');
var reject = _dereq_('./reject');
var resolve = _dereq_('./resolve');
var INTERNAL = _dereq_('./INTERNAL');
var handlers = _dereq_('./handlers');
module.exports = all;
function all(iterable) {
if (Object.prototype.toString.call(iterable) !== '[object Array]') {
return reject(new TypeError('must be an array'));
}
var len = iterable.length;
var called = false;
if (!len) {
return resolve([]);
}
var values = new Array(len);
var resolved = 0;
var i = -1;
var promise = new Promise(INTERNAL);
while (++i < len) {
allResolver(iterable[i], i);
}
return promise;
function allResolver(value, i) {
resolve(value).then(resolveFromAll, function (error) {
if (!called) {
called = true;
handlers.reject(promise, error);
}
});
function resolveFromAll(outValue) {
values[i] = outValue;
if (++resolved === len && !called) {
called = true;
handlers.resolve(promise, values);
}
}
}
}
},{"./INTERNAL":30,"./handlers":32,"./promise":34,"./reject":37,"./resolve":38}],32:[function(_dereq_,module,exports){
'use strict';
var tryCatch = _dereq_('./tryCatch');
var resolveThenable = _dereq_('./resolveThenable');
var states = _dereq_('./states');
exports.resolve = function (self, value) {
var result = tryCatch(getThen, value);
if (result.status === 'error') {
return exports.reject(self, result.value);
}
var thenable = result.value;
if (thenable) {
resolveThenable.safely(self, thenable);
} else {
self.state = states.FULFILLED;
self.outcome = value;
var i = -1;
var len = self.queue.length;
while (++i < len) {
self.queue[i].callFulfilled(value);
}
}
return self;
};
exports.reject = function (self, error) {
self.state = states.REJECTED;
self.outcome = error;
var i = -1;
var len = self.queue.length;
while (++i < len) {
self.queue[i].callRejected(error);
}
return self;
};
function getThen(obj) {
// Make sure we only access the accessor once as required by the spec
var then = obj && obj.then;
if (obj && typeof obj === 'object' && typeof then === 'function') {
return function applyThen() {
then.apply(obj, arguments);
};
}
}
},{"./resolveThenable":39,"./states":40,"./tryCatch":41}],33:[function(_dereq_,module,exports){
module.exports = exports = _dereq_('./promise');
exports.resolve = _dereq_('./resolve');
exports.reject = _dereq_('./reject');
exports.all = _dereq_('./all');
exports.race = _dereq_('./race');
},{"./all":31,"./promise":34,"./race":36,"./reject":37,"./resolve":38}],34:[function(_dereq_,module,exports){
'use strict';
var unwrap = _dereq_('./unwrap');
var INTERNAL = _dereq_('./INTERNAL');
var resolveThenable = _dereq_('./resolveThenable');
var states = _dereq_('./states');
var QueueItem = _dereq_('./queueItem');
module.exports = Promise;
function Promise(resolver) {
if (!(this instanceof Promise)) {
return new Promise(resolver);
}
if (typeof resolver !== 'function') {
throw new TypeError('resolver must be a function');
}
this.state = states.PENDING;
this.queue = [];
this.outcome = void 0;
if (resolver !== INTERNAL) {
resolveThenable.safely(this, resolver);
}
}
Promise.prototype['catch'] = function (onRejected) {
return this.then(null, onRejected);
};
Promise.prototype.then = function (onFulfilled, onRejected) {
if (typeof onFulfilled !== 'function' && this.state === states.FULFILLED ||
typeof onRejected !== 'function' && this.state === states.REJECTED) {
return this;
}
var promise = new Promise(INTERNAL);
if (this.state !== states.PENDING) {
var resolver = this.state === states.FULFILLED ? onFulfilled: onRejected;
unwrap(promise, resolver, this.outcome);
} else {
this.queue.push(new QueueItem(promise, onFulfilled, onRejected));
}
return promise;
};
},{"./INTERNAL":30,"./queueItem":35,"./resolveThenable":39,"./states":40,"./unwrap":42}],35:[function(_dereq_,module,exports){
'use strict';
var handlers = _dereq_('./handlers');
var unwrap = _dereq_('./unwrap');
module.exports = QueueItem;
function QueueItem(promise, onFulfilled, onRejected) {
this.promise = promise;
if (typeof onFulfilled === 'function') {
this.onFulfilled = onFulfilled;
this.callFulfilled = this.otherCallFulfilled;
}
if (typeof onRejected === 'function') {
this.onRejected = onRejected;
this.callRejected = this.otherCallRejected;
}
}
QueueItem.prototype.callFulfilled = function (value) {
handlers.resolve(this.promise, value);
};
QueueItem.prototype.otherCallFulfilled = function (value) {
unwrap(this.promise, this.onFulfilled, value);
};
QueueItem.prototype.callRejected = function (value) {
handlers.reject(this.promise, value);
};
QueueItem.prototype.otherCallRejected = function (value) {
unwrap(this.promise, this.onRejected, value);
};
},{"./handlers":32,"./unwrap":42}],36:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('./promise');
var reject = _dereq_('./reject');
var resolve = _dereq_('./resolve');
var INTERNAL = _dereq_('./INTERNAL');
var handlers = _dereq_('./handlers');
module.exports = race;
function race(iterable) {
if (Object.prototype.toString.call(iterable) !== '[object Array]') {
return reject(new TypeError('must be an array'));
}
var len = iterable.length;
var called = false;
if (!len) {
return resolve([]);
}
var resolved = 0;
var i = -1;
var promise = new Promise(INTERNAL);
while (++i < len) {
resolver(iterable[i]);
}
return promise;
function resolver(value) {
resolve(value).then(function (response) {
if (!called) {
called = true;
handlers.resolve(promise, response);
}
}, function (error) {
if (!called) {
called = true;
handlers.reject(promise, error);
}
});
}
}
},{"./INTERNAL":30,"./handlers":32,"./promise":34,"./reject":37,"./resolve":38}],37:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('./promise');
var INTERNAL = _dereq_('./INTERNAL');
var handlers = _dereq_('./handlers');
module.exports = reject;
function reject(reason) {
var promise = new Promise(INTERNAL);
return handlers.reject(promise, reason);
}
},{"./INTERNAL":30,"./handlers":32,"./promise":34}],38:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('./promise');
var INTERNAL = _dereq_('./INTERNAL');
var handlers = _dereq_('./handlers');
module.exports = resolve;
var FALSE = handlers.resolve(new Promise(INTERNAL), false);
var NULL = handlers.resolve(new Promise(INTERNAL), null);
var UNDEFINED = handlers.resolve(new Promise(INTERNAL), void 0);
var ZERO = handlers.resolve(new Promise(INTERNAL), 0);
var EMPTYSTRING = handlers.resolve(new Promise(INTERNAL), '');
function resolve(value) {
if (value) {
if (value instanceof Promise) {
return value;
}
return handlers.resolve(new Promise(INTERNAL), value);
}
var valueType = typeof value;
switch (valueType) {
case 'boolean':
return FALSE;
case 'undefined':
return UNDEFINED;
case 'object':
return NULL;
case 'number':
return ZERO;
case 'string':
return EMPTYSTRING;
}
}
},{"./INTERNAL":30,"./handlers":32,"./promise":34}],39:[function(_dereq_,module,exports){
'use strict';
var handlers = _dereq_('./handlers');
var tryCatch = _dereq_('./tryCatch');
function safelyResolveThenable(self, thenable) {
// Either fulfill or reject the promise, exactly once, even if the resolver throws
var called = false;
function onError(value) {
if (called) {
return;
}
called = true;
handlers.reject(self, value);
}
function onSuccess(value) {
if (called) {
return;
}
called = true;
handlers.resolve(self, value);
}
function tryToUnwrap() {
thenable(onSuccess, onError);
}
var result = tryCatch(tryToUnwrap);
if (result.status === 'error') {
onError(result.value);
}
}
exports.safely = safelyResolveThenable;
},{"./handlers":32,"./tryCatch":41}],40:[function(_dereq_,module,exports){
// Lazy man's symbols for states
exports.REJECTED = ['REJECTED'];
exports.FULFILLED = ['FULFILLED'];
exports.PENDING = ['PENDING'];
},{}],41:[function(_dereq_,module,exports){
'use strict';
module.exports = tryCatch;
function tryCatch(func, value) {
var out = {};
try {
out.value = func(value);
out.status = 'success';
} catch (e) {
out.status = 'error';
out.value = e;
}
return out;
}
},{}],42:[function(_dereq_,module,exports){
'use strict';
var immediate = _dereq_('immediate');
var handlers = _dereq_('./handlers');
module.exports = unwrap;
function unwrap(promise, func, value) {
immediate(function () {
var returnValue;
try {
returnValue = func(value);
} catch (e) {
return handlers.reject(promise, e);
}
if (returnValue === promise) {
handlers.reject(promise, new TypeError('Cannot resolve promise with itself'));
} else {
handlers.resolve(promise, returnValue);
}
});
}
},{"./handlers":32,"immediate":43}],43:[function(_dereq_,module,exports){
'use strict';
var types = [
_dereq_('./nextTick'),
_dereq_('./mutation.js'),
_dereq_('./messageChannel'),
_dereq_('./stateChange'),
_dereq_('./timeout')
];
var draining;
var queue = [];
function drainQueue() {
draining = true;
var i, oldQueue;
var len = queue.length;
while (len) {
oldQueue = queue;
queue = [];
i = -1;
while (++i < len) {
oldQueue[i]();
}
len = queue.length;
}
draining = false;
}
var scheduleDrain;
var i = -1;
var len = types.length;
while (++ i < len) {
if (types[i] && types[i].test && types[i].test()) {
scheduleDrain = types[i].install(drainQueue);
break;
}
}
module.exports = immediate;
function immediate(task) {
if (queue.push(task) === 1 && !draining) {
scheduleDrain();
}
}
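// Usage sketch (illustrative only): immediate() queues a task and runs it
// asynchronously via the fastest mechanism detected above.
/*
immediate(function () { console.log('runs after the current call stack unwinds'); });
console.log('runs first');
*/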
},{"./messageChannel":44,"./mutation.js":45,"./nextTick":26,"./stateChange":46,"./timeout":47}],44:[function(_dereq_,module,exports){
(function (global){
'use strict';
exports.test = function () {
if (global.setImmediate) {
// we can only get here in IE10
// which doesn't handle postMessage well
return false;
}
return typeof global.MessageChannel !== 'undefined';
};
exports.install = function (func) {
var channel = new global.MessageChannel();
channel.port1.onmessage = func;
return function () {
channel.port2.postMessage(0);
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],45:[function(_dereq_,module,exports){
(function (global){
'use strict';
//based off rsvp https://github.com/tildeio/rsvp.js
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
var Mutation = global.MutationObserver || global.WebKitMutationObserver;
exports.test = function () {
return Mutation;
};
exports.install = function (handle) {
var called = 0;
var observer = new Mutation(handle);
var element = global.document.createTextNode('');
observer.observe(element, {
characterData: true
});
return function () {
element.data = (called = ++called % 2);
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],46:[function(_dereq_,module,exports){
(function (global){
'use strict';
exports.test = function () {
return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
};
exports.install = function (handle) {
return function () {
// Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
// into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
var scriptEl = global.document.createElement('script');
scriptEl.onreadystatechange = function () {
handle();
scriptEl.onreadystatechange = null;
scriptEl.parentNode.removeChild(scriptEl);
scriptEl = null;
};
global.document.documentElement.appendChild(scriptEl);
return handle;
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],47:[function(_dereq_,module,exports){
'use strict';
exports.test = function () {
return true;
};
exports.install = function (t) {
return function () {
setTimeout(t, 0);
};
};
},{}],48:[function(_dereq_,module,exports){
"use strict";
// Extends method
// (taken from http://code.jquery.com/jquery-1.9.0.js)
// Populate the class2type map
var class2type = {};
var types = [
"Boolean", "Number", "String", "Function", "Array",
"Date", "RegExp", "Object", "Error"
];
for (var i = 0; i < types.length; i++) {
var typename = types[i];
class2type["[object " + typename + "]"] = typename.toLowerCase();
}
var core_toString = class2type.toString;
var core_hasOwn = class2type.hasOwnProperty;
function type(obj) {
if (obj === null) {
return String(obj);
}
return typeof obj === "object" || typeof obj === "function" ?
class2type[core_toString.call(obj)] || "object" :
typeof obj;
}
function isWindow(obj) {
return obj !== null && obj === obj.window;
}
function isPlainObject(obj) {
// Must be an Object.
// Because of IE, we also have to check the presence of
// the constructor property.
// Make sure that DOM nodes and window objects don't pass through, as well
if (!obj || type(obj) !== "object" || obj.nodeType || isWindow(obj)) {
return false;
}
try {
// If obj has a constructor that is not an own property, it must be the Object constructor
if (obj.constructor &&
!core_hasOwn.call(obj, "constructor") &&
!core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf")) {
return false;
}
} catch ( e ) {
// IE8,9 Will throw exceptions on certain host objects #9897
return false;
}
// Own properties are enumerated firstly, so to speed up,
// if last one is own, then all properties are own.
var key;
for (key in obj) {}
return key === undefined || core_hasOwn.call(obj, key);
}
function isFunction(obj) {
return type(obj) === "function";
}
var isArray = Array.isArray || function (obj) {
return type(obj) === "array";
};
function extend() {
// originally extend() was recursive, but this ended up giving us
// "call stack exceeded", so it's been unrolled to use a literal stack
// (see https://github.com/pouchdb/pouchdb/issues/2543)
var stack = [];
var i = -1;
var len = arguments.length;
var args = new Array(len);
while (++i < len) {
args[i] = arguments[i];
}
var container = {};
stack.push({args: args, result: {container: container, key: 'key'}});
var next;
while ((next = stack.pop())) {
extendInner(stack, next.args, next.result);
}
return container.key;
}
function extendInner(stack, args, result) {
var options, name, src, copy, copyIsArray, clone,
target = args[0] || {},
i = 1,
length = args.length,
deep = false,
numericStringRegex = /\d+/,
optionsIsArray;
// Handle a deep copy situation
if (typeof target === "boolean") {
deep = target;
target = args[1] || {};
// skip the boolean and the target
i = 2;
}
// Handle case when target is a string or something (possible in deep copy)
if (typeof target !== "object" && !isFunction(target)) {
target = {};
}
// extend jQuery itself if only one argument is passed
if (length === i) {
/* jshint validthis: true */
target = this;
--i;
}
for (; i < length; i++) {
// Only deal with non-null/undefined values
if ((options = args[i]) != null) {
optionsIsArray = isArray(options);
// Extend the base object
for (name in options) {
//if (options.hasOwnProperty(name)) {
if (!(name in Object.prototype)) {
if (optionsIsArray && !numericStringRegex.test(name)) {
continue;
}
src = target[name];
copy = options[name];
// Prevent never-ending loop
if (target === copy) {
continue;
}
// Recurse if we're merging plain objects or arrays
if (deep && copy && (isPlainObject(copy) ||
(copyIsArray = isArray(copy)))) {
if (copyIsArray) {
copyIsArray = false;
clone = src && isArray(src) ? src : [];
} else {
clone = src && isPlainObject(src) ? src : {};
}
// Never move original objects, clone them
stack.push({
args: [deep, clone, copy],
result: {
container: target,
key: name
}
});
// Don't bring in undefined values
} else if (copy !== undefined) {
if (!(isArray(options) && isFunction(copy))) {
target[name] = copy;
}
}
}
}
}
}
// "Return" the modified object by setting the key
// on the given container
result.container[result.key] = target;
}
module.exports = extend;
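// Usage sketch (illustrative only) of the iterative deep extend above:
/*
var merged = extend(true, {}, { a: 1, nested: { x: 1 } });
// => { a: 1, nested: { x: 1 } }   (nested is a copy, not a shared reference)
extend({ a: 1 }, { b: 2 });
// => { a: 1, b: 2 }               (shallow merge into the first argument)
*/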
},{}],49:[function(_dereq_,module,exports){
'use strict';
var upsert = _dereq_('./upsert');
var utils = _dereq_('./utils');
var Promise = utils.Promise;
module.exports = function (opts) {
var sourceDB = opts.db;
var viewName = opts.viewName;
var mapFun = opts.map;
var reduceFun = opts.reduce;
var temporary = opts.temporary;
// the "undefined" part is for backwards compatibility
var viewSignature = mapFun.toString() + (reduceFun && reduceFun.toString()) +
'undefined';
if (!temporary && sourceDB._cachedViews) {
var cachedView = sourceDB._cachedViews[viewSignature];
if (cachedView) {
return Promise.resolve(cachedView);
}
}
return sourceDB.info().then(function (info) {
var depDbName = info.db_name + '-mrview-' +
(temporary ? 'temp' : utils.MD5(viewSignature));
// save the view name in the source PouchDB so it can be cleaned up if necessary
// (e.g. when the _design doc is deleted, remove all associated view data)
function diffFunction(doc) {
doc.views = doc.views || {};
var fullViewName = viewName;
if (fullViewName.indexOf('/') === -1) {
fullViewName = viewName + '/' + viewName;
}
var depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {};
/* istanbul ignore if */
if (depDbs[depDbName]) {
return; // no update necessary
}
depDbs[depDbName] = true;
return doc;
}
return upsert(sourceDB, '_local/mrviews', diffFunction).then(function () {
return sourceDB.registerDependentDatabase(depDbName).then(function (res) {
var db = res.db;
db.auto_compaction = true;
var view = {
name: depDbName,
db: db,
sourceDB: sourceDB,
adapter: sourceDB.adapter,
mapFun: mapFun,
reduceFun: reduceFun
};
return view.db.get('_local/lastSeq')["catch"](function (err) {
/* istanbul ignore if */
if (err.status !== 404) {
throw err;
}
}).then(function (lastSeqDoc) {
view.seq = lastSeqDoc ? lastSeqDoc.seq : 0;
if (!temporary) {
sourceDB._cachedViews = sourceDB._cachedViews || {};
sourceDB._cachedViews[viewSignature] = view;
view.db.on('destroyed', function () {
delete sourceDB._cachedViews[viewSignature];
});
}
return view;
});
});
});
});
};
},{"./upsert":55,"./utils":56}],50:[function(_dereq_,module,exports){
'use strict';
module.exports = function (func, emit, sum, log, isArray, toJSON) {
/*jshint evil:true,unused:false */
return eval("'use strict'; (" + func.replace(/;\s*$/, "") + ");");
};
},{}],51:[function(_dereq_,module,exports){
(function (process){
'use strict';
var pouchCollate = _dereq_('pouchdb-collate');
var TaskQueue = _dereq_('./taskqueue');
var collate = pouchCollate.collate;
var toIndexableString = pouchCollate.toIndexableString;
var normalizeKey = pouchCollate.normalizeKey;
var createView = _dereq_('./create-view');
var evalFunc = _dereq_('./evalfunc');
var log;
/* istanbul ignore else */
if ((typeof console !== 'undefined') && (typeof console.log === 'function')) {
log = Function.prototype.bind.call(console.log, console);
} else {
log = function () {};
}
var utils = _dereq_('./utils');
var Promise = utils.Promise;
var mainQueue = new TaskQueue();
var tempViewQueue = new TaskQueue();
var CHANGES_BATCH_SIZE = 50;
function parseViewName(name) {
// can be either 'ddocname/viewname' or just 'viewname'
// (where the ddoc name is the same)
return name.indexOf('/') === -1 ? [name, name] : name.split('/');
}
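// Examples of the two accepted forms (illustrative only):
/*
parseViewName('mydesign/myview');  // => ['mydesign', 'myview']
parseViewName('myview');           // => ['myview', 'myview']
*/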
function tryCode(db, fun, args) {
// emit an event if there was an error thrown by a map/reduce function.
// putting try/catches in a single function also avoids deoptimizations.
try {
return {
output : fun.apply(null, args)
};
} catch (e) {
db.emit('error', e);
return {error : e};
}
}
function sortByKeyThenValue(x, y) {
var keyCompare = collate(x.key, y.key);
return keyCompare !== 0 ? keyCompare : collate(x.value, y.value);
}
function sliceResults(results, limit, skip) {
skip = skip || 0;
if (typeof limit === 'number') {
return results.slice(skip, limit + skip);
} else if (skip > 0) {
return results.slice(skip);
}
return results;
}
function createBuiltInError(name) {
var error = new Error('builtin ' + name +
' function requires map values to be numbers' +
' or number arrays');
error.name = 'invalid_value';
error.status = 500;
return error;
}
function sum(values) {
var result = 0;
for (var i = 0, len = values.length; i < len; i++) {
var num = values[i];
if (typeof num !== 'number') {
if (Array.isArray(num)) {
// lists of numbers are also allowed, sum them separately
result = typeof result === 'number' ? [result] : result;
for (var j = 0, jLen = num.length; j < jLen; j++) {
var jNum = num[j];
if (typeof jNum !== 'number') {
throw createBuiltInError('_sum');
} else if (typeof result[j] === 'undefined') {
result.push(jNum);
} else {
result[j] += jNum;
}
}
} else { // not array/number
throw createBuiltInError('_sum');
}
} else if (typeof result === 'number') {
result += num;
} else { // add number to array
result[0] += num;
}
}
return result;
}
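// Behavior sketch for the _sum helper above (illustrative only):
/*
sum([1, 2, 3]);         // => 6
sum([[1, 2], [3, 4]]);  // => [4, 6]  (element-wise sum of number arrays)
sum([1, 'x']);          // throws the 'invalid_value' builtin _sum error
*/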
var builtInReduce = {
_sum: function (keys, values) {
return sum(values);
},
_count: function (keys, values) {
return values.length;
},
_stats: function (keys, values) {
// no need to implement rereduce=true, because Pouch
// will never call it
function sumsqr(values) {
var _sumsqr = 0;
for (var i = 0, len = values.length; i < len; i++) {
var num = values[i];
_sumsqr += (num * num);
}
return _sumsqr;
}
return {
sum : sum(values),
min : Math.min.apply(null, values),
max : Math.max.apply(null, values),
count : values.length,
sumsqr : sumsqr(values)
};
}
};
function addHttpParam(paramName, opts, params, asJson) {
// add an http param from opts to params, optionally json-encoded
var val = opts[paramName];
if (typeof val !== 'undefined') {
if (asJson) {
val = encodeURIComponent(JSON.stringify(val));
}
params.push(paramName + '=' + val);
}
}
function checkQueryParseError(options, fun) {
var startkeyName = options.descending ? 'endkey' : 'startkey';
var endkeyName = options.descending ? 'startkey' : 'endkey';
if (typeof options[startkeyName] !== 'undefined' &&
typeof options[endkeyName] !== 'undefined' &&
collate(options[startkeyName], options[endkeyName]) > 0) {
throw new QueryParseError('No rows can match your key range, reverse your ' +
'start_key and end_key or set {descending : true}');
} else if (fun.reduce && options.reduce !== false) {
if (options.include_docs) {
throw new QueryParseError('{include_docs:true} is invalid for reduce');
} else if (options.keys && options.keys.length > 1 &&
!options.group && !options.group_level) {
throw new QueryParseError('Multi-key fetches for reduce views must use {group: true}');
}
}
if (options.group_level) {
if (typeof options.group_level !== 'number') {
throw new QueryParseError('Invalid value for integer: "' + options.group_level + '"');
}
if (options.group_level < 0) {
throw new QueryParseError('Invalid value for positive integer: ' +
'"' + options.group_level + '"');
}
}
}
function httpQuery(db, fun, opts) {
// List of query parameters to add to the request
var params = [];
var body;
var method = 'GET';
// If opts.reduce is defined, add it to the list of parameters.
// With reduce=false the results are those of the map function only,
// not the final result of map and reduce.
addHttpParam('reduce', opts, params);
addHttpParam('include_docs', opts, params);
addHttpParam('limit', opts, params);
addHttpParam('descending', opts, params);
addHttpParam('group', opts, params);
addHttpParam('group_level', opts, params);
addHttpParam('skip', opts, params);
addHttpParam('stale', opts, params);
addHttpParam('startkey', opts, params, true);
addHttpParam('endkey', opts, params, true);
addHttpParam('inclusive_end', opts, params);
addHttpParam('key', opts, params, true);
// Format the list of parameters into a valid URI query string
params = params.join('&');
params = params === '' ? '' : '?' + params;
// If keys are supplied, issue a POST request to circumvent GET query string limits
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
if (typeof opts.keys !== 'undefined') {
var MAX_URL_LENGTH = 2000;
// according to http://stackoverflow.com/a/417184/680742,
// the de facto URL length limit is 2000 characters
var keysAsString =
'keys=' + encodeURIComponent(JSON.stringify(opts.keys));
if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) {
// If the keys are short enough, do a GET. we do this to work around
// Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239)
params += (params[0] === '?' ? '&' : '?') + keysAsString;
} else {
method = 'POST';
if (typeof fun === 'string') {
body = JSON.stringify({keys: opts.keys});
} else { // fun is {map : mapfun}, so append to this
fun.keys = opts.keys;
}
}
}
// We are referencing a query defined in the design doc
if (typeof fun === 'string') {
var parts = parseViewName(fun);
return db.request({
method: method,
url: '_design/' + parts[0] + '/_view/' + parts[1] + params,
body: body
});
}
// We are using a temporary view, terrible for performance but good for testing
body = body || {};
Object.keys(fun).forEach(function (key) {
if (Array.isArray(fun[key])) {
body[key] = fun[key];
} else {
body[key] = fun[key].toString();
}
});
return db.request({
method: 'POST',
url: '_temp_view' + params,
body: body
});
}
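// Request sketch (illustrative only; 'blog/by_date' is a made-up view name):
// querying a named view with simple options becomes a GET against the design
// document's _view endpoint.
/*
httpQuery(db, 'blog/by_date', { limit: 10, descending: true });
// issues: GET _design/blog/_view/by_date?limit=10&descending=true
*/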
function defaultsTo(value) {
return function (reason) {
/* istanbul ignore else */
if (reason.status === 404) {
return value;
} else {
throw reason;
}
};
}
// returns a promise for a list of docs to update, based on the input docId.
// we update the metaDoc first (i.e. the doc that points from the sourceDB
// document Id to the ids of the documents in the mrview database), then
// the key/value docs. that way, if lightning strikes the user's computer
// in the middle of an update, we don't write any docs that we wouldn't
// be able to find later using the metaDoc.
function getDocsToPersist(docId, view, docIdsToEmits) {
var metaDocId = '_local/doc_' + docId;
return view.db.get(metaDocId)[
"catch"](defaultsTo({_id: metaDocId, keys: []}))
.then(function (metaDoc) {
return Promise.resolve().then(function () {
if (metaDoc.keys.length) {
return view.db.allDocs({
keys: metaDoc.keys,
include_docs: true
});
}
return {rows: []}; // no keys, no need for a lookup
}).then(function (res) {
var kvDocs = res.rows.map(function (row) {
return row.doc;
}).filter(function (row) {
return row;
});
var indexableKeysToKeyValues = docIdsToEmits[docId];
var oldKeysMap = {};
kvDocs.forEach(function (kvDoc) {
oldKeysMap[kvDoc._id] = true;
kvDoc._deleted = !indexableKeysToKeyValues[kvDoc._id];
if (!kvDoc._deleted) {
var keyValue = indexableKeysToKeyValues[kvDoc._id];
if ('value' in keyValue) {
kvDoc.value = keyValue.value;
}
}
});
var newKeys = Object.keys(indexableKeysToKeyValues);
newKeys.forEach(function (key) {
if (!oldKeysMap[key]) {
// new doc
var kvDoc = {
_id: key
};
var keyValue = indexableKeysToKeyValues[key];
if ('value' in keyValue) {
kvDoc.value = keyValue.value;
}
kvDocs.push(kvDoc);
}
});
metaDoc.keys = utils.uniq(newKeys.concat(metaDoc.keys));
kvDocs.splice(0, 0, metaDoc);
return kvDocs;
});
});
}
// updates all emitted key/value docs and metaDocs in the mrview database
// for the given batch of documents from the source database
function saveKeyValues(view, docIdsToEmits, seq) {
var seqDocId = '_local/lastSeq';
return view.db.get(seqDocId)[
"catch"](defaultsTo({_id: seqDocId, seq: 0}))
.then(function (lastSeqDoc) {
var docIds = Object.keys(docIdsToEmits);
return Promise.all(docIds.map(function (docId) {
return getDocsToPersist(docId, view, docIdsToEmits);
})).then(function (listOfDocsToPersist) {
var docsToPersist = [];
listOfDocsToPersist.forEach(function (docList) {
docsToPersist = docsToPersist.concat(docList);
});
// update the seq doc last, so that if a meteor strikes the user's
// computer in the middle of an update, we can apply the idempotent
// batch update operation again
lastSeqDoc.seq = seq;
docsToPersist.push(lastSeqDoc);
return view.db.bulkDocs({docs : docsToPersist});
});
});
}
var updateView = utils.sequentialize(mainQueue, function (view) {
// bind the emit function once
var mapResults;
var doc;
function emit(key, value) {
var output = { id: doc._id, key: normalizeKey(key) };
// Don't explicitly store the value unless it's defined and non-null.
// This saves on storage space, because often people don't use it.
if (typeof value !== 'undefined' && value !== null) {
output.value = normalizeKey(value);
}
mapResults.push(output);
}
var mapFun;
// for temp_views the map function may take (doc, emit) as its arguments, see #38
if (typeof view.mapFun === "function" && view.mapFun.length === 2) {
var origMap = view.mapFun;
mapFun = function (doc) {
return origMap(doc, emit);
};
} else {
mapFun = evalFunc(view.mapFun.toString(), emit, sum, log, Array.isArray, JSON.parse);
}
var currentSeq = view.seq || 0;
function processChange(docIdsToEmits, seq) {
return function () {
return saveKeyValues(view, docIdsToEmits, seq);
};
}
var queue = new TaskQueue();
// TODO(neojski): https://github.com/daleharvey/pouchdb/issues/1521
return new Promise(function (resolve, reject) {
function complete() {
queue.finish().then(function () {
view.seq = currentSeq;
resolve();
});
}
function processNextBatch() {
view.sourceDB.changes({
conflicts: true,
include_docs: true,
since : currentSeq,
limit : CHANGES_BATCH_SIZE
}).on('complete', function (response) {
var results = response.results;
if (!results.length) {
return complete();
}
var docIdsToEmits = {};
for (var i = 0, l = results.length; i < l; i++) {
var change = results[i];
if (change.doc._id[0] !== '_') {
mapResults = [];
doc = change.doc;
if (!doc._deleted) {
tryCode(view.sourceDB, mapFun, [doc]);
}
mapResults.sort(sortByKeyThenValue);
var indexableKeysToKeyValues = {};
var lastKey;
for (var j = 0, jl = mapResults.length; j < jl; j++) {
var obj = mapResults[j];
var complexKey = [obj.key, obj.id];
if (obj.key === lastKey) {
complexKey.push(j); // dup key+id, so make it unique
}
var indexableKey = toIndexableString(complexKey);
indexableKeysToKeyValues[indexableKey] = obj;
lastKey = obj.key;
}
docIdsToEmits[change.doc._id] = indexableKeysToKeyValues;
}
currentSeq = change.seq;
}
queue.add(processChange(docIdsToEmits, currentSeq));
if (results.length < CHANGES_BATCH_SIZE) {
return complete();
}
return processNextBatch();
}).on('error', onError);
/* istanbul ignore next */
function onError(err) {
reject(err);
}
}
processNextBatch();
});
});
function reduceView(view, results, options) {
if (options.group_level === 0) {
delete options.group_level;
}
var shouldGroup = options.group || options.group_level;
var reduceFun;
if (builtInReduce[view.reduceFun]) {
reduceFun = builtInReduce[view.reduceFun];
} else {
reduceFun = evalFunc(
view.reduceFun.toString(), null, sum, log, Array.isArray, JSON.parse);
}
var groups = [];
var lvl = options.group_level;
results.forEach(function (e) {
var last = groups[groups.length - 1];
var key = shouldGroup ? e.key : null;
// only set group_level for array keys
if (shouldGroup && Array.isArray(key) && typeof lvl === 'number') {
key = key.length > lvl ? key.slice(0, lvl) : key;
}
if (last && collate(last.key[0][0], key) === 0) {
last.key.push([key, e.id]);
last.value.push(e.value);
return;
}
groups.push({key: [
[key, e.id]
], value: [e.value]});
});
for (var i = 0, len = groups.length; i < len; i++) {
var e = groups[i];
var reduceTry = tryCode(view.sourceDB, reduceFun, [e.key, e.value, false]);
// CouchDB typically just sets the value to null if reduce errors out
e.value = reduceTry.error ? null : reduceTry.output;
e.key = e.key[0][0];
}
// no total_rows/offset when reducing
return {rows: sliceResults(groups, options.limit, options.skip)};
}
var queryView = utils.sequentialize(mainQueue, function (view, opts) {
var totalRows;
var shouldReduce = view.reduceFun && opts.reduce !== false;
var skip = opts.skip || 0;
if (typeof opts.keys !== 'undefined' && !opts.keys.length) {
// an empty 'keys' array is equivalent to a query with limit = 0
opts.limit = 0;
delete opts.keys;
}
function fetchFromView(viewOpts) {
viewOpts.include_docs = true;
return view.db.allDocs(viewOpts).then(function (res) {
totalRows = res.total_rows;
return res.rows.map(function (result) {
// implicit migration - in older versions of PouchDB,
// we explicitly stored the doc as {id: ..., key: ..., value: ...}
// this is tested in a migration test
/* istanbul ignore next */
if ('value' in result.doc && typeof result.doc.value === 'object' &&
result.doc.value !== null) {
var keys = Object.keys(result.doc.value).sort();
// this detection method is not perfect, but it's unlikely the user
// emitted a value which was an object with these 3 exact keys
var expectedKeys = ['id', 'key', 'value'];
if (!(keys < expectedKeys || keys > expectedKeys)) {
return result.doc.value;
}
}
var parsedKeyAndDocId = pouchCollate.parseIndexableString(result.doc._id);
return {
key: parsedKeyAndDocId[0],
id: parsedKeyAndDocId[1],
value: ('value' in result.doc ? result.doc.value : null)
};
});
});
}
function onMapResultsReady(results) {
var res;
if (shouldReduce) {
res = reduceView(view, results, opts);
} else {
res = {
total_rows: totalRows,
offset: skip,
rows: results
};
}
if (opts.include_docs) {
var getDocsPromises = results.map(function (row) {
var val = row.value;
var docId = (val && typeof val === 'object' && val._id) || row.id;
return view.sourceDB.get(docId).then(function (joinedDoc) {
row.doc = joinedDoc;
}, function () {
// document error = don't join
});
});
return Promise.all(getDocsPromises).then(function () {
return res;
});
} else {
return res;
}
}
var flatten = function (array) {
return array.reduce(function (prev, cur) {
return prev.concat(cur);
});
};
if (typeof opts.keys !== 'undefined') {
var keys = opts.keys;
var fetchPromises = keys.map(function (key) {
var viewOpts = {
startkey : toIndexableString([key]),
endkey : toIndexableString([key, {}])
};
return fetchFromView(viewOpts);
});
return Promise.all(fetchPromises).then(flatten).then(onMapResultsReady);
} else { // normal query, no 'keys'
var viewOpts = {
descending : opts.descending
};
if (typeof opts.startkey !== 'undefined') {
viewOpts.startkey = opts.descending ?
toIndexableString([opts.startkey, {}]) :
toIndexableString([opts.startkey]);
}
if (typeof opts.endkey !== 'undefined') {
var inclusiveEnd = opts.inclusive_end !== false;
if (opts.descending) {
inclusiveEnd = !inclusiveEnd;
}
viewOpts.endkey = toIndexableString(inclusiveEnd ? [opts.endkey, {}] : [opts.endkey]);
}
if (typeof opts.key !== 'undefined') {
var keyStart = toIndexableString([opts.key]);
var keyEnd = toIndexableString([opts.key, {}]);
if (viewOpts.descending) {
viewOpts.endkey = keyStart;
viewOpts.startkey = keyEnd;
} else {
viewOpts.startkey = keyStart;
viewOpts.endkey = keyEnd;
}
}
if (!shouldReduce) {
if (typeof opts.limit === 'number') {
viewOpts.limit = opts.limit;
}
viewOpts.skip = skip;
}
return fetchFromView(viewOpts).then(onMapResultsReady);
}
});
function httpViewCleanup(db) {
return db.request({
method: 'POST',
url: '_view_cleanup'
});
}
var localViewCleanup = utils.sequentialize(mainQueue, function (db) {
return db.get('_local/mrviews').then(function (metaDoc) {
var docsToViews = {};
Object.keys(metaDoc.views).forEach(function (fullViewName) {
var parts = parseViewName(fullViewName);
var designDocName = '_design/' + parts[0];
var viewName = parts[1];
docsToViews[designDocName] = docsToViews[designDocName] || {};
docsToViews[designDocName][viewName] = true;
});
var opts = {
keys : Object.keys(docsToViews),
include_docs : true
};
return db.allDocs(opts).then(function (res) {
var viewsToStatus = {};
res.rows.forEach(function (row) {
var ddocName = row.key.substring(8);
Object.keys(docsToViews[row.key]).forEach(function (viewName) {
var fullViewName = ddocName + '/' + viewName;
/* istanbul ignore if */
if (!metaDoc.views[fullViewName]) {
// new format, without slashes, to support PouchDB 2.2.0
// migration test in pouchdb's browser.migration.js verifies this
fullViewName = viewName;
}
var viewDBNames = Object.keys(metaDoc.views[fullViewName]);
// design doc deleted, or view function nonexistent
var statusIsGood = row.doc && row.doc.views && row.doc.views[viewName];
viewDBNames.forEach(function (viewDBName) {
viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood;
});
});
});
var dbsToDelete = Object.keys(viewsToStatus).filter(function (viewDBName) {
return !viewsToStatus[viewDBName];
});
var destroyPromises = dbsToDelete.map(function (viewDBName) {
return db.constructor.destroy(viewDBName, {adapter : db.adapter});
});
return Promise.all(destroyPromises).then(function () {
return {ok: true};
});
});
}, defaultsTo({ok: true}));
});
exports.viewCleanup = utils.callbackify(function () {
var db = this;
if (db.type() === 'http') {
return httpViewCleanup(db);
}
return localViewCleanup(db);
});
function queryPromised(db, fun, opts) {
if (db.type() === 'http') {
return httpQuery(db, fun, opts);
}
if (typeof fun !== 'string') {
// temp_view
checkQueryParseError(opts, fun);
var createViewOpts = {
db : db,
viewName : 'temp_view/temp_view',
map : fun.map,
reduce : fun.reduce,
temporary : true
};
tempViewQueue.add(function () {
return createView(createViewOpts).then(function (view) {
function cleanup() {
return view.db.destroy();
}
return utils.fin(updateView(view).then(function () {
return queryView(view, opts);
}), cleanup);
});
});
return tempViewQueue.finish();
} else {
// persistent view
var fullViewName = fun;
var parts = parseViewName(fullViewName);
var designDocName = parts[0];
var viewName = parts[1];
return db.get('_design/' + designDocName).then(function (doc) {
var fun = doc.views && doc.views[viewName];
if (!fun || typeof fun.map !== 'string') {
throw new NotFoundError('ddoc ' + designDocName + ' has no view named ' +
viewName);
}
checkQueryParseError(opts, fun);
var createViewOpts = {
db : db,
viewName : fullViewName,
map : fun.map,
reduce : fun.reduce
};
return createView(createViewOpts).then(function (view) {
if (opts.stale === 'ok' || opts.stale === 'update_after') {
if (opts.stale === 'update_after') {
process.nextTick(function () {
updateView(view);
});
}
return queryView(view, opts);
} else { // stale not ok
return updateView(view).then(function () {
return queryView(view, opts);
});
}
});
});
}
}
exports.query = function (fun, opts, callback) {
if (typeof opts === 'function') {
callback = opts;
opts = {};
}
opts = utils.extend(true, {}, opts);
if (typeof fun === 'function') {
fun = {map : fun};
}
var db = this;
var promise = Promise.resolve().then(function () {
return queryPromised(db, fun, opts);
});
utils.promisedCallback(promise, callback);
return promise;
};
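// Illustrative usage sketch for the query() method exported above (for
// reference only; assumes a PouchDB instance named `db`, and the design
// document/view name 'mydesign/myview' is hypothetical):
//
//   // temporary (ad-hoc) view, promise style:
//   db.query(function (doc) { emit(doc.name); }, {reduce: false})
//     .then(function (res) { console.log(res.rows); });
//
//   // persistent view with a node-style callback:
//   db.query('mydesign/myview', {key: 'foo', include_docs: true},
//     function (err, res) { if (!err) { console.log(res.rows); } });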
function QueryParseError(message) {
this.status = 400;
this.name = 'query_parse_error';
this.message = message;
this.error = true;
try {
Error.captureStackTrace(this, QueryParseError);
} catch (e) {}
}
utils.inherits(QueryParseError, Error);
function NotFoundError(message) {
this.status = 404;
this.name = 'not_found';
this.message = message;
this.error = true;
try {
Error.captureStackTrace(this, NotFoundError);
} catch (e) {}
}
utils.inherits(NotFoundError, Error);
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"./create-view":49,"./evalfunc":50,"./taskqueue":54,"./utils":56,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"pouchdb-collate":52}],52:[function(_dereq_,module,exports){
'use strict';
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE
var MAGNITUDE_DIGITS = 3; // ditto
var SEP = ''; // set to '_' for easier debugging
var utils = _dereq_('./utils');
exports.collate = function (a, b) {
if (a === b) {
return 0;
}
a = exports.normalizeKey(a);
b = exports.normalizeKey(b);
var ai = collationIndex(a);
var bi = collationIndex(b);
if ((ai - bi) !== 0) {
return ai - bi;
}
if (a === null) {
return 0;
}
switch (typeof a) {
case 'number':
return a - b;
case 'boolean':
return a === b ? 0 : (a < b ? -1 : 1);
case 'string':
return stringCollate(a, b);
}
return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b);
};
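// Illustrative sketch of the collation order implemented above (for
// reference only). Types sort as null < booleans < numbers < strings <
// arrays < objects:
//
//   exports.collate(null, false)  < 0    // true
//   exports.collate(42, 'foo')    < 0    // true
//   exports.collate('foo', [1])   < 0    // true
//   exports.collate([1], {a: 1})  < 0    // true
//   exports.collate(undefined, NaN) === 0  // both normalize to null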
// CouchDB considers null/NaN/Infinity/-Infinity === undefined
// for the purposes of mapreduce indexes. Also, dates get stringified.
exports.normalizeKey = function (key) {
switch (typeof key) {
case 'undefined':
return null;
case 'number':
if (key === Infinity || key === -Infinity || isNaN(key)) {
return null;
}
return key;
case 'object':
var origKey = key;
if (Array.isArray(key)) {
var len = key.length;
key = new Array(len);
for (var i = 0; i < len; i++) {
key[i] = exports.normalizeKey(origKey[i]);
}
} else if (key instanceof Date) {
return key.toJSON();
} else if (key !== null) { // generic object
key = {};
for (var k in origKey) {
if (origKey.hasOwnProperty(k)) {
var val = origKey[k];
if (typeof val !== 'undefined') {
key[k] = exports.normalizeKey(val);
}
}
}
}
}
return key;
};
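// Illustrative behaviour of normalizeKey() (for reference only):
//
//   exports.normalizeKey(undefined)              // -> null
//   exports.normalizeKey(NaN)                    // -> null
//   exports.normalizeKey(Infinity)               // -> null
//   exports.normalizeKey(new Date(0))            // -> '1970-01-01T00:00:00.000Z'
//   exports.normalizeKey({a: undefined, b: NaN}) // -> {b: null}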
function indexify(key) {
if (key !== null) {
switch (typeof key) {
case 'boolean':
return key ? 1 : 0;
case 'number':
return numToIndexableString(key);
case 'string':
// We have to be sure that the key does not contain \u0000
// Do order-preserving replacements:
// 0 -> 1, 1
// 1 -> 1, 2
// 2 -> 2, 2
return key
.replace(/\u0002/g, '\u0002\u0002')
.replace(/\u0001/g, '\u0001\u0002')
.replace(/\u0000/g, '\u0001\u0001');
case 'object':
var isArray = Array.isArray(key);
var arr = isArray ? key : Object.keys(key);
var i = -1;
var len = arr.length;
var result = '';
if (isArray) {
while (++i < len) {
result += exports.toIndexableString(arr[i]);
}
} else {
while (++i < len) {
var objKey = arr[i];
result += exports.toIndexableString(objKey) +
exports.toIndexableString(key[objKey]);
}
}
return result;
}
}
return '';
}
// convert the given key to a string that is appropriate for lexical
// sorting, e.g. within a database, such that the resulting string order
// matches the order given by the collate() function.
exports.toIndexableString = function (key) {
var zero = '\u0000';
key = exports.normalizeKey(key);
return collationIndex(key) + SEP + indexify(key) + zero;
};
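// Illustrative sketch of the key property of toIndexableString() (for
// reference only): the lexicographic order of the encoded strings matches
// the collate() order, which plain string comparison of the raw values does
// not. For example:
//
//   String(2) < String(10)                                          // false ('2' > '10')
//   exports.toIndexableString(2) < exports.toIndexableString(10)    // true
//   exports.toIndexableString(null) < exports.toIndexableString(false) // true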
function parseNumber(str, i) {
var originalIdx = i;
var num;
var zero = str[i] === '1';
if (zero) {
num = 0;
i++;
} else {
var neg = str[i] === '0';
i++;
var numAsString = '';
var magAsString = str.substring(i, i + MAGNITUDE_DIGITS);
var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE;
if (neg) {
magnitude = -magnitude;
}
i += MAGNITUDE_DIGITS;
while (true) {
var ch = str[i];
if (ch === '\u0000') {
break;
} else {
numAsString += ch;
}
i++;
}
numAsString = numAsString.split('.');
if (numAsString.length === 1) {
num = parseInt(numAsString[0], 10);
} else {
num = parseFloat(numAsString[0] + '.' + numAsString[1]);
}
if (neg) {
num = num - 10;
}
if (magnitude !== 0) {
// parseFloat is more reliable than pow due to rounding errors
// e.g. Number.MAX_VALUE would return Infinity if we did
// num * Math.pow(10, magnitude);
num = parseFloat(num + 'e' + magnitude);
}
}
return {num: num, length : i - originalIdx};
}
// move up the stack while parsing
// this function moved outside of parseIndexableString for performance
function pop(stack, metaStack) {
var obj = stack.pop();
if (metaStack.length) {
var lastMetaElement = metaStack[metaStack.length - 1];
if (obj === lastMetaElement.element) {
// popping a meta-element, e.g. an object whose value is another object
metaStack.pop();
lastMetaElement = metaStack[metaStack.length - 1];
}
var element = lastMetaElement.element;
var lastElementIndex = lastMetaElement.index;
if (Array.isArray(element)) {
element.push(obj);
} else if (lastElementIndex === stack.length - 2) { // obj with key+value
var key = stack.pop();
element[key] = obj;
} else {
stack.push(obj); // obj with key only
}
}
}
exports.parseIndexableString = function (str) {
var stack = [];
var metaStack = []; // stack for arrays and objects
var i = 0;
while (true) {
var collationIndex = str[i++];
if (collationIndex === '\u0000') {
if (stack.length === 1) {
return stack.pop();
} else {
pop(stack, metaStack);
continue;
}
}
switch (collationIndex) {
case '1':
stack.push(null);
break;
case '2':
stack.push(str[i] === '1');
i++;
break;
case '3':
var parsedNum = parseNumber(str, i);
stack.push(parsedNum.num);
i += parsedNum.length;
break;
case '4':
var parsedStr = '';
while (true) {
var ch = str[i];
if (ch === '\u0000') {
break;
}
parsedStr += ch;
i++;
}
// perform the reverse of the order-preserving replacement
// algorithm (see above)
parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000')
.replace(/\u0001\u0002/g, '\u0001')
.replace(/\u0002\u0002/g, '\u0002');
stack.push(parsedStr);
break;
case '5':
var arrayElement = { element: [], index: stack.length };
stack.push(arrayElement.element);
metaStack.push(arrayElement);
break;
case '6':
var objElement = { element: {}, index: stack.length };
stack.push(objElement.element);
metaStack.push(objElement);
break;
default:
throw new Error(
'bad collationIndex or unexpectedly reached end of input: ' + collationIndex);
}
}
};
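// Illustrative sketch (for reference only): parseIndexableString() is the
// inverse of toIndexableString(), so a round trip returns the (normalized)
// original value:
//
//   var encoded = exports.toIndexableString([1, 'foo', {bar: true}]);
//   exports.parseIndexableString(encoded);
//   // -> [1, 'foo', {bar: true}]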
function arrayCollate(a, b) {
var len = Math.min(a.length, b.length);
for (var i = 0; i < len; i++) {
var sort = exports.collate(a[i], b[i]);
if (sort !== 0) {
return sort;
}
}
return (a.length === b.length) ? 0 :
(a.length > b.length) ? 1 : -1;
}
function stringCollate(a, b) {
// See: https://github.com/daleharvey/pouchdb/issues/40
// This is incompatible with the CouchDB implementation, but it's the
// best we can do for now
return (a === b) ? 0 : ((a > b) ? 1 : -1);
}
function objectCollate(a, b) {
var ak = Object.keys(a), bk = Object.keys(b);
var len = Math.min(ak.length, bk.length);
for (var i = 0; i < len; i++) {
// First sort the keys
var sort = exports.collate(ak[i], bk[i]);
if (sort !== 0) {
return sort;
}
// if the keys are equal sort the values
sort = exports.collate(a[ak[i]], b[bk[i]]);
if (sort !== 0) {
return sort;
}
}
return (ak.length === bk.length) ? 0 :
(ak.length > bk.length) ? 1 : -1;
}
// The collation is defined by Erlang's ordered terms:
// the atoms null, true, false come first, then numbers, strings,
// arrays, then objects.
// null/undefined/NaN/Infinity/-Infinity are all considered null.
function collationIndex(x) {
var id = ['boolean', 'number', 'string', 'object'];
var idx = id.indexOf(typeof x);
// ~idx is 0 (falsy) when indexOf returns -1, truthy otherwise
if (~idx) {
if (x === null) {
return 1;
}
if (Array.isArray(x)) {
return 5;
}
return idx < 3 ? (idx + 2) : (idx + 3);
}
if (Array.isArray(x)) {
return 5;
}
}
// conversion:
// x yyy zz...zz
// x = 0 for negative, 1 for 0, 2 for positive
// y = exponent (for negative numbers negated) moved so that it's >= 0
// z = mantissa
function numToIndexableString(num) {
if (num === 0) {
return '1';
}
// convert number to exponential format for easier and
// more succinct string sorting
var expFormat = num.toExponential().split(/e\+?/);
var magnitude = parseInt(expFormat[1], 10);
var neg = num < 0;
var result = neg ? '0' : '2';
// first sort by magnitude
// it's easier if all magnitudes are positive
var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE);
var magString = utils.padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS);
result += SEP + magString;
// then sort by the factor
var factor = Math.abs(parseFloat(expFormat[0])); // [1..10)
if (neg) { // for negative reverse ordering
factor = 10 - factor;
}
var factorStr = factor.toFixed(20);
// strip zeros from the end
factorStr = factorStr.replace(/\.?0+$/, '');
result += SEP + factorStr;
return result;
}
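// Illustrative sketch of the ordering guarantee provided by
// numToIndexableString() (for reference only): the encoded strings sort
// lexicographically in numeric order, e.g.
//
//   numToIndexableString(-10) < numToIndexableString(-2)  // true (string comparison)
//   numToIndexableString(-2)  < numToIndexableString(0)   // true
//   numToIndexableString(0)   < numToIndexableString(1)   // true
//   numToIndexableString(1)   < numToIndexableString(10)  // true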
},{"./utils":53}],53:[function(_dereq_,module,exports){
'use strict';
function pad(str, padWith, upToLength) {
var padding = '';
var targetLength = upToLength - str.length;
while (padding.length < targetLength) {
padding += padWith;
}
return padding;
}
exports.padLeft = function (str, padWith, upToLength) {
var padding = pad(str, padWith, upToLength);
return padding + str;
};
exports.padRight = function (str, padWith, upToLength) {
var padding = pad(str, padWith, upToLength);
return str + padding;
};
exports.stringLexCompare = function (a, b) {
var aLen = a.length;
var bLen = b.length;
var i;
for (i = 0; i < aLen; i++) {
if (i === bLen) {
// b is shorter substring of a
return 1;
}
var aChar = a.charAt(i);
var bChar = b.charAt(i);
if (aChar !== bChar) {
return aChar < bChar ? -1 : 1;
}
}
if (aLen < bLen) {
// a is shorter substring of b
return -1;
}
return 0;
};
/*
* returns the decimal form for the given integer, i.e. writes
* out all the digits (in base-10) instead of using scientific notation
*/
exports.intToDecimalForm = function (int) {
var isNeg = int < 0;
var result = '';
do {
var remainder = isNeg ? -Math.ceil(int % 10) : Math.floor(int % 10);
result = remainder + result;
int = isNeg ? Math.ceil(int / 10) : Math.floor(int / 10);
} while (int);
if (isNeg && result !== '0') {
result = '-' + result;
}
return result;
};
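// Illustrative behaviour of intToDecimalForm() (for reference only). Unlike
// String(), it never falls back to scientific notation for large integers;
// for ordinary values it simply spells out the digits:
//
//   exports.intToDecimalForm(0)     // -> '0'
//   exports.intToDecimalForm(255)   // -> '255'
//   exports.intToDecimalForm(-255)  // -> '-255'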
},{}],54:[function(_dereq_,module,exports){
'use strict';
/*
* Simple task queue to sequentialize actions. Assumes callbacks will eventually fire (once).
*/
var Promise = _dereq_('./utils').Promise;
function TaskQueue() {
this.promise = new Promise(function (fulfill) {fulfill(); });
}
TaskQueue.prototype.add = function (promiseFactory) {
this.promise = this.promise["catch"](function () {
// just recover
}).then(function () {
return promiseFactory();
});
return this.promise;
};
TaskQueue.prototype.finish = function () {
return this.promise;
};
module.exports = TaskQueue;
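// Illustrative usage sketch for the TaskQueue above (for reference only;
// doFirstThing/doSecondThing are hypothetical promise-returning functions).
// Tasks are promise factories; each one starts only after the previous one
// has settled, and a rejected task does not block the queue because add()
// recovers from the previous failure before chaining:
//
//   var queue = new TaskQueue();
//   queue.add(function () { return doFirstThing(); });
//   queue.add(function () { return doSecondThing(); });
//   queue.finish().then(function (lastResult) { /* all tasks have settled */ });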
},{"./utils":56}],55:[function(_dereq_,module,exports){
'use strict';
var Promise = _dereq_('./utils').Promise;
// this is essentially the "update sugar" function from daleharvey/pouchdb#1388
// the diffFun tells us what delta to apply to the doc. it either returns
// the doc, or false if it doesn't need to do an update after all
function upsert(db, docId, diffFun) {
return new Promise(function (fulfill, reject) {
if (docId && typeof docId === 'object') {
docId = docId._id;
}
if (typeof docId !== 'string') {
return reject(new Error('doc id is required'));
}
db.get(docId, function (err, doc) {
if (err) {
if (err.status !== 404) {
return reject(err);
}
return fulfill(tryAndPut(db, diffFun({_id : docId}), diffFun));
}
var newDoc = diffFun(doc);
if (!newDoc) {
return fulfill(doc);
}
fulfill(tryAndPut(db, newDoc, diffFun));
});
});
}
function tryAndPut(db, doc, diffFun) {
return db.put(doc)["catch"](function (err) {
if (err.status !== 409) {
throw err;
}
return upsert(db, doc, diffFun);
});
}
module.exports = upsert;
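// Illustrative usage sketch for upsert() (for reference only; assumes a
// PouchDB instance named `db`, and the doc id 'counter' is hypothetical).
// diffFun receives the current doc (or {_id: docId} if the doc does not
// exist yet) and returns the updated doc, or false to skip the write:
//
//   upsert(db, 'counter', function (doc) {
//     doc.count = (doc.count || 0) + 1;
//     return doc;
//   }).then(function (result) {
//     // result is the put() response; 409 conflicts are retried automatically
//   });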
},{"./utils":56}],56:[function(_dereq_,module,exports){
(function (process,global){
'use strict';
/* istanbul ignore if */
if (typeof global.Promise === 'function') {
exports.Promise = global.Promise;
} else {
exports.Promise = _dereq_('lie');
}
// uniquify a list, similar to underscore's _.uniq
exports.uniq = function (arr) {
var map = {};
arr.forEach(function (element) {
map[element] = true;
});
return Object.keys(map);
};
exports.inherits = _dereq_('inherits');
exports.extend = _dereq_('pouchdb-extend');
var argsarray = _dereq_('argsarray');
exports.promisedCallback = function (promise, callback) {
if (callback) {
promise.then(function (res) {
process.nextTick(function () {
callback(null, res);
});
}, function (reason) {
process.nextTick(function () {
callback(reason);
});
});
}
return promise;
};
exports.callbackify = function (fun) {
return argsarray(function (args) {
var cb = args.pop();
var promise = fun.apply(this, args);
if (typeof cb === 'function') {
exports.promisedCallback(promise, cb);
}
return promise;
});
};
// Promise finally util similar to Q.finally
exports.fin = function (promise, cb) {
return promise.then(function (res) {
var promise2 = cb();
if (typeof promise2.then === 'function') {
return promise2.then(function () {
return res;
});
}
return res;
}, function (reason) {
var promise2 = cb();
if (typeof promise2.then === 'function') {
return promise2.then(function () {
throw reason;
});
}
throw reason;
});
};
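// Illustrative sketch of exports.fin() (for reference only; doWork and
// cleanupResources are hypothetical). The cleanup callback runs whether the
// promise fulfills or rejects, and the original outcome is preserved; note
// that the callback's return value is inspected for a .then method, so it
// should return a value or a promise (this mirrors how the bundle uses it
// to destroy temporary view databases):
//
//   exports.fin(doWork(), function () {
//     return cleanupResources();
//   }).then(function (res) { /* res is doWork()'s result */ });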
exports.sequentialize = function (queue, promiseFactory) {
return function () {
var args = arguments;
var that = this;
return queue.add(function () {
return promiseFactory.apply(that, args);
});
};
};
var crypto = _dereq_('crypto');
var Md5 = _dereq_('spark-md5');
exports.MD5 = function (string) {
/* istanbul ignore else */
if (!process.browser) {
return crypto.createHash('md5').update(string).digest('hex');
} else {
return Md5.hash(string);
}
};
}).call(this,_dereq_("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":28,"argsarray":25,"crypto":26,"inherits":29,"lie":33,"pouchdb-extend":48,"spark-md5":57}],57:[function(_dereq_,module,exports){
/*jshint bitwise:false*/
/*global unescape*/
(function (factory) {
if (typeof exports === 'object') {
// Node/CommonJS
module.exports = factory();
} else if (typeof define === 'function' && define.amd) {
// AMD
define(factory);
} else {
// Browser globals (with support for web workers)
var glob;
try {
glob = window;
} catch (e) {
glob = self;
}
glob.SparkMD5 = factory();
}
}(function (undefined) {
'use strict';
////////////////////////////////////////////////////////////////////////////
/*
* Fastest md5 implementation around (JKM md5)
* Credits: Joseph Myers
*
* @see http://www.myersdaily.org/joseph/javascript/md5-text.html
* @see http://jsperf.com/md5-shootout/7
*/
/* this function is much faster, so we use it when possible. Some versions
of IE are the only known browsers that need the slower fallback function,
which is installed by the if clause further below. */
var add32 = function (a, b) {
return (a + b) & 0xFFFFFFFF;
},
cmn = function (q, a, b, x, s, t) {
a = add32(add32(a, q), add32(x, t));
return add32((a << s) | (a >>> (32 - s)), b);
},
ff = function (a, b, c, d, x, s, t) {
return cmn((b & c) | ((~b) & d), a, b, x, s, t);
},
gg = function (a, b, c, d, x, s, t) {
return cmn((b & d) | (c & (~d)), a, b, x, s, t);
},
hh = function (a, b, c, d, x, s, t) {
return cmn(b ^ c ^ d, a, b, x, s, t);
},
ii = function (a, b, c, d, x, s, t) {
return cmn(c ^ (b | (~d)), a, b, x, s, t);
},
md5cycle = function (x, k) {
var a = x[0],
b = x[1],
c = x[2],
d = x[3];
a = ff(a, b, c, d, k[0], 7, -680876936);
d = ff(d, a, b, c, k[1], 12, -389564586);
c = ff(c, d, a, b, k[2], 17, 606105819);
b = ff(b, c, d, a, k[3], 22, -1044525330);
a = ff(a, b, c, d, k[4], 7, -176418897);
d = ff(d, a, b, c, k[5], 12, 1200080426);
c = ff(c, d, a, b, k[6], 17, -1473231341);
b = ff(b, c, d, a, k[7], 22, -45705983);
a = ff(a, b, c, d, k[8], 7, 1770035416);
d = ff(d, a, b, c, k[9], 12, -1958414417);
c = ff(c, d, a, b, k[10], 17, -42063);
b = ff(b, c, d, a, k[11], 22, -1990404162);
a = ff(a, b, c, d, k[12], 7, 1804603682);
d = ff(d, a, b, c, k[13], 12, -40341101);
c = ff(c, d, a, b, k[14], 17, -1502002290);
b = ff(b, c, d, a, k[15], 22, 1236535329);
a = gg(a, b, c, d, k[1], 5, -165796510);
d = gg(d, a, b, c, k[6], 9, -1069501632);
c = gg(c, d, a, b, k[11], 14, 643717713);
b = gg(b, c, d, a, k[0], 20, -373897302);
a = gg(a, b, c, d, k[5], 5, -701558691);
d = gg(d, a, b, c, k[10], 9, 38016083);
c = gg(c, d, a, b, k[15], 14, -660478335);
b = gg(b, c, d, a, k[4], 20, -405537848);
a = gg(a, b, c, d, k[9], 5, 568446438);
d = gg(d, a, b, c, k[14], 9, -1019803690);
c = gg(c, d, a, b, k[3], 14, -187363961);
b = gg(b, c, d, a, k[8], 20, 1163531501);
a = gg(a, b, c, d, k[13], 5, -1444681467);
d = gg(d, a, b, c, k[2], 9, -51403784);
c = gg(c, d, a, b, k[7], 14, 1735328473);
b = gg(b, c, d, a, k[12], 20, -1926607734);
a = hh(a, b, c, d, k[5], 4, -378558);
d = hh(d, a, b, c, k[8], 11, -2022574463);
c = hh(c, d, a, b, k[11], 16, 1839030562);
b = hh(b, c, d, a, k[14], 23, -35309556);
a = hh(a, b, c, d, k[1], 4, -1530992060);
d = hh(d, a, b, c, k[4], 11, 1272893353);
c = hh(c, d, a, b, k[7], 16, -155497632);
b = hh(b, c, d, a, k[10], 23, -1094730640);
a = hh(a, b, c, d, k[13], 4, 681279174);
d = hh(d, a, b, c, k[0], 11, -358537222);
c = hh(c, d, a, b, k[3], 16, -722521979);
b = hh(b, c, d, a, k[6], 23, 76029189);
a = hh(a, b, c, d, k[9], 4, -640364487);
d = hh(d, a, b, c, k[12], 11, -421815835);
c = hh(c, d, a, b, k[15], 16, 530742520);
b = hh(b, c, d, a, k[2], 23, -995338651);
a = ii(a, b, c, d, k[0], 6, -198630844);
d = ii(d, a, b, c, k[7], 10, 1126891415);
c = ii(c, d, a, b, k[14], 15, -1416354905);
b = ii(b, c, d, a, k[5], 21, -57434055);
a = ii(a, b, c, d, k[12], 6, 1700485571);
d = ii(d, a, b, c, k[3], 10, -1894986606);
c = ii(c, d, a, b, k[10], 15, -1051523);
b = ii(b, c, d, a, k[1], 21, -2054922799);
a = ii(a, b, c, d, k[8], 6, 1873313359);
d = ii(d, a, b, c, k[15], 10, -30611744);
c = ii(c, d, a, b, k[6], 15, -1560198380);
b = ii(b, c, d, a, k[13], 21, 1309151649);
a = ii(a, b, c, d, k[4], 6, -145523070);
d = ii(d, a, b, c, k[11], 10, -1120210379);
c = ii(c, d, a, b, k[2], 15, 718787259);
b = ii(b, c, d, a, k[9], 21, -343485551);
x[0] = add32(a, x[0]);
x[1] = add32(b, x[1]);
x[2] = add32(c, x[2]);
x[3] = add32(d, x[3]);
},
/* there needs to be support for Unicode here,
* unless we pretend that we can redefine the MD-5
* algorithm for multi-byte characters (perhaps
* by adding every four 16-bit characters and
* shortening the sum to 32 bits). Otherwise
* I suggest performing MD-5 as if every character
* was two bytes--e.g., 0040 0025 = @%--but then
* how will an ordinary MD-5 sum be matched?
* There is no way to standardize text to something
* like UTF-8 before transformation; speed cost is
* utterly prohibitive. The JavaScript standard
* itself needs to look at this: it should start
* providing access to strings as preformed UTF-8
* 8-bit unsigned value arrays.
*/
md5blk = function (s) {
var md5blks = [],
i; /* Andy King said do it this way. */
for (i = 0; i < 64; i += 4) {
md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24);
}
return md5blks;
},
md5blk_array = function (a) {
var md5blks = [],
i; /* Andy King said do it this way. */
for (i = 0; i < 64; i += 4) {
md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24);
}
return md5blks;
},
md51 = function (s) {
var n = s.length,
state = [1732584193, -271733879, -1732584194, 271733878],
i,
length,
tail,
tmp,
lo,
hi;
for (i = 64; i <= n; i += 64) {
md5cycle(state, md5blk(s.substring(i - 64, i)));
}
s = s.substring(i - 64);
length = s.length;
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
for (i = 0; i < length; i += 1) {
tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3);
}
tail[i >> 2] |= 0x80 << ((i % 4) << 3);
if (i > 55) {
md5cycle(state, tail);
for (i = 0; i < 16; i += 1) {
tail[i] = 0;
}
}
// Beware that the final length might not fit in 32 bits so we take care of that
tmp = n * 8;
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
lo = parseInt(tmp[2], 16);
hi = parseInt(tmp[1], 16) || 0;
tail[14] = lo;
tail[15] = hi;
md5cycle(state, tail);
return state;
},
md51_array = function (a) {
var n = a.length,
state = [1732584193, -271733879, -1732584194, 271733878],
i,
length,
tail,
tmp,
lo,
hi;
for (i = 64; i <= n; i += 64) {
md5cycle(state, md5blk_array(a.subarray(i - 64, i)));
}
// Not sure if it is a bug, however IE10 will always produce a sub array of length 1
// containing the last element of the parent array if the sub array specified starts
// beyond the length of the parent array - weird.
// https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue
a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0);
length = a.length;
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
for (i = 0; i < length; i += 1) {
tail[i >> 2] |= a[i] << ((i % 4) << 3);
}
tail[i >> 2] |= 0x80 << ((i % 4) << 3);
if (i > 55) {
md5cycle(state, tail);
for (i = 0; i < 16; i += 1) {
tail[i] = 0;
}
}
// Beware that the final length might not fit in 32 bits so we take care of that
tmp = n * 8;
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
lo = parseInt(tmp[2], 16);
hi = parseInt(tmp[1], 16) || 0;
tail[14] = lo;
tail[15] = hi;
md5cycle(state, tail);
return state;
},
hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'],
rhex = function (n) {
var s = '',
j;
for (j = 0; j < 4; j += 1) {
s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F];
}
return s;
},
hex = function (x) {
var i;
for (i = 0; i < x.length; i += 1) {
x[i] = rhex(x[i]);
}
return x.join('');
},
md5 = function (s) {
return hex(md51(s));
},
////////////////////////////////////////////////////////////////////////////
/**
* SparkMD5 OOP implementation.
*
* Use this class to perform an incremental md5, otherwise use the
* static methods instead.
*/
SparkMD5 = function () {
// call reset to init the instance
this.reset();
};
// In some cases the fast add32 function cannot be used.
if (md5('hello') !== '5d41402abc4b2a76b9719d911017c592') {
add32 = function (x, y) {
var lsw = (x & 0xFFFF) + (y & 0xFFFF),
msw = (x >> 16) + (y >> 16) + (lsw >> 16);
return (msw << 16) | (lsw & 0xFFFF);
};
}
/**
* Appends a string.
* A conversion will be applied if a UTF-8 string is detected.
*
* @param {String} str The string to be appended
*
* @return {SparkMD5} The instance itself
*/
SparkMD5.prototype.append = function (str) {
// converts the string to utf8 bytes if necessary
if (/[\u0080-\uFFFF]/.test(str)) {
str = unescape(encodeURIComponent(str));
}
// then append as binary
this.appendBinary(str);
return this;
};
/**
* Appends a binary string.
*
* @param {String} contents The binary string to be appended
*
* @return {SparkMD5} The instance itself
*/
SparkMD5.prototype.appendBinary = function (contents) {
this._buff += contents;
this._length += contents.length;
var length = this._buff.length,
i;
for (i = 64; i <= length; i += 64) {
md5cycle(this._state, md5blk(this._buff.substring(i - 64, i)));
}
this._buff = this._buff.substr(i - 64);
return this;
};
/**
* Finishes the incremental computation, resetting the internal state and
* returning the result.
* Use the raw parameter to obtain the raw result instead of the hex one.
*
* @param {Boolean} raw True to get the raw result, false to get the hex result
*
* @return {String|Array} The result
*/
SparkMD5.prototype.end = function (raw) {
var buff = this._buff,
length = buff.length,
i,
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
ret;
for (i = 0; i < length; i += 1) {
tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3);
}
this._finish(tail, length);
ret = !!raw ? this._state : hex(this._state);
this.reset();
return ret;
};
/**
* Finish the final calculation based on the tail.
*
* @param {Array} tail The tail (will be modified)
* @param {Number} length The length of the remaining buffer
*/
SparkMD5.prototype._finish = function (tail, length) {
var i = length,
tmp,
lo,
hi;
tail[i >> 2] |= 0x80 << ((i % 4) << 3);
if (i > 55) {
md5cycle(this._state, tail);
for (i = 0; i < 16; i += 1) {
tail[i] = 0;
}
}
// Do the final computation based on the tail and length
// Beware that the final length may not fit in 32 bits so we take care of that
tmp = this._length * 8;
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/);
lo = parseInt(tmp[2], 16);
hi = parseInt(tmp[1], 16) || 0;
tail[14] = lo;
tail[15] = hi;
md5cycle(this._state, tail);
};
/**
* Resets the internal state of the computation.
*
* @return {SparkMD5} The instance itself
*/
SparkMD5.prototype.reset = function () {
this._buff = "";
this._length = 0;
this._state = [1732584193, -271733879, -1732584194, 271733878];
return this;
};
/**
* Releases memory used by the incremental buffer and other additional
* resources. If you plan to use the instance again, use reset instead.
*/
SparkMD5.prototype.destroy = function () {
delete this._state;
delete this._buff;
delete this._length;
};
/**
* Performs the md5 hash on a string.
* A conversion will be applied if a UTF-8 string is detected.
*
* @param {String} str The string
* @param {Boolean} raw True to get the raw result, false to get the hex result
*
* @return {String|Array} The result
*/
SparkMD5.hash = function (str, raw) {
// converts the string to utf8 bytes if necessary
if (/[\u0080-\uFFFF]/.test(str)) {
str = unescape(encodeURIComponent(str));
}
var hash = md51(str);
return !!raw ? hash : hex(hash);
};
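// Illustrative usage sketch for SparkMD5 (for reference only). The static
// helper and the incremental API produce the same digest; the expected
// value below is the well-known MD5 of 'hello', which the self-test above
// also relies on:
//
//   SparkMD5.hash('hello');
//   // -> '5d41402abc4b2a76b9719d911017c592'
//
//   var spark = new SparkMD5();
//   spark.append('hel');
//   spark.append('lo');
//   spark.end();
//   // -> '5d41402abc4b2a76b9719d911017c592'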
/**
* Performs the md5 hash on a binary string.
*
* @param {String} content The binary string
* @param {Boolean} raw True to get the raw result, false to get the hex result
*
* @return {String|Array} The result
*/
SparkMD5.hashBinary = function (content, raw) {
var hash = md51(content);
return !!raw ? hash : hex(hash);
};
/**
* SparkMD5 OOP implementation for array buffers.
*
* Use this class to perform an incremental md5 ONLY for array buffers.
*/
SparkMD5.ArrayBuffer = function () {
// call reset to init the instance
this.reset();
};
////////////////////////////////////////////////////////////////////////////
/**
* Appends an array buffer.
*
* @param {ArrayBuffer} arr The array to be appended
*
* @return {SparkMD5.ArrayBuffer} The instance itself
*/
SparkMD5.ArrayBuffer.prototype.append = function (arr) {
// TODO: we could avoid the concatenation here but the algorithm would be more complex
// if you find yourself needing extra performance, please make a PR.
var buff = this._concatArrayBuffer(this._buff, arr),
length = buff.length,
i;
this._length += arr.byteLength;
for (i = 64; i <= length; i += 64) {
md5cycle(this._state, md5blk_array(buff.subarray(i - 64, i)));
}
// Avoids IE10 weirdness (documented above)
this._buff = (i - 64) < length ? buff.subarray(i - 64) : new Uint8Array(0);
return this;
};
/**
* Finishes the incremental computation, resetting the internal state and
* returning the result.
* Use the raw parameter to obtain the raw result instead of the hex one.
*
* @param {Boolean} raw True to get the raw result, false to get the hex result
*
* @return {String|Array} The result
*/
SparkMD5.ArrayBuffer.prototype.end = function (raw) {
var buff = this._buff,
length = buff.length,
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
i,
ret;
for (i = 0; i < length; i += 1) {
tail[i >> 2] |= buff[i] << ((i % 4) << 3);
}
this._finish(tail, length);
ret = !!raw ? this._state : hex(this._state);
this.reset();
return ret;
};
SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish;
/**
* Resets the internal state of the computation.
*
* @return {SparkMD5.ArrayBuffer} The instance itself
*/
SparkMD5.ArrayBuffer.prototype.reset = function () {
this._buff = new Uint8Array(0);
this._length = 0;
this._state = [1732584193, -271733879, -1732584194, 271733878];
return this;
};
/**
* Releases memory used by the incremental buffer and other additional
* resources. If you plan to use the instance again, use reset instead.
*/
SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy;
/**
* Concats two array buffers, returning a new one.
*
* @param {ArrayBuffer} first The first array buffer
* @param {ArrayBuffer} second The second array buffer
*
* @return {ArrayBuffer} The new array buffer
*/
SparkMD5.ArrayBuffer.prototype._concatArrayBuffer = function (first, second) {
var firstLength = first.length,
result = new Uint8Array(firstLength + second.byteLength);
result.set(first);
result.set(new Uint8Array(second), firstLength);
return result;
};
/**
* Performs the md5 hash on an array buffer.
*
* @param {ArrayBuffer} arr The array buffer
* @param {Boolean} raw True to get the raw result, false to get the hex result
*
* @return {String|Array} The result
*/
SparkMD5.ArrayBuffer.hash = function (arr, raw) {
var hash = md51_array(new Uint8Array(arr));
return !!raw ? hash : hex(hash);
};
return SparkMD5;
}));
},{}],58:[function(_dereq_,module,exports){
'use strict';
/**
* Stringify/parse functions that don't operate
* recursively, so they avoid call stack exceeded
* errors.
*/
exports.stringify = function stringify(input) {
var queue = [];
queue.push({obj: input});
var res = '';
var next, obj, prefix, val, i, arrayPrefix, keys, k, key, value, objPrefix;
while ((next = queue.pop())) {
obj = next.obj;
prefix = next.prefix || '';
val = next.val || '';
res += prefix;
if (val) {
res += val;
} else if (typeof obj !== 'object') {
res += typeof obj === 'undefined' ? null : JSON.stringify(obj);
} else if (obj === null) {
res += 'null';
} else if (Array.isArray(obj)) {
queue.push({val: ']'});
for (i = obj.length - 1; i >= 0; i--) {
arrayPrefix = i === 0 ? '' : ',';
queue.push({obj: obj[i], prefix: arrayPrefix});
}
queue.push({val: '['});
} else { // object
keys = [];
for (k in obj) {
if (obj.hasOwnProperty(k)) {
keys.push(k);
}
}
queue.push({val: '}'});
for (i = keys.length - 1; i >= 0; i--) {
key = keys[i];
value = obj[key];
objPrefix = (i > 0 ? ',' : '');
objPrefix += JSON.stringify(key) + ':';
queue.push({obj: value, prefix: objPrefix});
}
queue.push({val: '{'});
}
}
return res;
};
// Convenience function for the parse function.
// This pop function is basically copied from
// pouchCollate.parseIndexableString
function pop(obj, stack, metaStack) {
var lastMetaElement = metaStack[metaStack.length - 1];
if (obj === lastMetaElement.element) {
// popping a meta-element, e.g. an object whose value is another object
metaStack.pop();
lastMetaElement = metaStack[metaStack.length - 1];
}
var element = lastMetaElement.element;
var lastElementIndex = lastMetaElement.index;
if (Array.isArray(element)) {
element.push(obj);
} else if (lastElementIndex === stack.length - 2) { // obj with key+value
var key = stack.pop();
element[key] = obj;
} else {
stack.push(obj); // obj with key only
}
}
exports.parse = function (str) {
var stack = [];
var metaStack = []; // stack for arrays and objects
var i = 0;
var collationIndex,parsedNum,numChar;
var parsedString,lastCh,numConsecutiveSlashes,ch;
var arrayElement, objElement;
while (true) {
collationIndex = str[i++];
if (collationIndex === '}' ||
collationIndex === ']' ||
typeof collationIndex === 'undefined') {
if (stack.length === 1) {
return stack.pop();
} else {
pop(stack.pop(), stack, metaStack);
continue;
}
}
switch (collationIndex) {
case ' ':
case '\t':
case '\n':
case ':':
case ',':
break;
case 'n':
i += 3; // 'ull'
pop(null, stack, metaStack);
break;
case 't':
i += 3; // 'rue'
pop(true, stack, metaStack);
break;
case 'f':
i += 4; // 'alse'
pop(false, stack, metaStack);
break;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '-':
parsedNum = '';
i--;
while (true) {
numChar = str[i++];
if (/[\d\.\-e\+]/.test(numChar)) {
parsedNum += numChar;
} else {
i--;
break;
}
}
pop(parseFloat(parsedNum), stack, metaStack);
break;
case '"':
parsedString = '';
lastCh = void 0;
numConsecutiveSlashes = 0;
while (true) {
ch = str[i++];
if (ch !== '"' || (lastCh === '\\' &&
numConsecutiveSlashes % 2 === 1)) {
parsedString += ch;
lastCh = ch;
if (lastCh === '\\') {
numConsecutiveSlashes++;
} else {
numConsecutiveSlashes = 0;
}
} else {
break;
}
}
pop(JSON.parse('"' + parsedString + '"'), stack, metaStack);
break;
case '[':
arrayElement = { element: [], index: stack.length };
stack.push(arrayElement.element);
metaStack.push(arrayElement);
break;
case '{':
objElement = { element: {}, index: stack.length };
stack.push(objElement.element);
metaStack.push(objElement);
break;
default:
throw new Error(
'unexpectedly reached end of input: ' + collationIndex);
}
}
};
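// Illustrative round trip through the iterative stringify/parse pair above
// (for reference only). Because both functions use an explicit work
// queue/stack instead of recursion, they can handle very deeply nested
// values without exceeding the call stack:
//
//   var input = {a: [1, 'two', null], b: {c: false}};
//   exports.parse(exports.stringify(input));
//   // -> {a: [1, 'two', null], b: {c: false}}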
},{}]},{},[17])
(17)
});
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);throw new Error("Cannot find module '"+o+"'")}var f=n[o]={exports:{}};t[o][0].call(f.exports,function(e){var n=t[o][1][e];return s(n?n:e)},f,f.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
},{}],2:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
function EventEmitter() {
this._events = this._events || {};
this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default that helps find memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
if (!isNumber(n) || n < 0 || isNaN(n))
throw TypeError('n must be a positive number');
this._maxListeners = n;
return this;
};
EventEmitter.prototype.emit = function(type) {
var er, handler, len, args, i, listeners;
if (!this._events)
this._events = {};
// If there is no 'error' event listener then throw.
if (type === 'error') {
if (!this._events.error ||
(isObject(this._events.error) && !this._events.error.length)) {
er = arguments[1];
if (er instanceof Error) {
throw er; // Unhandled 'error' event
}
throw TypeError('Uncaught, unspecified "error" event.');
}
}
handler = this._events[type];
if (isUndefined(handler))
return false;
if (isFunction(handler)) {
switch (arguments.length) {
// fast cases
case 1:
handler.call(this);
break;
case 2:
handler.call(this, arguments[1]);
break;
case 3:
handler.call(this, arguments[1], arguments[2]);
break;
// slower
default:
len = arguments.length;
args = new Array(len - 1);
for (i = 1; i < len; i++)
args[i - 1] = arguments[i];
handler.apply(this, args);
}
} else if (isObject(handler)) {
len = arguments.length;
args = new Array(len - 1);
for (i = 1; i < len; i++)
args[i - 1] = arguments[i];
listeners = handler.slice();
len = listeners.length;
for (i = 0; i < len; i++)
listeners[i].apply(this, args);
}
return true;
};
EventEmitter.prototype.addListener = function(type, listener) {
var m;
if (!isFunction(listener))
throw TypeError('listener must be a function');
if (!this._events)
this._events = {};
// To avoid recursion in the case that type === "newListener"! Before
// adding it to the listeners, first emit "newListener".
if (this._events.newListener)
this.emit('newListener', type,
isFunction(listener.listener) ?
listener.listener : listener);
if (!this._events[type])
// Optimize the case of one listener. Don't need the extra array object.
this._events[type] = listener;
else if (isObject(this._events[type]))
// If we've already got an array, just append.
this._events[type].push(listener);
else
// Adding the second element, need to change to array.
this._events[type] = [this._events[type], listener];
// Check for listener leak
if (isObject(this._events[type]) && !this._events[type].warned) {
if (!isUndefined(this._maxListeners)) {
m = this._maxListeners;
} else {
m = EventEmitter.defaultMaxListeners;
}
if (m && m > 0 && this._events[type].length > m) {
this._events[type].warned = true;
console.error('(node) warning: possible EventEmitter memory ' +
'leak detected. %d listeners added. ' +
'Use emitter.setMaxListeners() to increase limit.',
this._events[type].length);
if (typeof console.trace === 'function') {
// not supported in IE 10
console.trace();
}
}
}
return this;
};
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
EventEmitter.prototype.once = function(type, listener) {
if (!isFunction(listener))
throw TypeError('listener must be a function');
var fired = false;
function g() {
this.removeListener(type, g);
if (!fired) {
fired = true;
listener.apply(this, arguments);
}
}
g.listener = listener;
this.on(type, g);
return this;
};
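// Illustrative usage sketch for this EventEmitter shim (for reference only;
// the event name 'data' and the payload are hypothetical):
//
//   var emitter = new EventEmitter();
//   emitter.on('data', function (chunk) { console.log('got', chunk); });
//   emitter.once('data', function () { console.log('only fires once'); });
//   emitter.emit('data', 42);  // both listeners run
//   emitter.emit('data', 43);  // only the .on() listener runs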
// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
var list, position, length, i;
if (!isFunction(listener))
throw TypeError('listener must be a function');
if (!this._events || !this._events[type])
return this;
list = this._events[type];
length = list.length;
position = -1;
if (list === listener ||
(isFunction(list.listener) && list.listener === listener)) {
delete this._events[type];
if (this._events.removeListener)
this.emit('removeListener', type, listener);
} else if (isObject(list)) {
for (i = length; i-- > 0;) {
if (list[i] === listener ||
(list[i].listener && list[i].listener === listener)) {
position = i;
break;
}
}
if (position < 0)
return this;
if (list.length === 1) {
list.length = 0;
delete this._events[type];
} else {
list.splice(position, 1);
}
if (this._events.removeListener)
this.emit('removeListener', type, listener);
}
return this;
};
EventEmitter.prototype.removeAllListeners = function(type) {
var key, listeners;
if (!this._events)
return this;
// not listening for removeListener, no need to emit
if (!this._events.removeListener) {
if (arguments.length === 0)
this._events = {};
else if (this._events[type])
delete this._events[type];
return this;
}
// emit removeListener for all listeners on all events
if (arguments.length === 0) {
for (key in this._events) {
if (key === 'removeListener') continue;
this.removeAllListeners(key);
}
this.removeAllListeners('removeListener');
this._events = {};
return this;
}
listeners = this._events[type];
if (isFunction(listeners)) {
this.removeListener(type, listeners);
} else {
// LIFO order
while (listeners.length)
this.removeListener(type, listeners[listeners.length - 1]);
}
delete this._events[type];
return this;
};
EventEmitter.prototype.listeners = function(type) {
var ret;
if (!this._events || !this._events[type])
ret = [];
else if (isFunction(this._events[type]))
ret = [this._events[type]];
else
ret = this._events[type].slice();
return ret;
};
EventEmitter.listenerCount = function(emitter, type) {
var ret;
if (!emitter._events || !emitter._events[type])
ret = 0;
else if (isFunction(emitter._events[type]))
ret = 1;
else
ret = emitter._events[type].length;
return ret;
};
function isFunction(arg) {
return typeof arg === 'function';
}
function isNumber(arg) {
return typeof arg === 'number';
}
function isObject(arg) {
return typeof arg === 'object' && arg !== null;
}
function isUndefined(arg) {
return arg === void 0;
}
},{}],3:[function(require,module,exports){
// shim for using process in browser
var process = module.exports = {};
process.nextTick = (function () {
var canSetImmediate = typeof window !== 'undefined'
&& window.setImmediate;
var canPost = typeof window !== 'undefined'
&& window.postMessage && window.addEventListener
;
if (canSetImmediate) {
return function (f) { return window.setImmediate(f) };
}
if (canPost) {
var queue = [];
window.addEventListener('message', function (ev) {
var source = ev.source;
if ((source === window || source === null) && ev.data === 'process-tick') {
ev.stopPropagation();
if (queue.length > 0) {
var fn = queue.shift();
fn();
}
}
}, true);
return function nextTick(fn) {
queue.push(fn);
window.postMessage('process-tick', '*');
};
}
return function nextTick(fn) {
setTimeout(fn, 0);
};
})();
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.binding = function (name) {
throw new Error('process.binding is not supported');
};
// TODO(shtylman)
process.cwd = function () { return '/' };
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
},{}],4:[function(require,module,exports){
var base64 = require('base64-js')
var ieee754 = require('ieee754')
exports.Buffer = Buffer
exports.SlowBuffer = Buffer
exports.INSPECT_MAX_BYTES = 50
Buffer.poolSize = 8192
/**
* If `Buffer._useTypedArrays`:
* === true Use Uint8Array implementation (fastest)
* === false Use Object implementation (compatible down to IE6)
*/
Buffer._useTypedArrays = (function () {
// Detect if browser supports Typed Arrays. Supported browsers are IE 10+,
// Firefox 4+, Chrome 7+, Safari 5.1+, Opera 11.6+, iOS 4.2+.
if (typeof Uint8Array === 'undefined' || typeof ArrayBuffer === 'undefined')
return false
// Does the browser support adding properties to `Uint8Array` instances? If
// not, then that's the same as no `Uint8Array` support. We need to be able to
// add all the node Buffer API methods.
// Relevant Firefox bug: https://bugzilla.mozilla.org/show_bug.cgi?id=695438
try {
var arr = new Uint8Array(0)
arr.foo = function () { return 42 }
return 42 === arr.foo() &&
typeof arr.subarray === 'function' // Chrome 9-10 lack `subarray`
} catch (e) {
return false
}
})()
/**
* Class: Buffer
* =============
*
* The Buffer constructor returns instances of `Uint8Array` that are augmented
* with function properties for all the node `Buffer` API functions. We use
* `Uint8Array` so that square bracket notation works as expected -- it returns
* a single octet.
*
* By augmenting the instances, we can avoid modifying the `Uint8Array`
* prototype.
*/
function Buffer (subject, encoding, noZero) {
if (!(this instanceof Buffer))
return new Buffer(subject, encoding, noZero)
var type = typeof subject
// Workaround: node's base64 implementation allows for non-padded strings
// while base64-js does not.
if (encoding === 'base64' && type === 'string') {
subject = stringtrim(subject)
while (subject.length % 4 !== 0) {
subject = subject + '='
}
}
// Find the length
var length
if (type === 'number')
length = coerce(subject)
else if (type === 'string')
length = Buffer.byteLength(subject, encoding)
else if (type === 'object')
length = coerce(subject.length) // Assume object is an array
else
throw new Error('First argument needs to be a number, array or string.')
var buf
if (Buffer._useTypedArrays) {
// Preferred: Return an augmented `Uint8Array` instance for best performance
buf = augment(new Uint8Array(length))
} else {
// Fallback: Return THIS instance of Buffer (created by `new`)
buf = this
buf.length = length
buf._isBuffer = true
}
var i
if (Buffer._useTypedArrays && typeof Uint8Array === 'function' &&
subject instanceof Uint8Array) {
// Speed optimization -- use set if we're copying from a Uint8Array
buf._set(subject)
} else if (isArrayish(subject)) {
// Treat array-ish objects as a byte array
for (i = 0; i < length; i++) {
if (Buffer.isBuffer(subject))
buf[i] = subject.readUInt8(i)
else
buf[i] = subject[i]
}
} else if (type === 'string') {
buf.write(subject, 0, encoding)
} else if (type === 'number' && !Buffer._useTypedArrays && !noZero) {
for (i = 0; i < length; i++) {
buf[i] = 0
}
}
return buf
}
// STATIC METHODS
// ==============
Buffer.isEncoding = function (encoding) {
switch (String(encoding).toLowerCase()) {
case 'hex':
case 'utf8':
case 'utf-8':
case 'ascii':
case 'binary':
case 'base64':
case 'raw':
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return true
default:
return false
}
}
Buffer.isBuffer = function (b) {
return !!(b !== null && b !== undefined && b._isBuffer)
}
Buffer.byteLength = function (str, encoding) {
var ret
str = str + ''
switch (encoding || 'utf8') {
case 'hex':
ret = str.length / 2
break
case 'utf8':
case 'utf-8':
ret = utf8ToBytes(str).length
break
case 'ascii':
case 'binary':
case 'raw':
ret = str.length
break
case 'base64':
ret = base64ToBytes(str).length
break
case 'ucs2':
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
ret = str.length * 2
break
default:
throw new Error('Unknown encoding')
}
return ret
}
Buffer.concat = function (list, totalLength) {
assert(isArray(list), 'Usage: Buffer.concat(list, [totalLength])\n' +
'list should be an Array.')
if (list.length === 0) {
return new Buffer(0)
} else if (list.length === 1) {
return list[0]
}
var i
if (typeof totalLength !== 'number') {
totalLength = 0
for (i = 0; i < list.length; i++) {
totalLength += list[i].length
}
}
var buf = new Buffer(totalLength)
var pos = 0
for (i = 0; i < list.length; i++) {
var item = list[i]
item.copy(buf, pos)
pos += item.length
}
return buf
}
// BUFFER INSTANCE METHODS
// =======================
function _hexWrite (buf, string, offset, length) {
offset = Number(offset) || 0
var remaining = buf.length - offset
if (!length) {
length = remaining
} else {
length = Number(length)
if (length > remaining) {
length = remaining
}
}
// must be an even number of digits
var strLen = string.length
assert(strLen % 2 === 0, 'Invalid hex string')
if (length > strLen / 2) {
length = strLen / 2
}
for (var i = 0; i < length; i++) {
var byte = parseInt(string.substr(i * 2, 2), 16)
assert(!isNaN(byte), 'Invalid hex string')
buf[offset + i] = byte
}
Buffer._charsWritten = i * 2
return i
}
function _utf8Write (buf, string, offset, length) {
var charsWritten = Buffer._charsWritten =
blitBuffer(utf8ToBytes(string), buf, offset, length)
return charsWritten
}
function _asciiWrite (buf, string, offset, length) {
var charsWritten = Buffer._charsWritten =
blitBuffer(asciiToBytes(string), buf, offset, length)
return charsWritten
}
function _binaryWrite (buf, string, offset, length) {
return _asciiWrite(buf, string, offset, length)
}
function _base64Write (buf, string, offset, length) {
var charsWritten = Buffer._charsWritten =
blitBuffer(base64ToBytes(string), buf, offset, length)
return charsWritten
}
Buffer.prototype.write = function (string, offset, length, encoding) {
// Support both (string, offset, length, encoding)
// and the legacy (string, encoding, offset, length)
if (isFinite(offset)) {
if (!isFinite(length)) {
encoding = length
length = undefined
}
} else { // legacy
var swap = encoding
encoding = offset
offset = length
length = swap
}
offset = Number(offset) || 0
var remaining = this.length - offset
if (!length) {
length = remaining
} else {
length = Number(length)
if (length > remaining) {
length = remaining
}
}
encoding = String(encoding || 'utf8').toLowerCase()
switch (encoding) {
case 'hex':
return _hexWrite(this, string, offset, length)
case 'utf8':
case 'utf-8':
case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return _utf8Write(this, string, offset, length)
case 'ascii':
return _asciiWrite(this, string, offset, length)
case 'binary':
return _binaryWrite(this, string, offset, length)
case 'base64':
return _base64Write(this, string, offset, length)
default:
throw new Error('Unknown encoding')
}
}
Buffer.prototype.toString = function (encoding, start, end) {
var self = this
encoding = String(encoding || 'utf8').toLowerCase()
start = Number(start) || 0
end = (end !== undefined)
? Number(end)
: self.length
// Fastpath empty strings
if (end === start)
return ''
switch (encoding) {
case 'hex':
return _hexSlice(self, start, end)
case 'utf8':
case 'utf-8':
case 'ucs2': // TODO: No support for ucs2 or utf16le encodings yet
case 'ucs-2':
case 'utf16le':
case 'utf-16le':
return _utf8Slice(self, start, end)
case 'ascii':
return _asciiSlice(self, start, end)
case 'binary':
return _binarySlice(self, start, end)
case 'base64':
return _base64Slice(self, start, end)
default:
throw new Error('Unknown encoding')
}
}
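// Illustrative write/toString round trip for this browser Buffer shim (for
// reference only):
//
//   var buf = new Buffer('hi', 'utf8')
//   buf.length              // -> 2
//   buf.toString('utf8')    // -> 'hi'
//   buf.toString('hex')     // -> '6869'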
Buffer.prototype.toJSON = function () {
return {
type: 'Buffer',
data: Array.prototype.slice.call(this._arr || this, 0)
}
}
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
Buffer.prototype.copy = function (target, target_start, start, end) {
var source = this
if (!start) start = 0
if (!end && end !== 0) end = this.length
if (!target_start) target_start = 0
// Copy 0 bytes; we're done
if (end === start) return
if (target.length === 0 || source.length === 0) return
// Fatal error conditions
assert(end >= start, 'sourceEnd < sourceStart')
assert(target_start >= 0 && target_start < target.length,
'targetStart out of bounds')
assert(start >= 0 && start < source.length, 'sourceStart out of bounds')
assert(end >= 0 && end <= source.length, 'sourceEnd out of bounds')
// Are we oob?
if (end > this.length)
end = this.length
if (target.length - target_start < end - start)
end = target.length - target_start + start
// copy!
for (var i = 0; i < end - start; i++)
target[i + target_start] = this[i + start]
}
function _base64Slice (buf, start, end) {
if (start === 0 && end === buf.length) {
return base64.fromByteArray(buf)
} else {
return base64.fromByteArray(buf.slice(start, end))
}
}
function _utf8Slice (buf, start, end) {
var res = ''
var tmp = ''
end = Math.min(buf.length, end)
for (var i = start; i < end; i++) {
if (buf[i] <= 0x7F) {
res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
tmp = ''
} else {
tmp += '%' + buf[i].toString(16)
}
}
return res + decodeUtf8Char(tmp)
}
function _asciiSlice (buf, start, end) {
var ret = ''
end = Math.min(buf.length, end)
for (var i = start; i < end; i++)
ret += String.fromCharCode(buf[i])
return ret
}
function _binarySlice (buf, start, end) {
return _asciiSlice(buf, start, end)
}
function _hexSlice (buf, start, end) {
var len = buf.length
if (!start || start < 0) start = 0
if (!end || end < 0 || end > len) end = len
var out = ''
for (var i = start; i < end; i++) {
out += toHex(buf[i])
}
return out
}
// http://nodejs.org/api/buffer.html#buffer_buf_slice_start_end
Buffer.prototype.slice = function (start, end) {
var len = this.length
start = clamp(start, len, 0)
end = clamp(end, len, len)
if (Buffer._useTypedArrays) {
return augment(this.subarray(start, end))
} else {
var sliceLen = end - start
var newBuf = new Buffer(sliceLen, undefined, true)
for (var i = 0; i < sliceLen; i++) {
newBuf[i] = this[i + start]
}
return newBuf
}
}
// `get` will be removed in Node 0.13+
Buffer.prototype.get = function (offset) {
console.log('.get() is deprecated. Access using array indexes instead.')
return this.readUInt8(offset)
}
// `set` will be removed in Node 0.13+
Buffer.prototype.set = function (v, offset) {
console.log('.set() is deprecated. Access using array indexes instead.')
return this.writeUInt8(v, offset)
}
Buffer.prototype.readUInt8 = function (offset, noAssert) {
if (!noAssert) {
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset < this.length, 'Trying to read beyond buffer length')
}
if (offset >= this.length)
return
return this[offset]
}
function _readUInt16 (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
}
var len = buf.length
if (offset >= len)
return
var val
if (littleEndian) {
val = buf[offset]
if (offset + 1 < len)
val |= buf[offset + 1] << 8
} else {
val = buf[offset] << 8
if (offset + 1 < len)
val |= buf[offset + 1]
}
return val
}
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
return _readUInt16(this, offset, true, noAssert)
}
Buffer.prototype.readUInt16BE = function (offset, noAssert) {
return _readUInt16(this, offset, false, noAssert)
}
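// Editor's sketch (illustrative only, not part of the original bundle): the LE/BE
// variants differ only in which byte lands in the high bits:
//
//   var b = new Buffer([0x12, 0x34])
//   b.readUInt16LE(0)   // 0x3412 (low byte first)
//   b.readUInt16BE(0)   // 0x1234 (high byte first)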
function _readUInt32 (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
}
var len = buf.length
if (offset >= len)
return
var val
if (littleEndian) {
if (offset + 2 < len)
val = buf[offset + 2] << 16
if (offset + 1 < len)
val |= buf[offset + 1] << 8
val |= buf[offset]
if (offset + 3 < len)
val = val + (buf[offset + 3] << 24 >>> 0)
} else {
if (offset + 1 < len)
val = buf[offset + 1] << 16
if (offset + 2 < len)
val |= buf[offset + 2] << 8
if (offset + 3 < len)
val |= buf[offset + 3]
val = val + (buf[offset] << 24 >>> 0)
}
return val
}
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
return _readUInt32(this, offset, true, noAssert)
}
Buffer.prototype.readUInt32BE = function (offset, noAssert) {
return _readUInt32(this, offset, false, noAssert)
}
Buffer.prototype.readInt8 = function (offset, noAssert) {
if (!noAssert) {
assert(offset !== undefined && offset !== null,
'missing offset')
assert(offset < this.length, 'Trying to read beyond buffer length')
}
if (offset >= this.length)
return
var neg = this[offset] & 0x80
if (neg)
return (0xff - this[offset] + 1) * -1
else
return this[offset]
}
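// Editor's sketch (illustrative only, not part of the original bundle): the signed
// readers undo two's complement by hand, e.g. for readInt8:
//
//   new Buffer([0xff]).readInt8(0)   // (0xff - 0xff + 1) * -1 === -1
//   new Buffer([0x80]).readInt8(0)   // (0xff - 0x80 + 1) * -1 === -128
//   new Buffer([0x7f]).readInt8(0)   // high bit clear, returned as-is: 127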
function _readInt16 (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 1 < buf.length, 'Trying to read beyond buffer length')
}
var len = buf.length
if (offset >= len)
return
var val = _readUInt16(buf, offset, littleEndian, true)
var neg = val & 0x8000
if (neg)
return (0xffff - val + 1) * -1
else
return val
}
Buffer.prototype.readInt16LE = function (offset, noAssert) {
return _readInt16(this, offset, true, noAssert)
}
Buffer.prototype.readInt16BE = function (offset, noAssert) {
return _readInt16(this, offset, false, noAssert)
}
function _readInt32 (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
}
var len = buf.length
if (offset >= len)
return
var val = _readUInt32(buf, offset, littleEndian, true)
var neg = val & 0x80000000
if (neg)
return (0xffffffff - val + 1) * -1
else
return val
}
Buffer.prototype.readInt32LE = function (offset, noAssert) {
return _readInt32(this, offset, true, noAssert)
}
Buffer.prototype.readInt32BE = function (offset, noAssert) {
return _readInt32(this, offset, false, noAssert)
}
function _readFloat (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset + 3 < buf.length, 'Trying to read beyond buffer length')
}
return ieee754.read(buf, offset, littleEndian, 23, 4)
}
Buffer.prototype.readFloatLE = function (offset, noAssert) {
return _readFloat(this, offset, true, noAssert)
}
Buffer.prototype.readFloatBE = function (offset, noAssert) {
return _readFloat(this, offset, false, noAssert)
}
function _readDouble (buf, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset + 7 < buf.length, 'Trying to read beyond buffer length')
}
return ieee754.read(buf, offset, littleEndian, 52, 8)
}
Buffer.prototype.readDoubleLE = function (offset, noAssert) {
return _readDouble(this, offset, true, noAssert)
}
Buffer.prototype.readDoubleBE = function (offset, noAssert) {
return _readDouble(this, offset, false, noAssert)
}
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset < this.length, 'trying to write beyond buffer length')
verifuint(value, 0xff)
}
if (offset >= this.length) return
this[offset] = value
}
function _writeUInt16 (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 1 < buf.length, 'trying to write beyond buffer length')
verifuint(value, 0xffff)
}
var len = buf.length
if (offset >= len)
return
for (var i = 0, j = Math.min(len - offset, 2); i < j; i++) {
buf[offset + i] =
(value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
(littleEndian ? i : 1 - i) * 8
}
}
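// Editor's sketch (illustrative only, not part of the original bundle): the shift
// expression above just picks which byte of the value lands at which offset:
//
//   var b = new Buffer(2)
//   b.writeUInt16LE(0x1234, 0)   // b is <Buffer 34 12>
//   b.writeUInt16BE(0x1234, 0)   // b is <Buffer 12 34>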
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) {
_writeUInt16(this, value, offset, true, noAssert)
}
Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) {
_writeUInt16(this, value, offset, false, noAssert)
}
function _writeUInt32 (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 3 < buf.length, 'trying to write beyond buffer length')
verifuint(value, 0xffffffff)
}
var len = buf.length
if (offset >= len)
return
for (var i = 0, j = Math.min(len - offset, 4); i < j; i++) {
buf[offset + i] =
(value >>> (littleEndian ? i : 3 - i) * 8) & 0xff
}
}
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) {
_writeUInt32(this, value, offset, true, noAssert)
}
Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) {
_writeUInt32(this, value, offset, false, noAssert)
}
Buffer.prototype.writeInt8 = function (value, offset, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset < this.length, 'Trying to write beyond buffer length')
verifsint(value, 0x7f, -0x80)
}
if (offset >= this.length)
return
if (value >= 0)
this.writeUInt8(value, offset, noAssert)
else
this.writeUInt8(0xff + value + 1, offset, noAssert)
}
function _writeInt16 (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 1 < buf.length, 'Trying to write beyond buffer length')
verifsint(value, 0x7fff, -0x8000)
}
var len = buf.length
if (offset >= len)
return
if (value >= 0)
_writeUInt16(buf, value, offset, littleEndian, noAssert)
else
_writeUInt16(buf, 0xffff + value + 1, offset, littleEndian, noAssert)
}
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) {
_writeInt16(this, value, offset, true, noAssert)
}
Buffer.prototype.writeInt16BE = function (value, offset, noAssert) {
_writeInt16(this, value, offset, false, noAssert)
}
function _writeInt32 (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
verifsint(value, 0x7fffffff, -0x80000000)
}
var len = buf.length
if (offset >= len)
return
if (value >= 0)
_writeUInt32(buf, value, offset, littleEndian, noAssert)
else
_writeUInt32(buf, 0xffffffff + value + 1, offset, littleEndian, noAssert)
}
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
_writeInt32(this, value, offset, true, noAssert)
}
Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
_writeInt32(this, value, offset, false, noAssert)
}
function _writeFloat (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 3 < buf.length, 'Trying to write beyond buffer length')
verifIEEE754(value, 3.4028234663852886e+38, -3.4028234663852886e+38)
}
var len = buf.length
if (offset >= len)
return
ieee754.write(buf, value, offset, littleEndian, 23, 4)
}
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
_writeFloat(this, value, offset, true, noAssert)
}
Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
_writeFloat(this, value, offset, false, noAssert)
}
function _writeDouble (buf, value, offset, littleEndian, noAssert) {
if (!noAssert) {
assert(value !== undefined && value !== null, 'missing value')
assert(typeof littleEndian === 'boolean', 'missing or invalid endian')
assert(offset !== undefined && offset !== null, 'missing offset')
assert(offset + 7 < buf.length,
'Trying to write beyond buffer length')
verifIEEE754(value, 1.7976931348623157E+308, -1.7976931348623157E+308)
}
var len = buf.length
if (offset >= len)
return
ieee754.write(buf, value, offset, littleEndian, 52, 8)
}
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
_writeDouble(this, value, offset, true, noAssert)
}
Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
_writeDouble(this, value, offset, false, noAssert)
}
// fill(value, start=0, end=buffer.length)
Buffer.prototype.fill = function (value, start, end) {
if (!value) value = 0
if (!start) start = 0
if (!end) end = this.length
if (typeof value === 'string') {
value = value.charCodeAt(0)
}
assert(typeof value === 'number' && !isNaN(value), 'value is not a number')
assert(end >= start, 'end < start')
// Fill 0 bytes; we're done
if (end === start) return
if (this.length === 0) return
assert(start >= 0 && start < this.length, 'start out of bounds')
assert(end >= 0 && end <= this.length, 'end out of bounds')
for (var i = start; i < end; i++) {
this[i] = value
}
}
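// Editor's sketch (illustrative only, not part of the original bundle): fill()
// accepts either a number or a single character:
//
//   var b = new Buffer(4)
//   b.fill(0)           // <Buffer 00 00 00 00>
//   b.fill('a', 1, 3)   // string values use charCodeAt(0): <Buffer 00 61 61 00>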
Buffer.prototype.inspect = function () {
var out = []
var len = this.length
for (var i = 0; i < len; i++) {
out[i] = toHex(this[i])
if (i === exports.INSPECT_MAX_BYTES) {
out[i + 1] = '...'
break
}
}
return '<Buffer ' + out.join(' ') + '>'
}
/**
* Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
* Added in Node 0.12. Only available in browsers that support ArrayBuffer.
*/
Buffer.prototype.toArrayBuffer = function () {
if (typeof Uint8Array === 'function') {
if (Buffer._useTypedArrays) {
return (new Buffer(this)).buffer
} else {
var buf = new Uint8Array(this.length)
for (var i = 0, len = buf.length; i < len; i += 1)
buf[i] = this[i]
return buf.buffer
}
} else {
throw new Error('Buffer.toArrayBuffer not supported in this browser')
}
}
// HELPER FUNCTIONS
// ================
function stringtrim (str) {
if (str.trim) return str.trim()
return str.replace(/^\s+|\s+$/g, '')
}
var BP = Buffer.prototype
/**
* Augment the Uint8Array *instance* (not the class!) with Buffer methods
*/
function augment (arr) {
arr._isBuffer = true
// save reference to original Uint8Array get/set methods before overwriting
arr._get = arr.get
arr._set = arr.set
// deprecated, will be removed in node 0.13+
arr.get = BP.get
arr.set = BP.set
arr.write = BP.write
arr.toString = BP.toString
arr.toLocaleString = BP.toString
arr.toJSON = BP.toJSON
arr.copy = BP.copy
arr.slice = BP.slice
arr.readUInt8 = BP.readUInt8
arr.readUInt16LE = BP.readUInt16LE
arr.readUInt16BE = BP.readUInt16BE
arr.readUInt32LE = BP.readUInt32LE
arr.readUInt32BE = BP.readUInt32BE
arr.readInt8 = BP.readInt8
arr.readInt16LE = BP.readInt16LE
arr.readInt16BE = BP.readInt16BE
arr.readInt32LE = BP.readInt32LE
arr.readInt32BE = BP.readInt32BE
arr.readFloatLE = BP.readFloatLE
arr.readFloatBE = BP.readFloatBE
arr.readDoubleLE = BP.readDoubleLE
arr.readDoubleBE = BP.readDoubleBE
arr.writeUInt8 = BP.writeUInt8
arr.writeUInt16LE = BP.writeUInt16LE
arr.writeUInt16BE = BP.writeUInt16BE
arr.writeUInt32LE = BP.writeUInt32LE
arr.writeUInt32BE = BP.writeUInt32BE
arr.writeInt8 = BP.writeInt8
arr.writeInt16LE = BP.writeInt16LE
arr.writeInt16BE = BP.writeInt16BE
arr.writeInt32LE = BP.writeInt32LE
arr.writeInt32BE = BP.writeInt32BE
arr.writeFloatLE = BP.writeFloatLE
arr.writeFloatBE = BP.writeFloatBE
arr.writeDoubleLE = BP.writeDoubleLE
arr.writeDoubleBE = BP.writeDoubleBE
arr.fill = BP.fill
arr.inspect = BP.inspect
arr.toArrayBuffer = BP.toArrayBuffer
return arr
}
// slice(start, end)
function clamp (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue
index = ~~index; // Coerce to integer.
if (index >= len) return len
if (index >= 0) return index
index += len
if (index >= 0) return index
return 0
}
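// Editor's sketch (illustrative only, not part of the original bundle): clamp()
// implements slice()'s index rules. Non-numbers fall back to the default,
// negatives count from the end, and everything is pinned to [0, len]:
//
//   clamp(undefined, 10, 0)   // 0  (default)
//   clamp(-3, 10, 0)          // 7  (counts back from the end)
//   clamp(42, 10, 10)         // 10 (pinned to len)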
function coerce (length) {
// Coerce length to a number (possibly NaN), round up
// in case it's fractional (e.g. 123.456) then do a
// double negate to coerce a NaN to 0. Easy, right?
length = ~~Math.ceil(+length)
return length < 0 ? 0 : length
}
function isArray (subject) {
return (Array.isArray || function (subject) {
return Object.prototype.toString.call(subject) === '[object Array]'
})(subject)
}
function isArrayish (subject) {
return isArray(subject) || Buffer.isBuffer(subject) ||
subject && typeof subject === 'object' &&
typeof subject.length === 'number'
}
function toHex (n) {
if (n < 16) return '0' + n.toString(16)
return n.toString(16)
}
function utf8ToBytes (str) {
var byteArray = []
for (var i = 0; i < str.length; i++) {
var b = str.charCodeAt(i)
if (b <= 0x7F)
byteArray.push(str.charCodeAt(i))
else {
var start = i
if (b >= 0xD800 && b <= 0xDFFF) i++
var h = encodeURIComponent(str.slice(start, i+1)).substr(1).split('%')
for (var j = 0; j < h.length; j++)
byteArray.push(parseInt(h[j], 16))
}
}
return byteArray
}
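// Editor's sketch (illustrative only, not part of the original bundle): the
// percent-escape trick above yields the raw UTF-8 bytes, and surrogate pairs are
// consumed in a single step:
//
//   utf8ToBytes('é')              // [0xc3, 0xa9]
//   utf8ToBytes('\ud834\udd1e')   // U+1D11E: [0xf0, 0x9d, 0x84, 0x9e]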
function asciiToBytes (str) {
var byteArray = []
for (var i = 0; i < str.length; i++) {
    // Node's code seems to mask with 0xFF here rather than 0x7F, so match that.
byteArray.push(str.charCodeAt(i) & 0xFF)
}
return byteArray
}
function base64ToBytes (str) {
return base64.toByteArray(str)
}
function blitBuffer (src, dst, offset, length) {
for (var i = 0; i < length; i++) {
if ((i + offset >= dst.length) || (i >= src.length))
break
dst[i + offset] = src[i]
}
return i
}
function decodeUtf8Char (str) {
try {
return decodeURIComponent(str)
} catch (err) {
return String.fromCharCode(0xFFFD) // UTF 8 invalid char
}
}
/*
 * We have to make sure that the value is a valid integer. This means that it
 * is non-negative, has no fractional component, and does not exceed the
 * maximum allowed value.
*/
function verifuint (value, max) {
assert(typeof value == 'number', 'cannot write a non-number as a number')
assert(value >= 0,
'specified a negative value for writing an unsigned value')
assert(value <= max, 'value is larger than maximum value for type')
assert(Math.floor(value) === value, 'value has a fractional component')
}
function verifsint(value, max, min) {
assert(typeof value == 'number', 'cannot write a non-number as a number')
assert(value <= max, 'value larger than maximum allowed value')
assert(value >= min, 'value smaller than minimum allowed value')
assert(Math.floor(value) === value, 'value has a fractional component')
}
function verifIEEE754(value, max, min) {
assert(typeof value == 'number', 'cannot write a non-number as a number')
assert(value <= max, 'value larger than maximum allowed value')
assert(value >= min, 'value smaller than minimum allowed value')
}
function assert (test, message) {
if (!test) throw new Error(message || 'Failed assertion')
}
},{"base64-js":5,"ieee754":6}],5:[function(require,module,exports){
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
;(function (exports) {
'use strict';
var Arr = (typeof Uint8Array !== 'undefined')
? Uint8Array
: Array
var PLUS = '+'.charCodeAt(0)
var SLASH = '/'.charCodeAt(0)
var NUMBER = '0'.charCodeAt(0)
var LOWER = 'a'.charCodeAt(0)
var UPPER = 'A'.charCodeAt(0)
function decode (elt) {
var code = elt.charCodeAt(0)
if (code === PLUS)
return 62 // '+'
if (code === SLASH)
return 63 // '/'
if (code < NUMBER)
return -1 //no match
if (code < NUMBER + 10)
return code - NUMBER + 26 + 26
if (code < UPPER + 26)
return code - UPPER
if (code < LOWER + 26)
return code - LOWER + 26
}
function b64ToByteArray (b64) {
var i, j, l, tmp, placeHolders, arr
if (b64.length % 4 > 0) {
throw new Error('Invalid string. Length must be a multiple of 4')
}
// the number of equal signs (place holders)
  // if there are two placeholders, then the two characters before it
// represent one byte
// if there is only one, then the three characters before it represent 2 bytes
// this is just a cheap hack to not do indexOf twice
var len = b64.length
placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0
  // every 4 base64 chars encode 3 bytes, so the decoded length is 3/4 of the
  // string length, minus the placeholders
arr = new Arr(b64.length * 3 / 4 - placeHolders)
// if there are placeholders, only get up to the last complete 4 chars
l = placeHolders > 0 ? b64.length - 4 : b64.length
var L = 0
function push (v) {
arr[L++] = v
}
for (i = 0, j = 0; i < l; i += 4, j += 3) {
tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3))
push((tmp & 0xFF0000) >> 16)
push((tmp & 0xFF00) >> 8)
push(tmp & 0xFF)
}
if (placeHolders === 2) {
tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
push(tmp & 0xFF)
} else if (placeHolders === 1) {
tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
push((tmp >> 8) & 0xFF)
push(tmp & 0xFF)
}
return arr
}
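// Editor's sketch (illustrative only, not part of the original bundle): the
// placeholder handling described above in practice:
//
//   b64ToByteArray('TWFu')   // [77, 97, 110]  no padding, 3 bytes
//   b64ToByteArray('TWE=')   // [77, 97]       one '=': last 4 chars give 2 bytes
//   b64ToByteArray('TQ==')   // [77]           two '=': last 4 chars give 1 byte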
function uint8ToBase64 (uint8) {
var i,
extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes
output = "",
temp, length
function encode (num) {
return lookup.charAt(num)
}
function tripletToBase64 (num) {
return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F)
}
// go through the array every three bytes, we'll deal with trailing stuff later
for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
output += tripletToBase64(temp)
}
// pad the end with zeros, but make sure to not forget the extra bytes
switch (extraBytes) {
case 1:
temp = uint8[uint8.length - 1]
output += encode(temp >> 2)
output += encode((temp << 4) & 0x3F)
output += '=='
break
case 2:
temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
output += encode(temp >> 10)
output += encode((temp >> 4) & 0x3F)
output += encode((temp << 2) & 0x3F)
output += '='
break
}
return output
}
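// Editor's sketch (illustrative only, not part of the original bundle): encoding
// mirrors the decode examples above, padding with '=' as needed:
//
//   uint8ToBase64([77, 97, 110])   // 'TWFu'
//   uint8ToBase64([77, 97])        // 'TWE='
//   uint8ToBase64([77])            // 'TQ=='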
exports.toByteArray = b64ToByteArray
exports.fromByteArray = uint8ToBase64
}(typeof exports === 'undefined' ? (this.base64js = {}) : exports))
},{}],6:[function(require,module,exports){
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
var e, m,
eLen = nBytes * 8 - mLen - 1,
eMax = (1 << eLen) - 1,
eBias = eMax >> 1,
nBits = -7,
i = isLE ? (nBytes - 1) : 0,
d = isLE ? -1 : 1,
s = buffer[offset + i];
i += d;
e = s & ((1 << (-nBits)) - 1);
s >>= (-nBits);
nBits += eLen;
for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);
m = e & ((1 << (-nBits)) - 1);
e >>= (-nBits);
nBits += mLen;
for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);
if (e === 0) {
e = 1 - eBias;
} else if (e === eMax) {
return m ? NaN : ((s ? -1 : 1) * Infinity);
} else {
m = m + Math.pow(2, mLen);
e = e - eBias;
}
return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
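// Editor's sketch (illustrative only, not part of the original bundle): reading a
// big-endian single-precision float (mLen = 23, nBytes = 4):
//
//   exports.read([0x3f, 0x80, 0x00, 0x00], 0, false, 23, 4)   // 1
//   exports.read([0xc0, 0x00, 0x00, 0x00], 0, false, 23, 4)   // -2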
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
var e, m, c,
eLen = nBytes * 8 - mLen - 1,
eMax = (1 << eLen) - 1,
eBias = eMax >> 1,
rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
i = isLE ? 0 : (nBytes - 1),
d = isLE ? 1 : -1,
s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
value = Math.abs(value);
if (isNaN(value) || value === Infinity) {
m = isNaN(value) ? 1 : 0;
e = eMax;
} else {
e = Math.floor(Math.log(value) / Math.LN2);
if (value * (c = Math.pow(2, -e)) < 1) {
e--;
c *= 2;
}
if (e + eBias >= 1) {
value += rt / c;
} else {
value += rt * Math.pow(2, 1 - eBias);
}
if (value * c >= 2) {
e++;
c /= 2;
}
if (e + eBias >= eMax) {
m = 0;
e = eMax;
} else if (e + eBias >= 1) {
m = (value * c - 1) * Math.pow(2, mLen);
e = e + eBias;
} else {
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
e = 0;
}
}
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);
e = (e << mLen) | m;
eLen += mLen;
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);
buffer[offset + i - d] |= s * 128;
};
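// Editor's sketch (illustrative only, not part of the original bundle): writing
// 1.5 as a little-endian single-precision float:
//
//   var b = [0, 0, 0, 0]
//   exports.write(b, 1.5, 0, true, 23, 4)   // b is now [0x00, 0x00, 0xc0, 0x3f]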
},{}],7:[function(require,module,exports){
(function (process){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// resolves . and .. elements in a path array with directory names there
// must be no slashes, empty elements, or device names (c:\) in the array
// (so also no leading and trailing slashes - it does not distinguish
// relative and absolute paths)
function normalizeArray(parts, allowAboveRoot) {
// if the path tries to go above the root, `up` ends up > 0
var up = 0;
for (var i = parts.length - 1; i >= 0; i--) {
var last = parts[i];
if (last === '.') {
parts.splice(i, 1);
} else if (last === '..') {
parts.splice(i, 1);
up++;
} else if (up) {
parts.splice(i, 1);
up--;
}
}
// if the path is allowed to go above the root, restore leading ..s
if (allowAboveRoot) {
for (; up--; up) {
parts.unshift('..');
}
}
return parts;
}
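// Editor's sketch (illustrative only, not part of the original bundle): dot
// segments collapse, and '..' entries survive only when allowAboveRoot is set:
//
//   normalizeArray(['a', '.', 'b', '..', 'c'], false)   // ['a', 'c']
//   normalizeArray(['..', 'a'], true)                   // ['..', 'a'] ('..' restored)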
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
/^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/;
var splitPath = function(filename) {
return splitPathRe.exec(filename).slice(1);
};
// path.resolve([from ...], to)
// posix version
exports.resolve = function() {
var resolvedPath = '',
resolvedAbsolute = false;
for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
var path = (i >= 0) ? arguments[i] : process.cwd();
// Skip empty and invalid entries
if (typeof path !== 'string') {
throw new TypeError('Arguments to path.resolve must be strings');
} else if (!path) {
continue;
}
resolvedPath = path + '/' + resolvedPath;
resolvedAbsolute = path.charAt(0) === '/';
}
// At this point the path should be resolved to a full absolute path, but
// handle relative paths to be safe (might happen when process.cwd() fails)
// Normalize the path
resolvedPath = normalizeArray(filter(resolvedPath.split('/'), function(p) {
return !!p;
}), !resolvedAbsolute).join('/');
return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
};
// path.normalize(path)
// posix version
exports.normalize = function(path) {
var isAbsolute = exports.isAbsolute(path),
trailingSlash = substr(path, -1) === '/';
// Normalize the path
path = normalizeArray(filter(path.split('/'), function(p) {
return !!p;
}), !isAbsolute).join('/');
if (!path && !isAbsolute) {
path = '.';
}
if (path && trailingSlash) {
path += '/';
}
return (isAbsolute ? '/' : '') + path;
};
// posix version
exports.isAbsolute = function(path) {
return path.charAt(0) === '/';
};
// posix version
exports.join = function() {
var paths = Array.prototype.slice.call(arguments, 0);
return exports.normalize(filter(paths, function(p, index) {
if (typeof p !== 'string') {
throw new TypeError('Arguments to path.join must be strings');
}
return p;
}).join('/'));
};
// path.relative(from, to)
// posix version
exports.relative = function(from, to) {
from = exports.resolve(from).substr(1);
to = exports.resolve(to).substr(1);
function trim(arr) {
var start = 0;
for (; start < arr.length; start++) {
if (arr[start] !== '') break;
}
var end = arr.length - 1;
for (; end >= 0; end--) {
if (arr[end] !== '') break;
}
if (start > end) return [];
return arr.slice(start, end - start + 1);
}
var fromParts = trim(from.split('/'));
var toParts = trim(to.split('/'));
var length = Math.min(fromParts.length, toParts.length);
var samePartsLength = length;
for (var i = 0; i < length; i++) {
if (fromParts[i] !== toParts[i]) {
samePartsLength = i;
break;
}
}
var outputParts = [];
for (var i = samePartsLength; i < fromParts.length; i++) {
outputParts.push('..');
}
outputParts = outputParts.concat(toParts.slice(samePartsLength));
return outputParts.join('/');
};
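// Editor's sketch (illustrative only, not part of the original bundle): relative()
// walks up out of the shared prefix and back down into the target:
//
//   exports.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb')
//   // '../../impl/bbb'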
exports.sep = '/';
exports.delimiter = ':';
exports.dirname = function(path) {
var result = splitPath(path),
root = result[0],
dir = result[1];
if (!root && !dir) {
// No dirname whatsoever
return '.';
}
if (dir) {
// It has a dirname, strip trailing slash
dir = dir.substr(0, dir.length - 1);
}
return root + dir;
};
exports.basename = function(path, ext) {
var f = splitPath(path)[2];
// TODO: make this comparison case-insensitive on windows?
if (ext && f.substr(-1 * ext.length) === ext) {
f = f.substr(0, f.length - ext.length);
}
return f;
};
exports.extname = function(path) {
return splitPath(path)[3];
};
function filter (xs, f) {
if (xs.filter) return xs.filter(f);
var res = [];
for (var i = 0; i < xs.length; i++) {
if (f(xs[i], i, xs)) res.push(xs[i]);
}
return res;
}
// String.prototype.substr - negative indexes don't work in IE8
var substr = 'ab'.substr(-1) === 'b'
? function (str, start, len) { return str.substr(start, len) }
: function (str, start, len) {
if (start < 0) start = str.length + start;
return str.substr(start, len);
}
;
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3}],8:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
module.exports = Duplex;
var inherits = require('inherits');
var setImmediate = require('process/browser.js').nextTick;
var Readable = require('./readable.js');
var Writable = require('./writable.js');
inherits(Duplex, Readable);
Duplex.prototype.write = Writable.prototype.write;
Duplex.prototype.end = Writable.prototype.end;
Duplex.prototype._write = Writable.prototype._write;
function Duplex(options) {
if (!(this instanceof Duplex))
return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
if (options && options.readable === false)
this.readable = false;
if (options && options.writable === false)
this.writable = false;
this.allowHalfOpen = true;
if (options && options.allowHalfOpen === false)
this.allowHalfOpen = false;
this.once('end', onend);
}
// the no-half-open enforcer
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
if (this.allowHalfOpen || this._writableState.ended)
return;
// no more data can be written.
// But allow more writes to happen in this tick.
var self = this;
setImmediate(function () {
self.end();
});
}
},{"./readable.js":12,"./writable.js":14,"inherits":16,"process/browser.js":10}],9:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
module.exports = Stream;
var EE = require('events').EventEmitter;
var inherits = require('inherits');
inherits(Stream, EE);
Stream.Readable = require('./readable.js');
Stream.Writable = require('./writable.js');
Stream.Duplex = require('./duplex.js');
Stream.Transform = require('./transform.js');
Stream.PassThrough = require('./passthrough.js');
// Backwards-compat with node 0.4.x
Stream.Stream = Stream;
// old-style streams. Note that the pipe method (the only relevant
// part of this class) is overridden in the Readable class.
function Stream() {
EE.call(this);
}
Stream.prototype.pipe = function(dest, options) {
var source = this;
function ondata(chunk) {
if (dest.writable) {
if (false === dest.write(chunk) && source.pause) {
source.pause();
}
}
}
source.on('data', ondata);
function ondrain() {
if (source.readable && source.resume) {
source.resume();
}
}
dest.on('drain', ondrain);
// If the 'end' option is not supplied, dest.end() will be called when
// source gets the 'end' or 'close' events. Only dest.end() once.
if (!dest._isStdio && (!options || options.end !== false)) {
source.on('end', onend);
source.on('close', onclose);
}
var didOnEnd = false;
function onend() {
if (didOnEnd) return;
didOnEnd = true;
dest.end();
}
function onclose() {
if (didOnEnd) return;
didOnEnd = true;
if (typeof dest.destroy === 'function') dest.destroy();
}
// don't leave dangling pipes when there are errors.
function onerror(er) {
cleanup();
if (EE.listenerCount(this, 'error') === 0) {
throw er; // Unhandled stream error in pipe.
}
}
source.on('error', onerror);
dest.on('error', onerror);
// remove all the event listeners that were added.
function cleanup() {
source.removeListener('data', ondata);
dest.removeListener('drain', ondrain);
source.removeListener('end', onend);
source.removeListener('close', onclose);
source.removeListener('error', onerror);
dest.removeListener('error', onerror);
source.removeListener('end', cleanup);
source.removeListener('close', cleanup);
dest.removeListener('close', cleanup);
}
source.on('end', cleanup);
source.on('close', cleanup);
dest.on('close', cleanup);
dest.emit('pipe', source);
// Allow for unix-like usage: A.pipe(B).pipe(C)
return dest;
};
},{"./duplex.js":8,"./passthrough.js":11,"./readable.js":12,"./transform.js":13,"./writable.js":14,"events":2,"inherits":16}],10:[function(require,module,exports){
module.exports=require(3)
},{}],11:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.
module.exports = PassThrough;
var Transform = require('./transform.js');
var inherits = require('inherits');
inherits(PassThrough, Transform);
function PassThrough(options) {
if (!(this instanceof PassThrough))
return new PassThrough(options);
Transform.call(this, options);
}
PassThrough.prototype._transform = function(chunk, encoding, cb) {
cb(null, chunk);
};
},{"./transform.js":13,"inherits":16}],12:[function(require,module,exports){
(function (process){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
module.exports = Readable;
Readable.ReadableState = ReadableState;
var EE = require('events').EventEmitter;
var Stream = require('./index.js');
var Buffer = require('buffer').Buffer;
var setImmediate = require('process/browser.js').nextTick;
var StringDecoder;
var inherits = require('inherits');
inherits(Readable, Stream);
function ReadableState(options, stream) {
options = options || {};
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.buffer = [];
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
this.flowing = false;
this.ended = false;
this.endEmitted = false;
this.reading = false;
// In streams that never have any data, and do push(null) right away,
// the consumer can miss the 'end' event if they do some I/O before
// consuming the stream. So, we don't emit('end') until some reading
// happens.
this.calledRead = false;
// a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// whenever we return null, then we set a flag to say
// that we're awaiting a 'readable' event emission.
this.needReadable = false;
this.emittedReadable = false;
this.readableListening = false;
// object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// when piping, we only care about 'readable' events that happen
// after read()ing all the bytes and not getting any pushback.
this.ranOut = false;
// the number of writers that are awaiting a drain event in .pipe()s
this.awaitDrain = 0;
// if true, a maybeReadMore has been scheduled
this.readingMore = false;
this.decoder = null;
this.encoding = null;
if (options.encoding) {
if (!StringDecoder)
StringDecoder = require('string_decoder').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
}
function Readable(options) {
if (!(this instanceof Readable))
return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
Stream.call(this);
}
// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function(chunk, encoding) {
var state = this._readableState;
if (typeof chunk === 'string' && !state.objectMode) {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
chunk = new Buffer(chunk, encoding);
encoding = '';
}
}
return readableAddChunk(this, state, chunk, encoding, false);
};
// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function(chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var er = chunkInvalid(state, chunk);
if (er) {
stream.emit('error', er);
} else if (chunk === null || chunk === undefined) {
state.reading = false;
if (!state.ended)
onEofChunk(stream, state);
} else if (state.objectMode || chunk && chunk.length > 0) {
if (state.ended && !addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
if (state.decoder && !addToFront && !encoding)
chunk = state.decoder.write(chunk);
// update the buffer info.
state.length += state.objectMode ? 1 : chunk.length;
if (addToFront) {
state.buffer.unshift(chunk);
} else {
state.reading = false;
state.buffer.push(chunk);
}
if (state.needReadable)
emitReadable(stream);
maybeReadMore(stream, state);
}
} else if (!addToFront) {
state.reading = false;
}
return needMoreData(state);
}
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
return !state.ended &&
(state.needReadable ||
state.length < state.highWaterMark ||
state.length === 0);
}
// backwards compatibility.
Readable.prototype.setEncoding = function(enc) {
if (!StringDecoder)
StringDecoder = require('string_decoder').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
};
// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;
function roundUpToNextPowerOf2(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
// Get the next highest power of 2
n--;
for (var p = 1; p < 32; p <<= 1) n |= n >> p;
n++;
}
return n;
}
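// Editor's sketch (illustrative only, not part of the original bundle): the
// bit-smearing loop above rounds up to a power of two, capped at MAX_HWM:
//
//   roundUpToNextPowerOf2(200)        // 256
//   roundUpToNextPowerOf2(16384)      // 16384 (already a power of two)
//   roundUpToNextPowerOf2(0x900000)   // 0x800000 (capped at MAX_HWM)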
function howMuchToRead(n, state) {
if (state.length === 0 && state.ended)
return 0;
if (state.objectMode)
return n === 0 ? 0 : 1;
if (isNaN(n) || n === null) {
// only flow one buffer at a time
if (state.flowing && state.buffer.length)
return state.buffer[0].length;
else
return state.length;
}
if (n <= 0)
return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
if (n > state.highWaterMark)
state.highWaterMark = roundUpToNextPowerOf2(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
if (!state.ended) {
state.needReadable = true;
return 0;
} else
return state.length;
}
return n;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) {
var state = this._readableState;
state.calledRead = true;
var nOrig = n;
if (typeof n !== 'number' || n > 0)
state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
if (n === 0 &&
state.needReadable &&
(state.length >= state.highWaterMark || state.ended)) {
emitReadable(this);
return null;
}
n = howMuchToRead(n, state);
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
if (state.length === 0)
endReadable(this);
return null;
}
// All the actual chunk generation logic needs to be
// *below* the call to _read. The reason is that in certain
// synthetic stream cases, such as passthrough streams, _read
// may be a completely synchronous operation which may change
// the state of the read buffer, providing enough data when
// before there was *not* enough.
//
// So, the steps are:
// 1. Figure out what the state of things will be after we do
// a read from the buffer.
//
// 2. If that resulting state will trigger a _read, then call _read.
// Note that this may be asynchronous, or synchronous. Yes, it is
// deeply ugly to write APIs this way, but that still doesn't mean
// that the Readable class should behave improperly, as streams are
// designed to be sync/async agnostic.
// Take note if the _read call is sync or async (ie, if the read call
// has returned yet), so that we know whether or not it's safe to emit
// 'readable' etc.
//
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
var doRead = state.needReadable;
// if we currently have less than the highWaterMark, then also read some
if (state.length - n <= state.highWaterMark)
doRead = true;
// however, if we've ended, then there's no point, and if we're already
// reading, then it's unnecessary.
if (state.ended || state.reading)
doRead = false;
if (doRead) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
if (state.length === 0)
state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
}
// If _read called its callback synchronously, then `reading`
// will be false, and we need to re-evaluate how much data we
// can return to the user.
if (doRead && !state.reading)
n = howMuchToRead(nOrig, state);
var ret;
if (n > 0)
ret = fromList(n, state);
else
ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
}
state.length -= n;
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
if (state.length === 0 && !state.ended)
state.needReadable = true;
// If we happened to read() exactly the remaining amount in the
// buffer, and the EOF has been seen at this point, then make sure
// that we emit 'end' on the very next tick.
if (state.ended && !state.endEmitted && state.length === 0)
endReadable(this);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode &&
!er) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
function onEofChunk(stream, state) {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length) {
state.buffer.push(chunk);
state.length += state.objectMode ? 1 : chunk.length;
}
}
state.ended = true;
// if we've ended and we have some data left, then emit
// 'readable' now to make sure it gets picked up.
if (state.length > 0)
emitReadable(stream);
else
endReadable(stream);
}
// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
var state = stream._readableState;
state.needReadable = false;
if (state.emittedReadable)
return;
state.emittedReadable = true;
if (state.sync)
setImmediate(function() {
emitReadable_(stream);
});
else
emitReadable_(stream);
}
function emitReadable_(stream) {
stream.emit('readable');
}
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
if (!state.readingMore) {
state.readingMore = true;
setImmediate(function() {
maybeReadMore_(stream, state);
});
}
}
function maybeReadMore_(stream, state) {
var len = state.length;
while (!state.reading && !state.flowing && !state.ended &&
state.length < state.highWaterMark) {
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
break;
else
len = state.length;
}
state.readingMore = false;
}
// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function(n) {
this.emit('error', new Error('not implemented'));
};
Readable.prototype.pipe = function(dest, pipeOpts) {
var src = this;
var state = this._readableState;
switch (state.pipesCount) {
case 0:
state.pipes = dest;
break;
case 1:
state.pipes = [state.pipes, dest];
break;
default:
state.pipes.push(dest);
break;
}
state.pipesCount += 1;
var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
dest !== process.stdout &&
dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
if (state.endEmitted)
setImmediate(endFn);
else
src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
if (readable !== src) return;
cleanup();
}
function onend() {
dest.end();
}
// when the dest drains, it reduces the awaitDrain counter
// on the source. This would be more elegant with a .once()
// handler in flow(), but adding and removing repeatedly is
// too slow.
var ondrain = pipeOnDrain(src);
dest.on('drain', ondrain);
function cleanup() {
// cleanup event handlers once the pipe is broken
dest.removeListener('close', onclose);
dest.removeListener('finish', onfinish);
dest.removeListener('drain', ondrain);
dest.removeListener('error', onerror);
dest.removeListener('unpipe', onunpipe);
src.removeListener('end', onend);
src.removeListener('end', cleanup);
// if the reader is waiting for a drain event from this
// specific writer, then it would cause it to never start
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
if (!dest._writableState || dest._writableState.needDrain)
ondrain();
}
// if the dest has an error, then stop piping into it.
// however, don't suppress the throwing behavior for this.
// check for listeners before emit removes one-time listeners.
var errListeners = EE.listenerCount(dest, 'error');
function onerror(er) {
unpipe();
if (errListeners === 0 && EE.listenerCount(dest, 'error') === 0)
dest.emit('error', er);
}
dest.once('error', onerror);
// Both close and finish should trigger unpipe, but only once.
function onclose() {
dest.removeListener('finish', onfinish);
unpipe();
}
dest.once('close', onclose);
function onfinish() {
dest.removeListener('close', onclose);
unpipe();
}
dest.once('finish', onfinish);
function unpipe() {
src.unpipe(dest);
}
// tell the dest that it's being piped to
dest.emit('pipe', src);
// start the flow if it hasn't been started already.
if (!state.flowing) {
// the handler that waits for readable events after all
// the data gets sucked out in flow.
// This would be easier to follow with a .once() handler
// in flow(), but that is too slow.
this.on('readable', pipeOnReadable);
state.flowing = true;
setImmediate(function() {
flow(src);
});
}
return dest;
};
function pipeOnDrain(src) {
return function() {
var dest = this;
var state = src._readableState;
state.awaitDrain--;
if (state.awaitDrain === 0)
flow(src);
};
}
function flow(src) {
var state = src._readableState;
var chunk;
state.awaitDrain = 0;
function write(dest, i, list) {
var written = dest.write(chunk);
if (false === written) {
state.awaitDrain++;
}
}
while (state.pipesCount && null !== (chunk = src.read())) {
if (state.pipesCount === 1)
write(state.pipes, 0, null);
else
forEach(state.pipes, write);
src.emit('data', chunk);
// if anyone needs a drain, then we have to wait for that.
if (state.awaitDrain > 0)
return;
}
// if every destination was unpiped, either before entering this
// function, or in the while loop, then stop flowing.
//
// NB: This is a pretty rare edge case.
if (state.pipesCount === 0) {
state.flowing = false;
// if there were data event listeners added, then switch to old mode.
if (EE.listenerCount(src, 'data') > 0)
emitDataEvents(src);
return;
}
  // at this point, no one needed a drain, so we just ran out of data.
  // On the next readable event, start it over again.
state.ranOut = true;
}
function pipeOnReadable() {
if (this._readableState.ranOut) {
this._readableState.ranOut = false;
flow(this);
}
}
Readable.prototype.unpipe = function(dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
if (state.pipesCount === 0)
return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
if (dest && dest !== state.pipes)
return this;
if (!dest)
dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
if (dest)
dest.emit('unpipe', this);
return this;
}
// slow case. multiple pipe destinations.
if (!dest) {
// remove all.
var dests = state.pipes;
var len = state.pipesCount;
state.pipes = null;
state.pipesCount = 0;
this.removeListener('readable', pipeOnReadable);
state.flowing = false;
for (var i = 0; i < len; i++)
dests[i].emit('unpipe', this);
return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
if (i === -1)
return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
if (state.pipesCount === 1)
state.pipes = state.pipes[0];
dest.emit('unpipe', this);
return this;
};
// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
if (ev === 'data' && !this._readableState.flowing)
emitDataEvents(this);
if (ev === 'readable' && this.readable) {
var state = this._readableState;
if (!state.readableListening) {
state.readableListening = true;
state.emittedReadable = false;
state.needReadable = true;
if (!state.reading) {
this.read(0);
} else if (state.length) {
emitReadable(this, state);
}
}
}
return res;
};
Readable.prototype.addListener = Readable.prototype.on;
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function() {
emitDataEvents(this);
this.read(0);
this.emit('resume');
};
Readable.prototype.pause = function() {
emitDataEvents(this, true);
this.emit('pause');
};
function emitDataEvents(stream, startPaused) {
var state = stream._readableState;
if (state.flowing) {
// https://github.com/isaacs/readable-stream/issues/16
throw new Error('Cannot switch to old mode now.');
}
var paused = startPaused || false;
var readable = false;
// convert to an old-style stream.
stream.readable = true;
stream.pipe = Stream.prototype.pipe;
stream.on = stream.addListener = Stream.prototype.on;
stream.on('readable', function() {
readable = true;
var c;
while (!paused && (null !== (c = stream.read())))
stream.emit('data', c);
if (c === null) {
readable = false;
stream._readableState.needReadable = true;
}
});
stream.pause = function() {
paused = true;
this.emit('pause');
};
stream.resume = function() {
paused = false;
if (readable)
setImmediate(function() {
stream.emit('readable');
});
else
this.read(0);
this.emit('resume');
};
// now make it start, just in case it hadn't already.
stream.emit('readable');
}
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
var state = this._readableState;
var paused = false;
var self = this;
stream.on('end', function() {
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
if (chunk && chunk.length)
self.push(chunk);
}
self.push(null);
});
stream.on('data', function(chunk) {
if (state.decoder)
chunk = state.decoder.write(chunk);
if (!chunk || !state.objectMode && !chunk.length)
return;
var ret = self.push(chunk);
if (!ret) {
paused = true;
stream.pause();
}
});
// proxy all the other methods.
// important when wrapping filters and duplexes.
for (var i in stream) {
if (typeof stream[i] === 'function' &&
typeof this[i] === 'undefined') {
this[i] = function(method) { return function() {
return stream[method].apply(stream, arguments);
}}(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
forEach(events, function(ev) {
stream.on(ev, function (x) {
      return self.emit(ev, x);
});
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
self._read = function(n) {
if (paused) {
paused = false;
stream.resume();
}
};
return self;
};
// exposed for testing purposes only.
Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
var list = state.buffer;
var length = state.length;
var stringMode = !!state.decoder;
var objectMode = !!state.objectMode;
var ret;
// nothing in the list, definitely empty.
if (list.length === 0)
return null;
if (length === 0)
ret = null;
else if (objectMode)
ret = list.shift();
else if (!n || n >= length) {
// read it all, truncate the array.
if (stringMode)
ret = list.join('');
else
ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
if (n < list[0].length) {
// just take a part of the first list item.
// slice is the same for buffers and strings.
var buf = list[0];
ret = buf.slice(0, n);
list[0] = buf.slice(n);
} else if (n === list[0].length) {
// first list is a perfect match
ret = list.shift();
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
if (stringMode)
ret = '';
else
ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
if (stringMode)
ret += buf.slice(0, cpy);
else
buf.copy(ret, c, 0, cpy);
if (cpy < buf.length)
list[0] = buf.slice(cpy);
else
list.shift();
c += cpy;
}
}
}
return ret;
}
function endReadable(stream) {
var state = stream._readableState;
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
if (state.length > 0)
throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted && state.calledRead) {
state.ended = true;
setImmediate(function() {
// Check that we didn't get one last unshift.
if (!state.endEmitted && state.length === 0) {
state.endEmitted = true;
stream.readable = false;
stream.emit('end');
}
});
}
}
function forEach (xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
function indexOf (xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"./index.js":9,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"buffer":4,"events":2,"inherits":16,"process/browser.js":10,"string_decoder":15}],13:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a transform stream is a readable/writable stream where you do
// something with the data. Sometimes it's called a "filter",
// but that's not a great name for it, since that implies a thing where
// some bits pass through, and others are simply ignored. (That would
// be a valid example of a transform, of course.)
//
// While the output is causally related to the input, it's not a
// necessarily symmetric or synchronous transformation. For example,
// a zlib stream might take multiple plain-text writes(), and then
// emit a single compressed chunk some time in the future.
//
// Here's how this works:
//
// The Transform stream has all the aspects of the readable and writable
// stream classes. When you write(chunk), that calls _write(chunk,cb)
// internally, and returns false if there's a lot of pending writes
// buffered up. When you call read(), that calls _read(n) until
// there's enough pending readable data buffered up.
//
// In a transform stream, the written data is placed in a buffer. When
// _read(n) is called, it transforms the queued up data, calling the
// buffered _write cb's as it consumes chunks. If consuming a single
// written chunk would result in multiple output chunks, then the first
// outputted bit calls the readcb, and subsequent chunks just go into
// the read buffer, and will cause it to emit 'readable' if necessary.
//
// This way, back-pressure is actually determined by the reading side,
// since _read has to be called to start processing a new chunk. However,
// a pathological inflate type of transform can cause excessive buffering
// here. For example, imagine a stream where every byte of input is
// interpreted as an integer from 0-255, and then results in that many
// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in
// 1kb of data being output. In this case, you could write a very small
// amount of input, and end up with a very large amount of output. In
// such a pathological inflating mechanism, there'd be no way to tell
// the system to stop doing the transform. A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.
module.exports = Transform;
var Duplex = require('./duplex.js');
var inherits = require('inherits');
inherits(Transform, Duplex);
function TransformState(options, stream) {
this.afterTransform = function(er, data) {
return afterTransform(stream, er, data);
};
this.needTransform = false;
this.transforming = false;
this.writecb = null;
this.writechunk = null;
}
function afterTransform(stream, er, data) {
var ts = stream._transformState;
ts.transforming = false;
var cb = ts.writecb;
if (!cb)
return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
if (data !== null && data !== undefined)
stream.push(data);
if (cb)
cb(er);
var rs = stream._readableState;
rs.reading = false;
if (rs.needReadable || rs.length < rs.highWaterMark) {
stream._read(rs.highWaterMark);
}
}
function Transform(options) {
if (!(this instanceof Transform))
return new Transform(options);
Duplex.call(this, options);
var ts = this._transformState = new TransformState(options, this);
// when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
this._readableState.needReadable = true;
// we have implemented the _read method, and done the other things
// that Readable wants before the first _read call, so unset the
// sync guard flag.
this._readableState.sync = false;
this.once('finish', function() {
if ('function' === typeof this._flush)
this._flush(function(er) {
done(stream, er);
});
else
done(stream);
});
}
Transform.prototype.push = function(chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side. You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
throw new Error('not implemented');
};
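// --- Editor's note: a minimal sketch of an implementation class, not part of the
// original bundle. `UpperCaseTransform` is a hypothetical example of the contract
// described above: override _transform, push zero or more output chunks, then call
// cb(err) exactly once. Nothing in the bundle instantiates it.
function UpperCaseTransform(options) {
  if (!(this instanceof UpperCaseTransform))
    return new UpperCaseTransform(options);
  Transform.call(this, options);
}
inherits(UpperCaseTransform, Transform);
UpperCaseTransform.prototype._transform = function (chunk, encoding, cb) {
  // emit the upper-cased chunk on the readable side, then ack the write
  this.push(chunk.toString().toUpperCase());
  cb();
};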
Transform.prototype._write = function(chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
if (ts.needTransform ||
rs.needReadable ||
rs.length < rs.highWaterMark)
this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
var ts = this._transformState;
if (ts.writechunk && ts.writecb && !ts.transforming) {
ts.transforming = true;
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
} else {
// mark that we need a transform, so that any data that comes in
// will get processed, now that we've asked for it.
ts.needTransform = true;
}
};
function done(stream, er) {
if (er)
return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var rs = stream._readableState;
var ts = stream._transformState;
if (ws.length)
throw new Error('calling transform done when ws.length != 0');
if (ts.transforming)
throw new Error('calling transform done when still transforming');
return stream.push(null);
}
},{"./duplex.js":8,"inherits":16}],14:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable;
Writable.WritableState = WritableState;
var isUint8Array = typeof Uint8Array !== 'undefined'
? function (x) { return x instanceof Uint8Array }
: function (x) {
return x && x.constructor && x.constructor.name === 'Uint8Array'
}
;
var isArrayBuffer = typeof ArrayBuffer !== 'undefined'
? function (x) { return x instanceof ArrayBuffer }
: function (x) {
return x && x.constructor && x.constructor.name === 'ArrayBuffer'
}
;
var inherits = require('inherits');
var Stream = require('./index.js');
var setImmediate = require('process/browser.js').nextTick;
var Buffer = require('buffer').Buffer;
inherits(Writable, Stream);
function WriteReq(chunk, encoding, cb) {
this.chunk = chunk;
this.encoding = encoding;
this.callback = cb;
}
function WritableState(options, stream) {
options = options || {};
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
this.highWaterMark = (hwm || hwm === 0) ? hwm : 16 * 1024;
// object stream flag to indicate whether or not this stream
// contains buffers or objects.
this.objectMode = !!options.objectMode;
// cast to ints.
this.highWaterMark = ~~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
this.ending = false;
// when end() has been called, and returned
this.ended = false;
// when 'finish' is emitted
this.finished = false;
// should we decode strings into buffers before passing to _write?
// this is here so that some node-core streams can optimize string
// handling at a lower level.
var noDecode = options.decodeStrings === false;
this.decodeStrings = !noDecode;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
// Everything else in the universe uses 'utf8', though.
this.defaultEncoding = options.defaultEncoding || 'utf8';
// not an actual buffer we keep track of, but a measurement
// of how much we're waiting to get pushed to some underlying
// socket or file.
this.length = 0;
// a flag to see when we're in the middle of a write.
this.writing = false;
// a flag to be able to tell if the onwrite cb is called immediately,
// or on a later tick. We set this to true at first, because any
// actions that shouldn't happen until "later" should generally also
// not happen before the first write call.
this.sync = true;
// a flag to know if we're processing previously buffered items, which
// may call the _write() callback in the same tick, so that we don't
// end up in an overlapped onwrite situation.
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
this.onwrite = function(er) {
onwrite(stream, er);
};
// the callback that the user supplies to write(chunk,encoding,cb)
this.writecb = null;
// the amount that is being written when _write is called.
this.writelen = 0;
this.buffer = [];
}
function Writable(options) {
// The Writable ctor is also applied to Duplexes, which are not
// instanceof Writable; they're instanceof Readable.
if (!(this instanceof Writable) && !(this instanceof Stream.Duplex))
return new Writable(options);
this._writableState = new WritableState(options, this);
// legacy.
this.writable = true;
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function() {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
function writeAfterEnd(stream, state, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
stream.emit('error', er);
setImmediate(function() {
cb(er);
});
}
// If we get something that is not a buffer, string, null, or undefined,
// and we're not in objectMode, then that's an error.
// Otherwise stream chunks are all considered to be of length=1, and the
// watermarks determine how many objects to keep in the buffer, rather than
// how many bytes or characters.
function validChunk(stream, state, chunk, cb) {
var valid = true;
if (!Buffer.isBuffer(chunk) &&
'string' !== typeof chunk &&
chunk !== null &&
chunk !== undefined &&
!state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
setImmediate(function() {
cb(er);
});
valid = false;
}
return valid;
}
Writable.prototype.write = function(chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (!Buffer.isBuffer(chunk) && isUint8Array(chunk))
chunk = new Buffer(chunk);
if (isArrayBuffer(chunk) && typeof Uint8Array !== 'undefined')
chunk = new Buffer(new Uint8Array(chunk));
if (Buffer.isBuffer(chunk))
encoding = 'buffer';
else if (!encoding)
encoding = state.defaultEncoding;
if (typeof cb !== 'function')
cb = function() {};
if (state.ended)
writeAfterEnd(this, state, cb);
else if (validChunk(this, state, chunk, cb))
ret = writeOrBuffer(this, state, chunk, encoding, cb);
return ret;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode &&
state.decodeStrings !== false &&
typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
}
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
state.needDrain = !ret;
if (state.writing)
state.buffer.push(new WriteReq(chunk, encoding, cb));
else
doWrite(stream, state, len, chunk, encoding, cb);
return ret;
}
function doWrite(stream, state, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
state.writing = true;
state.sync = true;
stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
if (sync)
setImmediate(function() {
cb(er);
});
else
cb(er);
stream.emit('error', er);
}
function onwriteStateUpdate(state) {
state.writing = false;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
}
function onwrite(stream, er) {
var state = stream._writableState;
var sync = state.sync;
var cb = state.writecb;
onwriteStateUpdate(state);
if (er)
onwriteError(stream, state, sync, er, cb);
else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(stream, state);
if (!finished && !state.bufferProcessing && state.buffer.length)
clearBuffer(stream, state);
if (sync) {
setImmediate(function() {
afterWrite(stream, state, finished, cb);
});
} else {
afterWrite(stream, state, finished, cb);
}
}
}
function afterWrite(stream, state, finished, cb) {
if (!finished)
onwriteDrain(stream, state);
cb();
if (finished)
finishMaybe(stream, state);
}
// Must force callback to be called on nextTick, so that we don't
// emit 'drain' before the write() consumer gets the 'false' return
// value, and has a chance to attach a 'drain' listener.
function onwriteDrain(stream, state) {
if (state.length === 0 && state.needDrain) {
state.needDrain = false;
stream.emit('drain');
}
}
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
for (var c = 0; c < state.buffer.length; c++) {
var entry = state.buffer[c];
var chunk = entry.chunk;
var encoding = entry.encoding;
var cb = entry.callback;
var len = state.objectMode ? 1 : chunk.length;
doWrite(stream, state, len, chunk, encoding, cb);
// if we didn't call the onwrite immediately, then
// it means that we need to wait until it does.
// also, that means that the chunk and cb are currently
// being processed, so move the buffer counter past them.
if (state.writing) {
c++;
break;
}
}
state.bufferProcessing = false;
if (c < state.buffer.length)
state.buffer = state.buffer.slice(c);
else
state.buffer.length = 0;
}
Writable.prototype._write = function(chunk, encoding, cb) {
cb(new Error('not implemented'));
};
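// --- Editor's note: a minimal sketch, not part of the original bundle, of the
// contract described at the top of this module: implement an async _write(chunk,
// encoding, cb) and Writable handles the buffering and 'drain' emission for you.
// `MemorySink` is a hypothetical example class; nothing in the bundle instantiates it.
function MemorySink(options) {
  if (!(this instanceof MemorySink)) return new MemorySink(options);
  Writable.call(this, options);
  this.chunks = [];
}
inherits(MemorySink, Writable);
MemorySink.prototype._write = function (chunk, encoding, cb) {
  // keep the chunk around, then signal completion asynchronously
  this.chunks.push(chunk);
  setImmediate(cb);
};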
Writable.prototype.end = function(chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
cb = chunk;
chunk = null;
encoding = null;
} else if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
if (typeof chunk !== 'undefined' && chunk !== null)
this.write(chunk, encoding);
// ignore unnecessary end() calls.
if (!state.ending && !state.finished)
endWritable(this, state, cb);
};
function needFinish(stream, state) {
return (state.ending &&
state.length === 0 &&
!state.finished &&
!state.writing);
}
function finishMaybe(stream, state) {
var need = needFinish(stream, state);
if (need) {
state.finished = true;
stream.emit('finish');
}
return need;
}
function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
if (state.finished)
setImmediate(cb);
else
stream.once('finish', cb);
}
state.ended = true;
}
},{"./index.js":9,"buffer":4,"inherits":16,"process/browser.js":10}],15:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = require('buffer').Buffer;
function assertEncoding(encoding) {
if (encoding && !Buffer.isEncoding(encoding)) {
throw new Error('Unknown encoding: ' + encoding);
}
}
var StringDecoder = exports.StringDecoder = function(encoding) {
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
assertEncoding(encoding);
switch (this.encoding) {
case 'utf8':
// CESU-8 represents each half of a surrogate pair with 3 bytes
this.surrogateSize = 3;
break;
case 'ucs2':
case 'utf16le':
// UTF-16 represents each half of a surrogate pair with 2 bytes
this.surrogateSize = 2;
this.detectIncompleteChar = utf16DetectIncompleteChar;
break;
case 'base64':
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
this.surrogateSize = 3;
this.detectIncompleteChar = base64DetectIncompleteChar;
break;
default:
this.write = passThroughWrite;
return;
}
this.charBuffer = new Buffer(6);
this.charReceived = 0;
this.charLength = 0;
};
StringDecoder.prototype.write = function(buffer) {
var charStr = '';
var offset = 0;
// if our last write ended with an incomplete multibyte character
while (this.charLength) {
// determine how many remaining bytes this buffer has to offer for this char
var i = (buffer.length >= this.charLength - this.charReceived) ?
this.charLength - this.charReceived :
buffer.length;
// add the new bytes to the char buffer
buffer.copy(this.charBuffer, this.charReceived, offset, i);
this.charReceived += (i - offset);
offset = i;
if (this.charReceived < this.charLength) {
// still not enough chars in this buffer? wait for more ...
return '';
}
// get the character that was split
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
// lead surrogate (D800-DBFF) is also the incomplete character
var charCode = charStr.charCodeAt(charStr.length - 1);
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
this.charLength += this.surrogateSize;
charStr = '';
continue;
}
this.charReceived = this.charLength = 0;
// if there are no more bytes in this buffer, just emit our char
if (i == buffer.length) return charStr;
// otherwise cut the completed character's bytes off the front of this buffer
buffer = buffer.slice(i, buffer.length);
break;
}
var lenIncomplete = this.detectIncompleteChar(buffer);
var end = buffer.length;
if (this.charLength) {
// buffer the incomplete character bytes we got
buffer.copy(this.charBuffer, 0, buffer.length - lenIncomplete, end);
this.charReceived = lenIncomplete;
end -= lenIncomplete;
}
charStr += buffer.toString(this.encoding, 0, end);
var end = charStr.length - 1;
var charCode = charStr.charCodeAt(end);
// lead surrogate (D800-DBFF) is also the incomplete character
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
var size = this.surrogateSize;
this.charLength += size;
this.charReceived += size;
this.charBuffer.copy(this.charBuffer, size, 0, size);
this.charBuffer.write(charStr.charAt(charStr.length - 1), this.encoding);
return charStr.substring(0, end);
}
// or just emit the charStr
return charStr;
};
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
// determine how many bytes we have to check at the end of this buffer
var i = (buffer.length >= 3) ? 3 : buffer.length;
// Figure out if one of the last i bytes of our buffer announces an
// incomplete char.
for (; i > 0; i--) {
var c = buffer[buffer.length - i];
// See http://en.wikipedia.org/wiki/UTF-8#Description
// 110XXXXX
if (i == 1 && c >> 5 == 0x06) {
this.charLength = 2;
break;
}
// 1110XXXX
if (i <= 2 && c >> 4 == 0x0E) {
this.charLength = 3;
break;
}
// 11110XXX
if (i <= 3 && c >> 3 == 0x1E) {
this.charLength = 4;
break;
}
}
return i;
};
StringDecoder.prototype.end = function(buffer) {
var res = '';
if (buffer && buffer.length)
res = this.write(buffer);
if (this.charReceived) {
var cr = this.charReceived;
var buf = this.charBuffer;
var enc = this.encoding;
res += buf.slice(0, cr).toString(enc);
}
return res;
};
function passThroughWrite(buffer) {
return buffer.toString(this.encoding);
}
function utf16DetectIncompleteChar(buffer) {
var incomplete = this.charReceived = buffer.length % 2;
this.charLength = incomplete ? 2 : 0;
return incomplete;
}
function base64DetectIncompleteChar(buffer) {
var incomplete = this.charReceived = buffer.length % 3;
this.charLength = incomplete ? 3 : 0;
return incomplete;
}
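// --- Editor's note: a minimal usage sketch, not part of the original bundle. ---
// It shows why the decoder buffers incomplete sequences: the 3-byte UTF-8
// character U+20AC is split across two writes and only emitted once the final
// byte arrives. `exampleStringDecoderUsage` is hypothetical and never called.
function exampleStringDecoderUsage() {
  var decoder = new StringDecoder('utf8');
  var euro = new Buffer([0xE2, 0x82, 0xAC]);
  var first = decoder.write(euro.slice(0, 2)); // '' - incomplete char is buffered
  var rest = decoder.write(euro.slice(2));     // the completed euro-sign character
  return first + rest;
}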
},{"buffer":4}],16:[function(require,module,exports){
if (typeof Object.create === 'function') {
// implementation from standard node.js 'util' module
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor
ctor.prototype = Object.create(superCtor.prototype, {
constructor: {
value: ctor,
enumerable: false,
writable: true,
configurable: true
}
});
};
} else {
// old school shim for old browsers
module.exports = function inherits(ctor, superCtor) {
ctor.super_ = superCtor
var TempCtor = function () {}
TempCtor.prototype = superCtor.prototype
ctor.prototype = new TempCtor()
ctor.prototype.constructor = ctor
}
}
},{}],17:[function(require,module,exports){
'use strict';
module.exports = INTERNAL;
function INTERNAL() {}
},{}],18:[function(require,module,exports){
'use strict';
var Promise = require('./promise');
var reject = require('./reject');
var resolve = require('./resolve');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = all;
function all(iterable) {
if (Object.prototype.toString.call(iterable) !== '[object Array]') {
return reject(new TypeError('must be an array'));
}
var len = iterable.length;
var called = false;
if (!len) {
return resolve([]);
}
var values = new Array(len);
var resolved = 0;
var i = -1;
var promise = new Promise(INTERNAL);
while (++i < len) {
allResolver(iterable[i], i);
}
return promise;
function allResolver(value, i) {
resolve(value).then(resolveFromAll, function (error) {
if (!called) {
called = true;
handlers.reject(promise, error);
}
});
function resolveFromAll(outValue) {
values[i] = outValue;
    if (++resolved === len && !called) {
called = true;
handlers.resolve(promise, values);
}
}
}
}
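// --- Editor's note: a minimal usage sketch, not part of the original bundle. ---
// all() resolves with an array of outcomes once every input promise fulfills,
// or rejects with the first error. `exampleAllUsage` is hypothetical and unused.
function exampleAllUsage() {
  return all([resolve(1), resolve(2)]).then(function (values) {
    return values; // [1, 2]
  });
}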
},{"./INTERNAL":17,"./handlers":19,"./promise":21,"./reject":24,"./resolve":25}],19:[function(require,module,exports){
'use strict';
var tryCatch = require('./tryCatch');
var resolveThenable = require('./resolveThenable');
var states = require('./states');
exports.resolve = function (self, value) {
var result = tryCatch(getThen, value);
if (result.status === 'error') {
return exports.reject(self, result.value);
}
var thenable = result.value;
if (thenable) {
resolveThenable.safely(self, thenable);
} else {
self.state = states.FULFILLED;
self.outcome = value;
var i = -1;
var len = self.queue.length;
while (++i < len) {
self.queue[i].callFulfilled(value);
}
}
return self;
};
exports.reject = function (self, error) {
self.state = states.REJECTED;
self.outcome = error;
var i = -1;
var len = self.queue.length;
while (++i < len) {
self.queue[i].callRejected(error);
}
return self;
};
function getThen(obj) {
// Make sure we only access the accessor once as required by the spec
var then = obj && obj.then;
if (obj && typeof obj === 'object' && typeof then === 'function') {
    return function applyThen() {
then.apply(obj, arguments);
};
}
}
},{"./resolveThenable":26,"./states":27,"./tryCatch":28}],20:[function(require,module,exports){
module.exports = exports = require('./promise');
exports.resolve = require('./resolve');
exports.reject = require('./reject');
exports.all = require('./all');
exports.race = require('./race');
},{"./all":18,"./promise":21,"./race":23,"./reject":24,"./resolve":25}],21:[function(require,module,exports){
'use strict';
var unwrap = require('./unwrap');
var INTERNAL = require('./INTERNAL');
var resolveThenable = require('./resolveThenable');
var states = require('./states');
var QueueItem = require('./queueItem');
module.exports = Promise;
function Promise(resolver) {
if (!(this instanceof Promise)) {
return new Promise(resolver);
}
if (typeof resolver !== 'function') {
    throw new TypeError('resolver must be a function');
}
this.state = states.PENDING;
this.queue = [];
this.outcome = void 0;
if (resolver !== INTERNAL) {
resolveThenable.safely(this, resolver);
}
}
Promise.prototype['catch'] = function (onRejected) {
return this.then(null, onRejected);
};
Promise.prototype.then = function (onFulfilled, onRejected) {
if (typeof onFulfilled !== 'function' && this.state === states.FULFILLED ||
typeof onRejected !== 'function' && this.state === states.REJECTED) {
return this;
}
var promise = new Promise(INTERNAL);
if (this.state !== states.PENDING) {
var resolver = this.state === states.FULFILLED ? onFulfilled: onRejected;
unwrap(promise, resolver, this.outcome);
} else {
this.queue.push(new QueueItem(promise, onFulfilled, onRejected));
}
return promise;
};
},{"./INTERNAL":17,"./queueItem":22,"./resolveThenable":26,"./states":27,"./unwrap":29}],22:[function(require,module,exports){
'use strict';
var handlers = require('./handlers');
var unwrap = require('./unwrap');
module.exports = QueueItem;
function QueueItem(promise, onFulfilled, onRejected) {
this.promise = promise;
if (typeof onFulfilled === 'function') {
this.onFulfilled = onFulfilled;
this.callFulfilled = this.otherCallFulfilled;
}
if (typeof onRejected === 'function') {
this.onRejected = onRejected;
this.callRejected = this.otherCallRejected;
}
}
QueueItem.prototype.callFulfilled = function (value) {
handlers.resolve(this.promise, value);
};
QueueItem.prototype.otherCallFulfilled = function (value) {
unwrap(this.promise, this.onFulfilled, value);
};
QueueItem.prototype.callRejected = function (value) {
handlers.reject(this.promise, value);
};
QueueItem.prototype.otherCallRejected = function (value) {
unwrap(this.promise, this.onRejected, value);
};
},{"./handlers":19,"./unwrap":29}],23:[function(require,module,exports){
'use strict';
var Promise = require('./promise');
var reject = require('./reject');
var resolve = require('./resolve');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = race;
function race(iterable) {
if (Object.prototype.toString.call(iterable) !== '[object Array]') {
return reject(new TypeError('must be an array'));
}
var len = iterable.length;
var called = false;
if (!len) {
return resolve([]);
}
var resolved = 0;
var i = -1;
var promise = new Promise(INTERNAL);
while (++i < len) {
resolver(iterable[i]);
}
return promise;
function resolver(value) {
resolve(value).then(function (response) {
if (!called) {
called = true;
handlers.resolve(promise, response);
}
}, function (error) {
if (!called) {
called = true;
handlers.reject(promise, error);
}
});
}
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21,"./reject":24,"./resolve":25}],24:[function(require,module,exports){
'use strict';
var Promise = require('./promise');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = reject;
function reject(reason) {
var promise = new Promise(INTERNAL);
return handlers.reject(promise, reason);
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21}],25:[function(require,module,exports){
'use strict';
var Promise = require('./promise');
var INTERNAL = require('./INTERNAL');
var handlers = require('./handlers');
module.exports = resolve;
var FALSE = handlers.resolve(new Promise(INTERNAL), false);
var NULL = handlers.resolve(new Promise(INTERNAL), null);
var UNDEFINED = handlers.resolve(new Promise(INTERNAL), void 0);
var ZERO = handlers.resolve(new Promise(INTERNAL), 0);
var EMPTYSTRING = handlers.resolve(new Promise(INTERNAL), '');
function resolve(value) {
if (value) {
if (value instanceof Promise) {
return value;
}
return handlers.resolve(new Promise(INTERNAL), value);
}
var valueType = typeof value;
switch (valueType) {
case 'boolean':
return FALSE;
case 'undefined':
return UNDEFINED;
case 'object':
return NULL;
case 'number':
return ZERO;
case 'string':
return EMPTYSTRING;
}
}
},{"./INTERNAL":17,"./handlers":19,"./promise":21}],26:[function(require,module,exports){
'use strict';
var handlers = require('./handlers');
var tryCatch = require('./tryCatch');
function safelyResolveThenable(self, thenable) {
  // Either fulfill or reject; a synchronous throw from the thenable also rejects
var called = false;
function onError(value) {
if (called) {
return;
}
called = true;
handlers.reject(self, value);
}
function onSuccess(value) {
if (called) {
return;
}
called = true;
handlers.resolve(self, value);
}
function tryToUnwrap() {
thenable(onSuccess, onError);
}
var result = tryCatch(tryToUnwrap);
if (result.status === 'error') {
onError(result.value);
}
}
exports.safely = safelyResolveThenable;
},{"./handlers":19,"./tryCatch":28}],27:[function(require,module,exports){
// Lazy man's symbols for states
exports.REJECTED = ['REJECTED'];
exports.FULFILLED = ['FULFILLED'];
exports.PENDING = ['PENDING'];
},{}],28:[function(require,module,exports){
'use strict';
module.exports = tryCatch;
function tryCatch(func, value) {
var out = {};
try {
out.value = func(value);
out.status = 'success';
} catch (e) {
out.status = 'error';
out.value = e;
}
return out;
}
},{}],29:[function(require,module,exports){
'use strict';
var immediate = require('immediate');
var handlers = require('./handlers');
module.exports = unwrap;
function unwrap(promise, func, value) {
immediate(function () {
var returnValue;
try {
returnValue = func(value);
} catch (e) {
return handlers.reject(promise, e);
}
if (returnValue === promise) {
handlers.reject(promise, new TypeError('Cannot resolve promise with itself'));
} else {
handlers.resolve(promise, returnValue);
}
});
}
},{"./handlers":19,"immediate":30}],30:[function(require,module,exports){
'use strict';
var types = [
require('./nextTick'),
require('./mutation.js'),
require('./messageChannel'),
require('./stateChange'),
require('./timeout')
];
var draining;
var queue = [];
function drainQueue() {
  draining = true;
  var i, oldQueue;
  var len = queue.length;
  while (len) {
    // swap in a fresh queue so tasks scheduled while draining are
    // collected for the next pass rather than mutating the one in flight
    oldQueue = queue;
    queue = [];
    i = -1;
    while (++i < len) {
      oldQueue[i]();
    }
    len = queue.length;
  }
  draining = false;
}
var scheduleDrain;
var i = -1;
var len = types.length;
while (++ i < len) {
if (types[i] && types[i].test && types[i].test()) {
scheduleDrain = types[i].install(drainQueue);
break;
}
}
module.exports = immediate;
function immediate(task) {
if (queue.push(task) === 1 && !draining) {
scheduleDrain();
}
}
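// --- Editor's note: a minimal usage sketch, not part of the original bundle. ---
// Tasks passed to immediate() run asynchronously, after the current call stack
// unwinds, via whichever scheduling technique passed its test() above.
// `exampleImmediateUsage` is hypothetical and never called.
function exampleImmediateUsage(task) {
  immediate(function () {
    task(); // runs on a later turn, once drainQueue has been scheduled
  });
}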
},{"./messageChannel":31,"./mutation.js":32,"./nextTick":1,"./stateChange":33,"./timeout":34}],31:[function(require,module,exports){
(function (global){
'use strict';
exports.test = function () {
if (global.setImmediate) {
// we can only get here in IE10
    // which doesn't handle postMessage well
return false;
}
return typeof global.MessageChannel !== 'undefined';
};
exports.install = function (func) {
var channel = new global.MessageChannel();
channel.port1.onmessage = func;
return function () {
channel.port2.postMessage(0);
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],32:[function(require,module,exports){
(function (global){
'use strict';
//based off rsvp https://github.com/tildeio/rsvp.js
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js
var Mutation = global.MutationObserver || global.WebKitMutationObserver;
exports.test = function () {
return Mutation;
};
exports.install = function (handle) {
var called = 0;
var observer = new Mutation(handle);
var element = global.document.createTextNode('');
observer.observe(element, {
characterData: true
});
return function () {
element.data = (called = ++called % 2);
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],33:[function(require,module,exports){
(function (global){
'use strict';
exports.test = function () {
return 'document' in global && 'onreadystatechange' in global.document.createElement('script');
};
exports.install = function (handle) {
return function () {
// Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted
// into the document. Do so, thus queuing up the task. Remember to clean up once it's been called.
var scriptEl = global.document.createElement('script');
scriptEl.onreadystatechange = function () {
handle();
scriptEl.onreadystatechange = null;
scriptEl.parentNode.removeChild(scriptEl);
scriptEl = null;
};
global.document.documentElement.appendChild(scriptEl);
return handle;
};
};
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{}],34:[function(require,module,exports){
'use strict';
exports.test = function () {
return true;
};
exports.install = function (t) {
return function () {
setTimeout(t, 0);
};
};
},{}],35:[function(require,module,exports){
(function (process){
var defined = require('defined');
var createDefaultStream = require('./lib/default_stream');
var Test = require('./lib/test');
var createResult = require('./lib/results');
var through = require('through');
var canEmitExit = typeof process !== 'undefined' && process
&& typeof process.on === 'function' && process.browser !== true
;
var canExit = typeof process !== 'undefined' && process
&& typeof process.exit === 'function'
;
var nextTick = typeof setImmediate !== 'undefined'
? setImmediate
: process.nextTick
;
exports = module.exports = (function () {
var harness;
var lazyLoad = function () {
return getHarness().apply(this, arguments);
};
lazyLoad.only = function () {
return getHarness().only.apply(this, arguments);
};
lazyLoad.createStream = function (opts) {
if (!opts) opts = {};
if (!harness) {
var output = through();
getHarness({ stream: output, objectMode: opts.objectMode });
return output;
}
return harness.createStream(opts);
};
    return lazyLoad;
function getHarness (opts) {
if (!opts) opts = {};
opts.autoclose = !canEmitExit;
if (!harness) harness = createExitHarness(opts);
return harness;
}
})();
function createExitHarness (conf) {
if (!conf) conf = {};
var harness = createHarness({
autoclose: defined(conf.autoclose, false)
});
var stream = harness.createStream({ objectMode: conf.objectMode });
var es = stream.pipe(conf.stream || createDefaultStream());
if (canEmitExit) {
es.on('error', function (err) { harness._exitCode = 1 });
}
var ended = false;
stream.on('end', function () { ended = true });
if (conf.exit === false) return harness;
if (!canEmitExit || !canExit) return harness;
var _error;
process.on('uncaughtException', function (err) {
if (err && err.code === 'EPIPE' && err.errno === 'EPIPE'
&& err.syscall === 'write') return;
_error = err
throw err
})
process.on('exit', function (code) {
if (_error) {
return
}
if (!ended) {
var only = harness._results._only;
for (var i = 0; i < harness._tests.length; i++) {
var t = harness._tests[i];
if (only && t.name !== only) continue;
t._exit();
}
}
harness.close();
process.exit(code || harness._exitCode);
});
return harness;
}
exports.createHarness = createHarness;
exports.Test = Test;
exports.test = exports; // tap compat
exports.test.skip = Test.skip;
var exitInterval;
function createHarness (conf_) {
if (!conf_) conf_ = {};
var results = createResult();
if (conf_.autoclose !== false) {
results.once('done', function () { results.close() });
}
var test = function (name, conf, cb) {
var t = new Test(name, conf, cb);
test._tests.push(t);
(function inspectCode (st) {
st.on('test', function sub (st_) {
inspectCode(st_);
});
st.on('result', function (r) {
if (!r.ok) test._exitCode = 1
});
})(t);
results.push(t);
return t;
};
test._results = results;
test._tests = [];
test.createStream = function (opts) {
return results.createStream(opts);
};
var only = false;
test.only = function (name) {
if (only) throw new Error('there can only be one only test');
results.only(name);
only = true;
return test.apply(null, arguments);
};
test._exitCode = 0;
test.close = function () { results.close() };
return test;
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"./lib/default_stream":36,"./lib/results":37,"./lib/test":38,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"defined":42,"through":45}],36:[function(require,module,exports){
var through = require('through');
module.exports = function () {
var line = '';
var stream = through(write, flush);
return stream;
function write (buf) {
for (var i = 0; i < buf.length; i++) {
var c = typeof buf === 'string'
? buf.charAt(i)
: String.fromCharCode(buf[i])
;
if (c === '\n') flush();
else line += c;
}
}
function flush () {
try { console.log(line); }
catch (e) { stream.emit('error', e) }
line = '';
}
};
},{"through":45}],37:[function(require,module,exports){
(function (process){
var EventEmitter = require('events').EventEmitter;
var inherits = require('inherits');
var through = require('through');
var resumer = require('resumer');
var inspect = require('object-inspect');
var nextTick = typeof setImmediate !== 'undefined'
? setImmediate
: process.nextTick
;
module.exports = Results;
inherits(Results, EventEmitter);
function Results () {
if (!(this instanceof Results)) return new Results;
this.count = 0;
this.fail = 0;
this.pass = 0;
this._stream = through();
this.tests = [];
}
Results.prototype.createStream = function (opts) {
if (!opts) opts = {};
var self = this;
var output, testId = 0;
if (opts.objectMode) {
output = through();
self.on('_push', function ontest (t, extra) {
if (!extra) extra = {};
var id = testId++;
t.once('prerun', function () {
var row = {
type: 'test',
name: t.name,
id: id
};
if (extra.parent) {
row.parent = extra.parent;
}
output.queue(row);
});
t.on('test', function (st) {
ontest(st, { parent: id });
});
t.on('result', function (res) {
res.test = id;
res.type = 'assert';
output.queue(res);
});
t.on('end', function () {
output.queue({ type: 'end', test: id });
});
});
self.on('done', function () { output.queue(null) });
}
else {
output = resumer();
output.queue('TAP version 13\n');
self._stream.pipe(output);
}
nextTick(function next() {
var t;
while (t = getNextTest(self)) {
t.run();
if (!t.ended) return t.once('end', function(){ nextTick(next); });
}
self.emit('done');
});
return output;
};
Results.prototype.push = function (t) {
var self = this;
self.tests.push(t);
self._watch(t);
self.emit('_push', t);
};
Results.prototype.only = function (name) {
    if (this._only) {
        this.count ++;
        this.fail ++;
        this._stream.queue('not ok ' + this.count + ' already called .only()\n');
    }
this._only = name;
};
Results.prototype._watch = function (t) {
var self = this;
var write = function (s) { self._stream.queue(s) };
t.once('prerun', function () {
write('# ' + t.name + '\n');
});
t.on('result', function (res) {
if (typeof res === 'string') {
write('# ' + res + '\n');
return;
}
write(encodeResult(res, self.count + 1));
self.count ++;
if (res.ok) self.pass ++
else self.fail ++
});
t.on('test', function (st) { self._watch(st) });
};
Results.prototype.close = function () {
var self = this;
if (self.closed) self._stream.emit('error', new Error('ALREADY CLOSED'));
self.closed = true;
var write = function (s) { self._stream.queue(s) };
write('\n1..' + self.count + '\n');
write('# tests ' + self.count + '\n');
write('# pass ' + self.pass + '\n');
if (self.fail) write('# fail ' + self.fail + '\n')
else write('\n# ok\n')
self._stream.queue(null);
};
function encodeResult (res, count) {
var output = '';
output += (res.ok ? 'ok ' : 'not ok ') + count;
output += res.name ? ' ' + res.name.toString().replace(/\s+/g, ' ') : '';
if (res.skip) output += ' # SKIP';
else if (res.todo) output += ' # TODO';
output += '\n';
if (res.ok) return output;
var outer = ' ';
var inner = outer + ' ';
output += outer + '---\n';
output += inner + 'operator: ' + res.operator + '\n';
if (has(res, 'expected') || has(res, 'actual')) {
var ex = inspect(res.expected);
var ac = inspect(res.actual);
if (Math.max(ex.length, ac.length) > 65) {
output += inner + 'expected:\n' + inner + ' ' + ex + '\n';
output += inner + 'actual:\n' + inner + ' ' + ac + '\n';
}
else {
output += inner + 'expected: ' + ex + '\n';
output += inner + 'actual: ' + ac + '\n';
}
}
if (res.at) {
output += inner + 'at: ' + res.at + '\n';
}
if (res.operator === 'error' && res.actual && res.actual.stack) {
var lines = String(res.actual.stack).split('\n');
output += inner + 'stack:\n';
output += inner + ' ' + lines[0] + '\n';
for (var i = 1; i < lines.length; i++) {
output += inner + lines[i] + '\n';
}
}
output += outer + '...\n';
return output;
}
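// --- Editor's note: a minimal sketch, not part of the original bundle, showing
// the TAP-style text encodeResult produces for a failing assertion: a
// 'not ok N <name>' line followed by an indented diagnostic block with the
// operator, expected and actual values. `exampleEncodeResult` is hypothetical.
function exampleEncodeResult() {
    return encodeResult({
        ok : false,
        name : 'should be equal',
        operator : 'equal',
        expected : 1,
        actual : 2
    }, 1);
}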
function getNextTest (results) {
if (!results._only) {
return results.tests.shift();
}
do {
var t = results.tests.shift();
if (!t) continue;
if (results._only === t.name) {
return t;
}
} while (results.tests.length !== 0)
}
function has (obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
}
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"events":2,"inherits":16,"object-inspect":43,"resumer":44,"through":45}],38:[function(require,module,exports){
(function (process,__dirname){
var Stream = require('stream');
var deepEqual = require('deep-equal');
var defined = require('defined');
var path = require('path');
var inherits = require('inherits');
var EventEmitter = require('events').EventEmitter;
module.exports = Test;
var nextTick = typeof setImmediate !== 'undefined'
? setImmediate
: process.nextTick
;
inherits(Test, EventEmitter);
var getTestArgs = function (name_, opts_, cb_) {
var name = '(anonymous)';
var opts = {};
var cb;
for (var i = 0; i < arguments.length; i++) {
var arg = arguments[i];
var t = typeof arg;
if (t === 'string') {
name = arg;
}
else if (t === 'object') {
opts = arg || opts;
}
else if (t === 'function') {
cb = arg;
}
}
return { name: name, opts: opts, cb: cb };
};
function Test (name_, opts_, cb_) {
var self = this;
var args = getTestArgs(name_, opts_, cb_);
this.readable = true;
this.name = args.name || '(anonymous)';
this.assertCount = 0;
this.pendingCount = 0;
this._skip = args.opts.skip || false;
this._plan = undefined;
this._cb = args.cb;
this._progeny = [];
this._ok = true;
this.end = function () {
return Test.prototype.end.apply(self, arguments);
};
}
Test.prototype.run = function () {
if (!this._cb || this._skip) {
return this._end();
}
this.emit('prerun');
try {
this._cb(this);
}
catch (err) {
this.error(err);
this._end();
return;
}
this.emit('run');
};
Test.prototype.test = function (name, opts, cb) {
var self = this;
var t = new Test(name, opts, cb);
this._progeny.push(t);
this.pendingCount++;
this.emit('test', t);
t.on('prerun', function () {
self.assertCount++;
})
if (!self._pendingAsserts()) {
nextTick(function () {
self._end();
});
}
nextTick(function() {
if (!self._plan && self.pendingCount == self._progeny.length) {
self._end();
}
});
};
Test.prototype.comment = function (msg) {
this.emit('result', msg.trim().replace(/^#\s*/, ''));
};
Test.prototype.plan = function (n) {
this._plan = n;
this.emit('plan', n);
};
Test.prototype.end = function (err) {
var self = this;
if (arguments.length >= 1) {
this.ifError(err);
}
if (this.calledEnd) {
this.fail('.end() called twice');
}
this.calledEnd = true;
this._end();
};
Test.prototype._end = function (err) {
var self = this;
if (this._progeny.length) {
var t = this._progeny.shift();
t.on('end', function () { self._end() });
t.run();
return;
}
if (!this.ended) this.emit('end');
var pendingAsserts = this._pendingAsserts();
if (!this._planError && this._plan !== undefined && pendingAsserts) {
this._planError = true;
this.fail('plan != count', {
expected : this._plan,
actual : this.assertCount
});
}
this.ended = true;
};
Test.prototype._exit = function () {
if (this._plan !== undefined &&
!this._planError && this.assertCount !== this._plan) {
this._planError = true;
this.fail('plan != count', {
expected : this._plan,
actual : this.assertCount,
exiting : true
});
}
else if (!this.ended) {
this.fail('test exited without ending', {
exiting: true
});
}
};
Test.prototype._pendingAsserts = function () {
if (this._plan === undefined) {
return 1;
}
else {
return this._plan - (this._progeny.length + this.assertCount);
}
};
Test.prototype._assert = function assert (ok, opts) {
var self = this;
var extra = opts.extra || {};
var res = {
id : self.assertCount ++,
ok : Boolean(ok),
skip : defined(extra.skip, opts.skip),
name : defined(extra.message, opts.message, '(unnamed assert)'),
operator : defined(extra.operator, opts.operator)
};
if (has(opts, 'actual') || has(extra, 'actual')) {
res.actual = defined(extra.actual, opts.actual);
}
if (has(opts, 'expected') || has(extra, 'expected')) {
res.expected = defined(extra.expected, opts.expected);
}
this._ok = Boolean(this._ok && ok);
if (!ok) {
res.error = defined(extra.error, opts.error, new Error(res.name));
}
var e = new Error('exception');
var err = (e.stack || '').split('\n');
var dir = path.dirname(__dirname) + '/';
for (var i = 0; i < err.length; i++) {
var m = /^\s*\bat\s+(.+)/.exec(err[i]);
if (!m) continue;
var s = m[1].split(/\s+/);
var filem = /(\/[^:\s]+:(\d+)(?::(\d+))?)/.exec(s[1]);
if (!filem) {
filem = /(\/[^:\s]+:(\d+)(?::(\d+))?)/.exec(s[3]);
if (!filem) continue;
}
if (filem[1].slice(0, dir.length) === dir) continue;
res.functionName = s[0];
res.file = filem[1];
res.line = Number(filem[2]);
if (filem[3]) res.column = filem[3];
res.at = m[1];
break;
}
self.emit('result', res);
var pendingAsserts = self._pendingAsserts();
if (!pendingAsserts) {
if (extra.exiting) {
self._end();
} else {
nextTick(function () {
self._end();
});
}
}
if (!self._planError && pendingAsserts < 0) {
self._planError = true;
self.fail('plan != count', {
expected : self._plan,
actual : self._plan - pendingAsserts
});
}
};
Test.prototype.fail = function (msg, extra) {
this._assert(false, {
message : msg,
operator : 'fail',
extra : extra
});
};
Test.prototype.pass = function (msg, extra) {
this._assert(true, {
message : msg,
operator : 'pass',
extra : extra
});
};
Test.prototype.skip = function (msg, extra) {
this._assert(true, {
message : msg,
operator : 'skip',
skip : true,
extra : extra
});
};
Test.prototype.ok
= Test.prototype['true']
= Test.prototype.assert
= function (value, msg, extra) {
this._assert(value, {
message : msg,
operator : 'ok',
expected : true,
actual : value,
extra : extra
});
};
Test.prototype.notOk
= Test.prototype['false']
= Test.prototype.notok
= function (value, msg, extra) {
this._assert(!value, {
message : msg,
operator : 'notOk',
expected : false,
actual : value,
extra : extra
});
};
Test.prototype.error
= Test.prototype.ifError
= Test.prototype.ifErr
= Test.prototype.iferror
= function (err, msg, extra) {
this._assert(!err, {
message : defined(msg, String(err)),
operator : 'error',
actual : err,
extra : extra
});
};
Test.prototype.equal
= Test.prototype.equals
= Test.prototype.isEqual
= Test.prototype.is
= Test.prototype.strictEqual
= Test.prototype.strictEquals
= function (a, b, msg, extra) {
this._assert(a === b, {
message : defined(msg, 'should be equal'),
operator : 'equal',
actual : a,
expected : b,
extra : extra
});
};
Test.prototype.notEqual
= Test.prototype.notEquals
= Test.prototype.notStrictEqual
= Test.prototype.notStrictEquals
= Test.prototype.isNotEqual
= Test.prototype.isNot
= Test.prototype.not
= Test.prototype.doesNotEqual
= Test.prototype.isInequal
= function (a, b, msg, extra) {
this._assert(a !== b, {
message : defined(msg, 'should not be equal'),
operator : 'notEqual',
actual : a,
notExpected : b,
extra : extra
});
};
Test.prototype.deepEqual
= Test.prototype.deepEquals
= Test.prototype.isEquivalent
= Test.prototype.same
= function (a, b, msg, extra) {
this._assert(deepEqual(a, b, { strict: true }), {
message : defined(msg, 'should be equivalent'),
operator : 'deepEqual',
actual : a,
expected : b,
extra : extra
});
};
Test.prototype.deepLooseEqual
= Test.prototype.looseEqual
= Test.prototype.looseEquals
= function (a, b, msg, extra) {
this._assert(deepEqual(a, b), {
message : defined(msg, 'should be equivalent'),
operator : 'deepLooseEqual',
actual : a,
expected : b,
extra : extra
});
};
Test.prototype.notDeepEqual
= Test.prototype.notEquivalent
= Test.prototype.notDeeply
= Test.prototype.notSame
= Test.prototype.isNotDeepEqual
= Test.prototype.isNotDeeply
= Test.prototype.isNotEquivalent
= Test.prototype.isInequivalent
= function (a, b, msg, extra) {
this._assert(!deepEqual(a, b, { strict: true }), {
message : defined(msg, 'should not be equivalent'),
operator : 'notDeepEqual',
actual : a,
notExpected : b,
extra : extra
});
};
Test.prototype.notDeepLooseEqual
= Test.prototype.notLooseEqual
= Test.prototype.notLooseEquals
= function (a, b, msg, extra) {
    this._assert(!deepEqual(a, b), {
        message : defined(msg, 'should not be equivalent'),
        operator : 'notDeepLooseEqual',
        actual : a,
        notExpected : b,
extra : extra
});
};
Test.prototype['throws'] = function (fn, expected, msg, extra) {
if (typeof expected === 'string') {
msg = expected;
expected = undefined;
}
var caught = undefined;
try {
fn();
}
catch (err) {
caught = { error : err };
var message = err.message;
delete err.message;
err.message = message;
}
var passed = caught;
if (expected instanceof RegExp) {
passed = expected.test(caught && caught.error);
expected = String(expected);
}
this._assert(passed, {
message : defined(msg, 'should throw'),
operator : 'throws',
actual : caught && caught.error,
expected : expected,
error: !passed && caught && caught.error,
extra : extra
});
};
Test.prototype.doesNotThrow = function (fn, expected, msg, extra) {
if (typeof expected === 'string') {
msg = expected;
expected = undefined;
}
var caught = undefined;
try {
fn();
}
catch (err) {
caught = { error : err };
}
this._assert(!caught, {
message : defined(msg, 'should not throw'),
operator : 'throws',
actual : caught && caught.error,
expected : expected,
error : caught && caught.error,
extra : extra
});
};
function has (obj, prop) {
return Object.prototype.hasOwnProperty.call(obj, prop);
}
Test.skip = function (name_, _opts, _cb) {
var args = getTestArgs.apply(null, arguments);
args.opts.skip = true;
return Test(args.name, args.opts, args.cb);
};
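// --- Editor's note: a minimal usage sketch, not part of the original bundle. ---
// It drives the Test class directly: plan two assertions, make them, and the test
// ends itself once the planned count is reached. `exampleTestUsage` is hypothetical
// and never called by the bundle.
function exampleTestUsage() {
    var t = new Test('addition', function (t) {
        t.plan(2);
        t.equal(1 + 1, 2, 'one plus one is two');
        t.ok(true, 'truthy value passes');
    });
    t.run();
    return t;
}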
// vim: set softtabstop=4 shiftwidth=4:
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),"/../../node_modules/tape/lib")
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"deep-equal":39,"defined":42,"events":2,"inherits":16,"path":7,"stream":9}],39:[function(require,module,exports){
var pSlice = Array.prototype.slice;
var objectKeys = require('./lib/keys.js');
var isArguments = require('./lib/is_arguments.js');
var deepEqual = module.exports = function (actual, expected, opts) {
if (!opts) opts = {};
// 7.1. All identical values are equivalent, as determined by ===.
if (actual === expected) {
return true;
} else if (actual instanceof Date && expected instanceof Date) {
return actual.getTime() === expected.getTime();
// 7.3. Other pairs that do not both pass typeof value == 'object',
// equivalence is determined by ==.
} else if (typeof actual != 'object' && typeof expected != 'object') {
return opts.strict ? actual === expected : actual == expected;
// 7.4. For all other Object pairs, including Array objects, equivalence is
// determined by having the same number of owned properties (as verified
// with Object.prototype.hasOwnProperty.call), the same set of keys
// (although not necessarily the same order), equivalent values for every
// corresponding key, and an identical 'prototype' property. Note: this
// accounts for both named and indexed properties on Arrays.
} else {
return objEquiv(actual, expected, opts);
}
}
function isUndefinedOrNull(value) {
return value === null || value === undefined;
}
function isBuffer (x) {
if (!x || typeof x !== 'object' || typeof x.length !== 'number') return false;
if (typeof x.copy !== 'function' || typeof x.slice !== 'function') {
return false;
}
if (x.length > 0 && typeof x[0] !== 'number') return false;
return true;
}
function objEquiv(a, b, opts) {
var i, key;
if (isUndefinedOrNull(a) || isUndefinedOrNull(b))
return false;
// an identical 'prototype' property.
if (a.prototype !== b.prototype) return false;
//~~~I've managed to break Object.keys through screwy arguments passing.
// Converting to array solves the problem.
if (isArguments(a)) {
if (!isArguments(b)) {
return false;
}
a = pSlice.call(a);
b = pSlice.call(b);
return deepEqual(a, b, opts);
}
if (isBuffer(a)) {
if (!isBuffer(b)) {
return false;
}
if (a.length !== b.length) return false;
for (i = 0; i < a.length; i++) {
if (a[i] !== b[i]) return false;
}
return true;
}
try {
var ka = objectKeys(a),
kb = objectKeys(b);
} catch (e) {//happens when one is a string literal and the other isn't
return false;
}
// having the same number of owned properties (keys incorporates
// hasOwnProperty)
if (ka.length != kb.length)
return false;
//the same set of keys (although not necessarily the same order),
ka.sort();
kb.sort();
//~~~cheap key test
for (i = ka.length - 1; i >= 0; i--) {
if (ka[i] != kb[i])
return false;
}
//equivalent values for every corresponding key, and
//~~~possibly expensive deep test
for (i = ka.length - 1; i >= 0; i--) {
key = ka[i];
if (!deepEqual(a[key], b[key], opts)) return false;
}
return true;
}
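// Example (added for illustration, not part of the original deep-equal
// module): by default comparison is loose (==), so numeric and string forms
// of the same value compare equal unless opts.strict is set. Defined but
// never called here.
function exampleDeepEqualUsage() {
  var a = { x: 1, y: [1, 2, 3] };
  var b = { x: '1', y: [1, 2, 3] };
  return {
    loose: deepEqual(a, b),                   // true  (1 == '1')
    strict: deepEqual(a, b, { strict: true }) // false (1 !== '1')
  };
}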
},{"./lib/is_arguments.js":40,"./lib/keys.js":41}],40:[function(require,module,exports){
var supportsArgumentsClass = (function(){
return Object.prototype.toString.call(arguments)
})() == '[object Arguments]';
exports = module.exports = supportsArgumentsClass ? supported : unsupported;
exports.supported = supported;
function supported(object) {
return Object.prototype.toString.call(object) == '[object Arguments]';
};
exports.unsupported = unsupported;
function unsupported(object){
return object &&
typeof object == 'object' &&
typeof object.length == 'number' &&
Object.prototype.hasOwnProperty.call(object, 'callee') &&
!Object.prototype.propertyIsEnumerable.call(object, 'callee') ||
false;
};
},{}],41:[function(require,module,exports){
exports = module.exports = typeof Object.keys === 'function'
? Object.keys : shim;
exports.shim = shim;
function shim (obj) {
var keys = [];
for (var key in obj) keys.push(key);
return keys;
}
},{}],42:[function(require,module,exports){
module.exports = function () {
for (var i = 0; i < arguments.length; i++) {
if (arguments[i] !== undefined) return arguments[i];
}
};
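// Example (added for illustration, not part of the original module): unlike
// `a || b`, this helper only skips undefined, so falsy values such as 0, ''
// and null are still returned. Defined but never called here.
function exampleDefinedUsage() {
  var defined = module.exports;
  return [
    defined(undefined, 0, 1),     // 0
    defined(undefined, undefined) // undefined
  ];
}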
},{}],43:[function(require,module,exports){
module.exports = function inspect_ (obj, opts, depth, seen) {
if (!opts) opts = {};
var maxDepth = opts.depth === undefined ? 5 : opts.depth;
if (depth === undefined) depth = 0;
if (depth > maxDepth && maxDepth > 0) return '...';
if (seen === undefined) seen = [];
else if (indexOf(seen, obj) >= 0) {
return '[Circular]';
}
function inspect (value, from) {
if (from) {
seen = seen.slice();
seen.push(from);
}
return inspect_(value, opts, depth + 1, seen);
}
if (typeof obj === 'string') {
return inspectString(obj);
}
else if (typeof obj === 'function') {
var name = nameOf(obj);
return '[Function' + (name ? ': ' + name : '') + ']';
}
else if (obj === null) {
return 'null';
}
else if (isElement(obj)) {
var s = '<' + String(obj.nodeName).toLowerCase();
var attrs = obj.attributes || [];
for (var i = 0; i < attrs.length; i++) {
s += ' ' + attrs[i].name + '="' + quote(attrs[i].value) + '"';
}
s += '>';
if (obj.childNodes && obj.childNodes.length) s += '...';
s += '</' + String(obj.tagName).toLowerCase() + '>';
return s;
}
else if (isArray(obj)) {
if (obj.length === 0) return '[]';
var xs = Array(obj.length);
for (var i = 0; i < obj.length; i++) {
xs[i] = has(obj, i) ? inspect(obj[i], obj) : '';
}
return '[ ' + xs.join(', ') + ' ]';
}
else if (typeof obj === 'object' && typeof obj.inspect === 'function') {
return obj.inspect();
}
else if (typeof obj === 'object' && !isDate(obj) && !isRegExp(obj)) {
var xs = [], keys = [];
for (var key in obj) {
if (has(obj, key)) keys.push(key);
}
keys.sort();
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (/[^\w$]/.test(key)) {
xs.push(inspect(key) + ': ' + inspect(obj[key], obj));
}
else xs.push(key + ': ' + inspect(obj[key], obj));
}
if (xs.length === 0) return '{}';
return '{ ' + xs.join(', ') + ' }';
}
else return String(obj);
};
function quote (s) {
return String(s).replace(/"/g, '&quot;');
}
function isArray (obj) {
return {}.toString.call(obj) === '[object Array]';
}
function isDate (obj) {
return {}.toString.call(obj) === '[object Date]';
}
function isRegExp (obj) {
return {}.toString.call(obj) === '[object RegExp]';
}
function has (obj, key) {
if (!{}.hasOwnProperty) return key in obj;
return {}.hasOwnProperty.call(obj, key);
}
function nameOf (f) {
if (f.name) return f.name;
var m = f.toString().match(/^function\s*([\w$]+)/);
if (m) return m[1];
}
function indexOf (xs, x) {
if (xs.indexOf) return xs.indexOf(x);
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
}
function isElement (x) {
if (!x || typeof x !== 'object') return false;
if (typeof HTMLElement !== 'undefined') {
return x instanceof HTMLElement;
}
else return typeof x.nodeName === 'string'
&& typeof x.getAttribute === 'function'
;
}
function inspectString (str) {
var s = str.replace(/(['\\])/g, '\\$1').replace(/[\x00-\x1f]/g, lowbyte);
return "'" + s + "'";
function lowbyte (c) {
var n = c.charCodeAt(0);
var x = { 8: 'b', 9: 't', 10: 'n', 12: 'f', 13: 'r' }[n];
if (x) return '\\' + x;
return '\\x' + (n < 0x10 ? '0' : '') + n.toString(16);
}
}
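// Example (added for illustration, not part of the original module): how the
// inspector above renders a few common values. Defined but never called here.
function exampleInspectUsage() {
  var inspect = module.exports; // the inspect_ function defined above
  return [
    inspect('a\nb'),             // "'a\\nb'" (control characters escaped)
    inspect([1, 2, 3]),          // '[ 1, 2, 3 ]'
    inspect({ 'weird key': 1 })  // "{ 'weird key': 1 }" (keys needing quotes are inspected)
  ];
}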
},{}],44:[function(require,module,exports){
(function (process){
var through = require('through');
var nextTick = typeof setImmediate !== 'undefined'
? setImmediate
: process.nextTick
;
module.exports = function (write, end) {
var tr = through(write, end);
tr.pause();
var resume = tr.resume;
var pause = tr.pause;
var paused = false;
tr.pause = function () {
paused = true;
return pause.apply(this, arguments);
};
tr.resume = function () {
paused = false;
return resume.apply(this, arguments);
};
nextTick(function () {
if (!paused) tr.resume();
});
return tr;
};
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"through":45}],45:[function(require,module,exports){
(function (process){
var Stream = require('stream')
// through
//
// a stream that does nothing but re-emit the input.
// useful for aggregating a series of changing (but not ending) streams into one stream
exports = module.exports = through
through.through = through
//create a readable writable stream.
function through (write, end, opts) {
write = write || function (data) { this.queue(data) }
end = end || function () { this.queue(null) }
var ended = false, destroyed = false, buffer = [], _ended = false
var stream = new Stream()
stream.readable = stream.writable = true
stream.paused = false
// stream.autoPause = !(opts && opts.autoPause === false)
stream.autoDestroy = !(opts && opts.autoDestroy === false)
stream.write = function (data) {
write.call(this, data)
return !stream.paused
}
function drain() {
while(buffer.length && !stream.paused) {
var data = buffer.shift()
if(null === data)
return stream.emit('end')
else
stream.emit('data', data)
}
}
stream.queue = stream.push = function (data) {
// console.error(ended)
if(_ended) return stream
if(data == null) _ended = true
buffer.push(data)
drain()
return stream
}
//this will be registered as the first 'end' listener
//must call destroy next tick, to make sure we're after any
//stream piped from here.
//this is only a problem if end is not emitted synchronously.
//a nicer way to do this is to make sure this is the last listener for 'end'
stream.on('end', function () {
stream.readable = false
if(!stream.writable && stream.autoDestroy)
process.nextTick(function () {
stream.destroy()
})
})
function _end () {
stream.writable = false
end.call(stream)
if(!stream.readable && stream.autoDestroy)
stream.destroy()
}
stream.end = function (data) {
if(ended) return
ended = true
if(arguments.length) stream.write(data)
_end() // will emit or queue
return stream
}
stream.destroy = function () {
if(destroyed) return
destroyed = true
ended = true
buffer.length = 0
stream.writable = stream.readable = false
stream.emit('close')
return stream
}
stream.pause = function () {
if(stream.paused) return
stream.paused = true
return stream
}
stream.resume = function () {
if(stream.paused) {
stream.paused = false
stream.emit('resume')
}
drain()
//may have become paused again,
//as drain emits 'data'.
if(!stream.paused)
stream.emit('drain')
return stream
}
return stream
}
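// Example (added for illustration, not part of the original through module):
// a minimal uppercasing pass-through stream built with the function above.
// Defined but never constructed here.
function exampleThroughUsage() {
  var tr = through(function write(data) {
    this.queue(String(data).toUpperCase())
  }, function end() {
    this.queue(null) // signal end of stream
  })
  tr.on('data', function (chunk) { /* consume transformed chunks */ })
  return tr
}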
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"))
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"stream":9}],46:[function(require,module,exports){
// UAParser.js v0.6.2
// Lightweight JavaScript-based User-Agent string parser
// https://github.com/faisalman/ua-parser-js
//
// Copyright © 2012-2013 Faisalman <[email protected]>
// Dual licensed under GPLv2 & MIT
(function (window, undefined) {
'use strict';
//////////////
// Constants
/////////////
var EMPTY = '',
UNKNOWN = '?',
FUNC_TYPE = 'function',
UNDEF_TYPE = 'undefined',
OBJ_TYPE = 'object',
MAJOR = 'major',
MODEL = 'model',
NAME = 'name',
TYPE = 'type',
VENDOR = 'vendor',
VERSION = 'version',
ARCHITECTURE= 'architecture',
CONSOLE = 'console',
MOBILE = 'mobile',
TABLET = 'tablet';
///////////
// Helper
//////////
var util = {
has : function (str1, str2) {
return str2.toLowerCase().indexOf(str1.toLowerCase()) !== -1;
},
lowerize : function (str) {
return str.toLowerCase();
}
};
///////////////
// Map helper
//////////////
var mapper = {
rgx : function () {
// loop through all regexes maps
for (var result, i = 0, j, k, p, q, matches, match, args = arguments; i < args.length; i += 2) {
var regex = args[i], // even sequence (0,2,4,..)
props = args[i + 1]; // odd sequence (1,3,5,..)
// construct object barebones
if (typeof(result) === UNDEF_TYPE) {
result = {};
for (p in props) {
q = props[p];
if (typeof(q) === OBJ_TYPE) {
result[q[0]] = undefined;
} else {
result[q] = undefined;
}
}
}
// try matching uastring with regexes
for (j = k = 0; j < regex.length; j++) {
matches = regex[j].exec(this.getUA());
if (!!matches) {
for (p in props) {
match = matches[++k];
q = props[p];
// check if given property is actually array
if (typeof(q) === OBJ_TYPE && q.length > 0) {
if (q.length == 2) {
if (typeof(q[1]) == FUNC_TYPE) {
// assign modified match
result[q[0]] = q[1].call(this, match);
} else {
// assign given value, ignore regex match
result[q[0]] = q[1];
}
} else if (q.length == 3) {
// check whether function or regex
if (typeof(q[1]) === FUNC_TYPE && !(q[1].exec && q[1].test)) {
// call function (usually string mapper)
result[q[0]] = match ? q[1].call(this, match, q[2]) : undefined;
} else {
// sanitize match using given regex
result[q[0]] = match ? match.replace(q[1], q[2]) : undefined;
}
} else if (q.length == 4) {
result[q[0]] = match ? q[3].call(this, match.replace(q[1], q[2])) : undefined;
}
} else {
result[q] = match ? match : undefined;
}
}
break;
}
}
if(!!matches) break; // break the loop immediately if match found
}
return result;
},
str : function (str, map) {
for (var i in map) {
// check if array
if (typeof(map[i]) === OBJ_TYPE && map[i].length > 0) {
for (var j = 0; j < map[i].length; j++) {
if (util.has(map[i][j], str)) {
return (i === UNKNOWN) ? undefined : i;
}
}
} else if (util.has(map[i], str)) {
return (i === UNKNOWN) ? undefined : i;
}
}
return str;
}
};
///////////////
// String map
//////////////
var maps = {
browser : {
oldsafari : {
major : {
'1' : ['/8', '/1', '/3'],
'2' : '/4',
'?' : '/'
},
version : {
'1.0' : '/8',
'1.2' : '/1',
'1.3' : '/3',
'2.0' : '/412',
'2.0.2' : '/416',
'2.0.3' : '/417',
'2.0.4' : '/419',
'?' : '/'
}
}
},
device : {
sprint : {
model : {
'Evo Shift 4G' : '7373KT'
},
vendor : {
'HTC' : 'APA',
'Sprint' : 'Sprint'
}
}
},
os : {
windows : {
version : {
'ME' : '4.90',
'NT 3.11' : 'NT3.51',
'NT 4.0' : 'NT4.0',
'2000' : 'NT 5.0',
'XP' : ['NT 5.1', 'NT 5.2'],
'Vista' : 'NT 6.0',
'7' : 'NT 6.1',
'8' : 'NT 6.2',
'RT' : 'ARM'
}
}
}
};
//////////////
// Regex map
/////////////
var regexes = {
browser : [[
// Presto based
/(opera\smini)\/((\d+)?[\w\.-]+)/i, // Opera Mini
/(opera\s[mobiletab]+).+version\/((\d+)?[\w\.-]+)/i, // Opera Mobi/Tablet
/(opera).+version\/((\d+)?[\w\.]+)/i, // Opera > 9.80
/(opera)[\/\s]+((\d+)?[\w\.]+)/i // Opera < 9.80
], [NAME, VERSION, MAJOR], [
/\s(opr)\/((\d+)?[\w\.]+)/i // Opera Webkit
], [[NAME, 'Opera'], VERSION, MAJOR], [
// Mixed
/(kindle)\/((\d+)?[\w\.]+)/i, // Kindle
/(lunascape|maxthon|netfront|jasmine|blazer)[\/\s]?((\d+)?[\w\.]+)*/i,
// Lunascape/Maxthon/Netfront/Jasmine/Blazer
// Trident based
/(avant\s|iemobile|slim|baidu)(?:browser)?[\/\s]?((\d+)?[\w\.]*)/i,
// Avant/IEMobile/SlimBrowser/Baidu
/(?:ms|\()(ie)\s((\d+)?[\w\.]+)/i, // Internet Explorer
// Webkit/KHTML based
/(rekonq)((?:\/)[\w\.]+)*/i, // Rekonq
/(chromium|flock|rockmelt|midori|epiphany|silk|skyfire|ovibrowser|bolt|iron)\/((\d+)?[\w\.-]+)/i
// Chromium/Flock/RockMelt/Midori/Epiphany/Silk/Skyfire/Bolt/Iron
], [NAME, VERSION, MAJOR], [
/(trident).+rv[:\s]((\d+)?[\w\.]+).+like\sgecko/i // IE11
], [[NAME, 'IE'], VERSION, MAJOR], [
/(yabrowser)\/((\d+)?[\w\.]+)/i // Yandex
], [[NAME, 'Yandex'], VERSION, MAJOR], [
/(comodo_dragon)\/((\d+)?[\w\.]+)/i // Comodo Dragon
], [[NAME, /_/g, ' '], VERSION, MAJOR], [
/(chrome|omniweb|arora|[tizenoka]{5}\s?browser)\/v?((\d+)?[\w\.]+)/i
// Chrome/OmniWeb/Arora/Tizen/Nokia
], [NAME, VERSION, MAJOR], [
/(dolfin)\/((\d+)?[\w\.]+)/i // Dolphin
], [[NAME, 'Dolphin'], VERSION, MAJOR], [
/((?:android.+)crmo|crios)\/((\d+)?[\w\.]+)/i // Chrome for Android/iOS
], [[NAME, 'Chrome'], VERSION, MAJOR], [
/version\/((\d+)?[\w\.]+).+?mobile\/\w+\s(safari)/i // Mobile Safari
], [VERSION, MAJOR, [NAME, 'Mobile Safari']], [
/version\/((\d+)?[\w\.]+).+?(mobile\s?safari|safari)/i // Safari & Safari Mobile
], [VERSION, MAJOR, NAME], [
/webkit.+?(mobile\s?safari|safari)((\/[\w\.]+))/i // Safari < 3.0
], [NAME, [MAJOR, mapper.str, maps.browser.oldsafari.major], [VERSION, mapper.str, maps.browser.oldsafari.version]], [
/(konqueror)\/((\d+)?[\w\.]+)/i, // Konqueror
/(webkit|khtml)\/((\d+)?[\w\.]+)/i
], [NAME, VERSION, MAJOR], [
// Gecko based
/(navigator|netscape)\/((\d+)?[\w\.-]+)/i // Netscape
], [[NAME, 'Netscape'], VERSION, MAJOR], [
/(swiftfox)/i, // Swiftfox
/(icedragon|iceweasel|camino|chimera|fennec|maemo\sbrowser|minimo|conkeror)[\/\s]?((\d+)?[\w\.\+]+)/i,
// IceDragon/Iceweasel/Camino/Chimera/Fennec/Maemo/Minimo/Conkeror
/(firefox|seamonkey|k-meleon|icecat|iceape|firebird|phoenix)\/((\d+)?[\w\.-]+)/i,
// Firefox/SeaMonkey/K-Meleon/IceCat/IceApe/Firebird/Phoenix
/(mozilla)\/((\d+)?[\w\.]+).+rv\:.+gecko\/\d+/i, // Mozilla
// Other
/(uc\s?browser|polaris|lynx|dillo|icab|doris|amaya|w3m|netsurf|qqbrowser)[\/\s]?((\d+)?[\w\.]+)/i,
// UCBrowser/Polaris/Lynx/Dillo/iCab/Doris/Amaya/w3m/NetSurf/QQBrowser
/(links)\s\(((\d+)?[\w\.]+)/i, // Links
/(gobrowser)\/?((\d+)?[\w\.]+)*/i, // GoBrowser
/(ice\s?browser)\/v?((\d+)?[\w\._]+)/i, // ICE Browser
/(mosaic)[\/\s]((\d+)?[\w\.]+)/i // Mosaic
], [NAME, VERSION, MAJOR]
],
cpu : [[
/(?:(amd|x(?:(?:86|64)[_-])?|wow|win)64)[;\)]/i // AMD64
], [[ARCHITECTURE, 'amd64']], [
/((?:i[346]|x)86)[;\)]/i // IA32
], [[ARCHITECTURE, 'ia32']], [
// PocketPC mistakenly identified as PowerPC
/windows\s(ce|mobile);\sppc;/i
], [[ARCHITECTURE, 'arm']], [
/((?:ppc|powerpc)(?:64)?)(?:\smac|;|\))/i // PowerPC
], [[ARCHITECTURE, /ower/, '', util.lowerize]], [
/(sun4\w)[;\)]/i // SPARC
], [[ARCHITECTURE, 'sparc']], [
/(ia64(?=;)|68k(?=\))|arm(?=v\d+;)|(?:irix|mips|sparc)(?:64)?(?=;)|pa-risc)/i
// IA64, 68K, ARM, IRIX, MIPS, SPARC, PA-RISC
], [ARCHITECTURE, util.lowerize]
],
device : [[
/\((ipad|playbook);[\w\s\);-]+(rim|apple)/i // iPad/PlayBook
], [MODEL, VENDOR, [TYPE, TABLET]], [
/(hp).+(touchpad)/i, // HP TouchPad
/(kindle)\/([\w\.]+)/i, // Kindle
/\s(nook)[\w\s]+build\/(\w+)/i, // Nook
/(dell)\s(strea[kpr\s\d]*[\dko])/i // Dell Streak
], [VENDOR, MODEL, [TYPE, TABLET]], [
/\((ip[honed]+);.+(apple)/i // iPod/iPhone
], [MODEL, VENDOR, [TYPE, MOBILE]], [
/(blackberry)[\s-]?(\w+)/i, // BlackBerry
/(blackberry|benq|palm(?=\-)|sonyericsson|acer|asus|dell|huawei|meizu|motorola)[\s_-]?([\w-]+)*/i,
// BenQ/Palm/Sony-Ericsson/Acer/Asus/Dell/Huawei/Meizu/Motorola
/(hp)\s([\w\s]+\w)/i, // HP iPAQ
/(asus)-?(\w+)/i // Asus
], [VENDOR, MODEL, [TYPE, MOBILE]], [
/\((bb10);\s(\w+)/i // BlackBerry 10
], [[VENDOR, 'BlackBerry'], MODEL, [TYPE, MOBILE]], [
/android.+((transfo[prime\s]{4,10}\s\w+|eeepc|slider\s\w+))/i // Asus Tablets
], [[VENDOR, 'Asus'], MODEL, [TYPE, TABLET]], [
/(sony)\s(tablet\s[ps])/i // Sony Tablets
], [VENDOR, MODEL, [TYPE, TABLET]], [
/(nintendo)\s([wids3u]+)/i // Nintendo
], [VENDOR, MODEL, [TYPE, CONSOLE]], [
/((playstation)\s[3portablevi]+)/i // Playstation
], [[VENDOR, 'Sony'], MODEL, [TYPE, CONSOLE]], [
/(sprint\s(\w+))/i // Sprint Phones
], [[VENDOR, mapper.str, maps.device.sprint.vendor], [MODEL, mapper.str, maps.device.sprint.model], [TYPE, MOBILE]], [
/(htc)[;_\s-]+([\w\s]+(?=\))|\w+)*/i, // HTC
/(zte)-(\w+)*/i, // ZTE
/(alcatel|geeksphone|huawei|lenovo|nexian|panasonic|(?=;\s)sony)[_\s-]?([\w-]+)*/i
// Alcatel/GeeksPhone/Huawei/Lenovo/Nexian/Panasonic/Sony
], [VENDOR, [MODEL, /_/g, ' '], [TYPE, MOBILE]], [
// Motorola
/\s((milestone|droid(?:[2-4x]|\s(?:bionic|x2|pro|razr))?(:?\s4g)?))[\w\s]+build\//i,
/(mot)[\s-]?(\w+)*/i
], [[VENDOR, 'Motorola'], MODEL, [TYPE, MOBILE]], [
/android.+\s((mz60\d|xoom[\s2]{0,2}))\sbuild\//i
], [[VENDOR, 'Motorola'], MODEL, [TYPE, TABLET]], [
/android.+((sch-i[89]0\d|shw-m380s|gt-p\d{4}|gt-n8000|sgh-t8[56]9))/i
], [[VENDOR, 'Samsung'], MODEL, [TYPE, TABLET]], [ // Samsung
/((s[cgp]h-\w+|gt-\w+|galaxy\snexus))/i,
/(sam[sung]*)[\s-]*(\w+-?[\w-]*)*/i,
/sec-((sgh\w+))/i
], [[VENDOR, 'Samsung'], MODEL, [TYPE, MOBILE]], [
/(sie)-(\w+)*/i // Siemens
], [[VENDOR, 'Siemens'], MODEL, [TYPE, MOBILE]], [
/(maemo|nokia).*(n900|lumia\s\d+)/i, // Nokia
/(nokia)[\s_-]?([\w-]+)*/i
], [[VENDOR, 'Nokia'], MODEL, [TYPE, MOBILE]], [
/android\s3\.[\s\w-;]{10}((a\d{3}))/i // Acer
], [[VENDOR, 'Acer'], MODEL, [TYPE, TABLET]], [
/android\s3\.[\s\w-;]{10}(lg?)-([06cv9]{3,4})/i // LG
], [[VENDOR, 'LG'], MODEL, [TYPE, TABLET]], [
/((nexus\s4))/i,
/(lg)[e;\s-\/]+(\w+)*/i
], [[VENDOR, 'LG'], MODEL, [TYPE, MOBILE]], [
/(mobile|tablet);.+rv\:.+gecko\//i // Unidentifiable
], [TYPE, VENDOR, MODEL]
],
engine : [[
/(presto)\/([\w\.]+)/i, // Presto
/(webkit|trident|netfront|netsurf|amaya|lynx|w3m)\/([\w\.]+)/i, // WebKit/Trident/NetFront/NetSurf/Amaya/Lynx/w3m
/(khtml|tasman|links)[\/\s]\(?([\w\.]+)/i, // KHTML/Tasman/Links
/(icab)[\/\s]([23]\.[\d\.]+)/i // iCab
], [NAME, VERSION], [
/rv\:([\w\.]+).*(gecko)/i // Gecko
], [VERSION, NAME]
],
os : [[
// Windows based
/(windows)\snt\s6\.2;\s(arm)/i, // Windows RT
/(windows\sphone(?:\sos)*|windows\smobile|windows)[\s\/]?([ntce\d\.\s]+\w)/i
], [NAME, [VERSION, mapper.str, maps.os.windows.version]], [
/(win(?=3|9|n)|win\s9x\s)([nt\d\.]+)/i
], [[NAME, 'Windows'], [VERSION, mapper.str, maps.os.windows.version]], [
// Mobile/Embedded OS
/\((bb)(10);/i // BlackBerry 10
], [[NAME, 'BlackBerry'], VERSION], [
/(blackberry)\w*\/?([\w\.]+)*/i, // Blackberry
/(tizen)\/([\w\.]+)/i, // Tizen
/(android|webos|palm\os|qnx|bada|rim\stablet\sos|meego)[\/\s-]?([\w\.]+)*/i
// Android/WebOS/Palm/QNX/Bada/RIM/MeeGo
], [NAME, VERSION], [
/(symbian\s?os|symbos|s60(?=;))[\/\s-]?([\w\.]+)*/i // Symbian
], [[NAME, 'Symbian'], VERSION],[
/mozilla.+\(mobile;.+gecko.+firefox/i // Firefox OS
], [[NAME, 'Firefox OS'], VERSION], [
// Console
/(nintendo|playstation)\s([wids3portablevu]+)/i, // Nintendo/Playstation
// GNU/Linux based
/(mint)[\/\s\(]?(\w+)*/i, // Mint
/(joli|[kxln]?ubuntu|debian|[open]*suse|gentoo|arch|slackware|fedora|mandriva|centos|pclinuxos|redhat|zenwalk)[\/\s-]?([\w\.-]+)*/i,
// Joli/Ubuntu/Debian/SUSE/Gentoo/Arch/Slackware
// Fedora/Mandriva/CentOS/PCLinuxOS/RedHat/Zenwalk
/(hurd|linux)\s?([\w\.]+)*/i, // Hurd/Linux
/(gnu)\s?([\w\.]+)*/i // GNU
], [NAME, VERSION], [
/(cros)\s[\w]+\s([\w\.]+\w)/i // Chromium OS
], [[NAME, 'Chromium OS'], VERSION],[
// Solaris
/(sunos)\s?([\w\.]+\d)*/i // Solaris
], [[NAME, 'Solaris'], VERSION], [
// BSD based
/\s([frentopc-]{0,4}bsd|dragonfly)\s?([\w\.]+)*/i // FreeBSD/NetBSD/OpenBSD/PC-BSD/DragonFly
], [NAME, VERSION],[
/(ip[honead]+)(?:.*os\s*([\w]+)*\slike\smac|;\sopera)/i // iOS
], [[NAME, 'iOS'], [VERSION, /_/g, '.']], [
/(mac\sos\sx)\s?([\w\s\.]+\w)*/i // Mac OS
], [NAME, [VERSION, /_/g, '.']], [
// Other
/(haiku)\s(\w+)/i, // Haiku
/(aix)\s((\d)(?=\.|\)|\s)[\w\.]*)*/i, // AIX
/(macintosh|mac(?=_powerpc)|plan\s9|minix|beos|os\/2|amigaos|morphos|risc\sos)/i,
// Plan9/Minix/BeOS/OS2/AmigaOS/MorphOS/RISCOS
/(unix)\s?([\w\.]+)*/i // UNIX
], [NAME, VERSION]
]
};
/////////////////
// Constructor
////////////////
var UAParser = function (uastring) {
var ua = uastring || ((window && window.navigator && window.navigator.userAgent) ? window.navigator.userAgent : EMPTY);
if (!(this instanceof UAParser)) {
return new UAParser(uastring).getResult();
}
this.getBrowser = function () {
return mapper.rgx.apply(this, regexes.browser);
};
this.getCPU = function () {
return mapper.rgx.apply(this, regexes.cpu);
};
this.getDevice = function () {
return mapper.rgx.apply(this, regexes.device);
};
this.getEngine = function () {
return mapper.rgx.apply(this, regexes.engine);
};
this.getOS = function () {
return mapper.rgx.apply(this, regexes.os);
};
this.getResult = function() {
return {
ua : this.getUA(),
browser : this.getBrowser(),
engine : this.getEngine(),
os : this.getOS(),
device : this.getDevice(),
cpu : this.getCPU()
};
};
this.getUA = function () {
return ua;
};
this.setUA = function (uastring) {
ua = uastring;
return this;
};
this.setUA(ua);
};
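// Example (added for illustration, not part of ua-parser-js): parsing a fixed
// user-agent string; the commented results are roughly what this version
// produces for it. Defined but never called here.
function exampleUAParserUsage() {
  var parser = new UAParser('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 ' +
    '(KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36');
  var result = parser.getResult();
  // result.browser is roughly { name: 'Chrome', version: '37.0.2062.124', major: '37' }
  // result.os is roughly { name: 'Windows', version: '7' }
  return result;
}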
///////////
// Export
//////////
// check js environment
if (typeof(exports) !== UNDEF_TYPE) {
// nodejs env
if (typeof(module) !== UNDEF_TYPE && module.exports) {
exports = module.exports = UAParser;
}
exports.UAParser = UAParser;
} else {
// browser env
window.UAParser = UAParser;
// requirejs env (optional)
if (typeof(define) === FUNC_TYPE && define.amd) {
define(function () {
return UAParser;
});
}
// jQuery specific (optional)
if (typeof(window.jQuery) !== UNDEF_TYPE) {
var $ = window.jQuery;
var parser = new UAParser();
$.ua = parser.getResult();
$.ua.get = function() {
return parser.getUA();
};
$.ua.set = function (uastring) {
parser.setUA(uastring);
var result = parser.getResult();
for (var prop in result) {
$.ua[prop] = result[prop];
}
};
}
}
})(this);
},{}],47:[function(require,module,exports){
(function (global){
'use strict';
var opts = {adapter: 'redblack'};
function runTestSuites(PouchDB) {
var reporter = require('./perf.reporter');
reporter.log('Testing PouchDB version ' + PouchDB.version +
(opts.adapter ? (', using adapter: ' + opts.adapter) : '') +
'\n\n');
require('./perf.basics')(PouchDB, opts);
require('./perf.views')(PouchDB, opts);
}
var startNow = true;
if (startNow) {
runTestSuites(global.PouchDB);
}
}).call(this,typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./perf.basics":48,"./perf.reporter":49,"./perf.views":50}],48:[function(require,module,exports){
'use strict';
module.exports = function (PouchDB, opts) {
// need to use bluebird for promises everywhere, so we're comparing
// apples to apples
var Promise = require('bluebird');
var utils = require('./utils');
function createDocId(i) {
var intString = i.toString();
while (intString.length < 10) {
intString = '0' + intString;
}
return 'doc_' + intString;
}
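  // Sketch (added for illustration, not part of the original test bundle):
  // createDocId zero-pads to ten digits so lexicographic order matches
  // numeric order, which the startkey/endkey test below relies on.
  // Defined but never called here.
  function exampleCreateDocIdBehaviour() {
    return [
      createDocId(7) === 'doc_0000000007',
      createDocId(1234) === 'doc_0000001234'
    ];
  }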
var testCases = [
{
name: 'basic-inserts',
assertions: 1,
iterations: 1000,
setup: function (db, callback) {
callback(null, {'yo': 'dawg'});
},
test: function (db, itr, doc, done) {
db.post(doc, done);
}
}, {
name: 'bulk-inserts',
assertions: 1,
iterations: 100,
setup: function (db, callback) {
var docs = [];
for (var i = 0; i < 100; i++) {
docs.push({much : 'docs', very : 'bulk'});
}
callback(null, {docs : docs});
},
test: function (db, itr, docs, done) {
db.bulkDocs(docs, done);
}
}, {
name: 'basic-gets',
assertions: 1,
iterations: 10000,
setup: function (db, callback) {
var docs = [];
for (var i = 0; i < 10000; i++) {
docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
}
db.bulkDocs({docs : docs}, callback);
},
test: function (db, itr, docs, done) {
db.get(createDocId(itr), done);
}
}, {
name: 'all-docs-skip-limit',
assertions: 1,
iterations: 50,
setup: function (db, callback) {
var docs = [];
for (var i = 0; i < 1000; i++) {
docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
}
db.bulkDocs({docs : docs}, callback);
},
test: function (db, itr, docs, done) {
var tasks = [];
for (var i = 0; i < 10; i++) {
tasks.push(i);
}
Promise.all(tasks.map(function (doc, i) {
return db.allDocs({skip : i * 100, limit : 10});
})).then(function () {
done();
}, done);
}
}, {
name: 'all-docs-startkey-endkey',
assertions: 1,
iterations: 50,
setup: function (db, callback) {
var docs = [];
for (var i = 0; i < 1000; i++) {
docs.push({_id : createDocId(i), foo : 'bar', baz : 'quux'});
}
db.bulkDocs({docs : docs}, callback);
},
test: function (db, itr, docs, done) {
var tasks = [];
for (var i = 0; i < 10; i++) {
tasks.push(i);
}
Promise.all(tasks.map(function (doc, i) {
return db.allDocs({
startkey : createDocId(i * 100),
endkey : createDocId((i * 100) + 10)
});
})).then(function () {
done();
}, done);
}
}
];
utils.runTests(PouchDB, 'basics', testCases, opts);
};
},{"./utils":51,"bluebird":20}],49:[function(require,module,exports){
(function (process,global){
'use strict';
var isNode = process && !process.browser;
var UAParser = require('ua-parser-js');
var ua = !isNode && new UAParser(navigator.userAgent);
global.results = {};
var pre = !isNode && global.document.getElementById('output');
function log(msg) {
if (pre) {
pre.innerHTML = pre.innerHTML + msg;
} else {
console.log(msg);
}
}
exports.log = log;
exports.startSuite = function (suiteName) {
log('Starting suite: ' + suiteName + '\n\n');
};
exports.start = function (testCase) {
var key = testCase.name;
log('Starting test: ' + key + ' with ' + testCase.assertions +
' assertions and ' + testCase.iterations + ' iterations... ');
global.results[key] = {
start: Date.now()
};
};
exports.end = function (testCase) {
var key = testCase.name;
var obj = global.results[key];
obj.end = Date.now();
obj.duration = obj.end - obj.start;
log('done in ' + obj.duration + 'ms\n');
};
exports.complete = function (suiteName) {
global.results.completed = true;
if (isNode) {
global.results.client = {node: process.version};
} else {
global.results.client = {
browser: ua.getBrowser(),
device: ua.getDevice(),
engine: ua.getEngine(),
cpu: ua.getCPU(),
os : ua.getOS(),
userAgent: navigator.userAgent
};
}
console.log(global.results);
log('\nTests Complete!\n\n');
};
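// Sketch (added for illustration, not part of the original reporter): the
// shape each run leaves in global.results; the values here are placeholders,
// filled in by start(), end() and complete() above.
function exampleResultsShape() {
  return {
    'basic-inserts': { start: 0, end: 0, duration: 0 }, // one entry per test case
    completed: true,
    client: { node: 'vX.Y.Z' } // or browser/device/engine/cpu/os/userAgent in a browser
  };
}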
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"ua-parser-js":46}],50:[function(require,module,exports){
'use strict';
module.exports = function (PouchDB, opts) {
// need to use bluebird for promises everywhere, so we're comparing
// apples to apples
var Promise = require('bluebird');
var utils = require('./utils');
function makeTestDocs() {
return [
{key: null},
{key: true},
{key: false},
{key: -1},
{key: 0},
{key: 1},
{key: 2},
{key: 3},
{key: Math.random()},
{key: 'bar' + Math.random()},
{key: 'foo' + Math.random()},
{key: 'foobar' + Math.random()}
];
}
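  // Sketch (added for illustration, not part of the original suite): what one
  // of the temp-view queries issued in the test below looks like when written
  // against a db instance; emit is provided by PouchDB's map-function context.
  // Defined but never called here.
  function exampleTempViewQuery(db) {
    return db.query(function (doc) {
      emit(doc.key);
    }, {startkey: 'foo', limit: 5});
  }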
var testCases = [
{
name: 'temp-views',
assertions: 1,
iterations: 10,
setup: function (db, callback) {
var tasks = [];
for (var i = 0; i < 100; i++) {
tasks.push(i);
}
Promise.all(tasks.map(function () {
return db.bulkDocs({docs : makeTestDocs()});
})).then(function () {
callback();
}, callback);
},
test: function (db, itr, doc, done) {
var tasks = [
{startkey: 'foo', limit: 5},
{startkey: 'foobar', limit: 5},
{startkey: 'foo', limit: 5},
{startkey: -1, limit: 5},
{startkey: null, limit: 5}
];
Promise.all(tasks.map(function (task) {
return db.query(function (doc) {
emit(doc.key);
}, task);
})).then(function (res) {
done();
}, done);
}
},
{
name: 'persisted-views',
assertions: 1,
iterations: 10,
setup: function (db, callback) {
var tasks = [];
for (var i = 0; i < 100; i++) {
tasks.push(i);
}
Promise.all(tasks.map(function () {
return db.bulkDocs({docs : makeTestDocs()});
})).then(function () {
return db.put({
_id : '_design/myview',
views : {
myview : {
map : function (doc) {
emit(doc.key);
}.toString()
}
}
});
}).then(function () {
return db.query('myview/myview');
}).then(function () {
callback();
}, callback);
},
test: function (db, itr, doc, done) {
var tasks = [
{startkey: 'foo', limit: 5},
{startkey: 'foobar', limit: 5},
{startkey: 'foo', limit: 5},
{startkey: -1, limit: 5},
{startkey: null, limit: 5}
];
Promise.all(tasks.map(function (task) {
return db.query('myview/myview', task);
})).then(function (res) {
done();
}, done);
}
},
{
name: 'persisted-views-stale-ok',
assertions: 1,
iterations: 10,
setup: function (db, callback) {
var tasks = [];
for (var i = 0; i < 100; i++) {
tasks.push(i);
}
Promise.all(tasks.map(function () {
return db.bulkDocs({docs : makeTestDocs()});
})).then(function () {
return db.put({
_id : '_design/myview',
views : {
myview : {
map : function (doc) {
emit(doc.key);
}.toString()
}
}
});
}).then(function () {
return db.query('myview/myview');
}).then(function () {
callback();
}, callback);
},
test: function (db, itr, doc, done) {
var tasks = [
{startkey: 'foo', limit: 5, stale : 'ok'},
{startkey: 'foobar', limit: 5, stale : 'ok'},
{startkey: 'foo', limit: 5, stale : 'ok'},
{startkey: -1, limit: 5, stale : 'ok'},
{startkey: null, limit: 5, stale : 'ok'}
];
Promise.all(tasks.map(function (task) {
return db.query('myview/myview', task);
})).then(function (res) {
done();
}, done);
}
}
];
utils.runTests(PouchDB, 'views', testCases, opts);
};
},{"./utils":51,"bluebird":20}],51:[function(require,module,exports){
(function (process,global){
'use strict';
var reporter = require('./perf.reporter');
var test = require('tape');
var grep;
if (global.window && global.window.location && global.window.location.search) {
grep = global.window.location.search.match(/[&?]grep=([^&]+)/);
grep = grep && grep[1];
} else if (process && process.env) {
grep = process.env.GREP;
}
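// Sketch (added for illustration, not part of the original utils): how the
// grep value filters the suites below; a test runs unless neither the suite
// name nor the test name contains the grep string. Defined but never called.
function exampleGrepMatches(grepValue, suiteName, testName) {
  return !grepValue ||
    suiteName.indexOf(grepValue) !== -1 ||
    testName.indexOf(grepValue) !== -1;
}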
exports.runTests = function (PouchDB, suiteName, testCases, opts) {
testCases.forEach(function (testCase, i) {
if (grep && suiteName.indexOf(grep) === -1 &&
testCase.name.indexOf(grep) === -1) {
return;
}
test('benchmarking', function (t) {
var db;
var setupObj;
var randomizer = Math.random();
t.test('setup', function (t) {
db = new PouchDB('test' + randomizer, opts);
testCase.setup(db, function (err, res) {
setupObj = res;
if (i === 0) {
reporter.startSuite(suiteName);
}
reporter.start(testCase);
t.end();
});
});
t.test(testCase.name, function (t) {
t.plan(testCase.assertions);
var num = 0;
function after(err) {
if (err) {
t.error(err);
reporter.log(testCase.name + ' errored: ' + err.message + '\n');
}
if (++num < testCase.iterations) {
process.nextTick(function () {
testCase.test(db, num, setupObj, after);
});
} else {
        t.ok(true, testCase.name + ' completed');
}
}
testCase.test(db, num, setupObj, after);
});
t.test('teardown', function (t) {
reporter.end(testCase);
var opts = {adapter : db.adapter};
PouchDB.destroy('test' + randomizer, opts, function () {
t.end();
if (i === testCases.length - 1) {
reporter.complete(suiteName);
}
});
});
});
});
};
}).call(this,require("/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js"),typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
},{"./perf.reporter":49,"/Users/cmetcalf/pouchdb/node_modules/browserify/node_modules/insert-module-globals/node_modules/process/browser.js":3,"tape":35}]},{},[47,48,49,50,51])