Created December 31, 2013 01:05
Testing PhantomJS with IndexedDB (via IndexedDBShim and PouchDB)
'use strict';

var phantom = require('node-phantom-simple');

phantom.create(function (err, ph) {
  console.log('created phantom instance');

  ph.createPage(function (err, page) {
    console.log('created page instance');

    // Injects the IndexedDB shim and PouchDB into the open page, then
    // round-trips a test document with get/put/get.
    function loadPouchDb() {
      page.injectJs('./IndexedDBShim.min.js', function (/*err*/) {
        page.injectJs('./pouchdb-nightly.min.js', function (/*err*/) {
          page.evaluate(function () {
            window.callPhantom('Pouch should be loaded...');
            new window.Pouch('test-db', function (err, db) {
              window.callPhantom('Pouch initialized successfully!');
              setTimeout(function () {
                db.get('BOGOID', function (err, data) {
                  if (err) {
                    window.callPhantom("item doesn't exist: " + JSON.stringify(err));
                  } else if (data) {
                    window.callPhantom('item exists: ' + JSON.stringify(data));
                  }
                  window.callPhantom('saving an item...');
                  db.put({ _id: 'BOGOID', foo: 'bar', _rev: (data && (data.rev || data._rev)) }, function (err, data) {
                    window.callPhantom({ msg: 'saved a data item', error: err, data: data });
                    db.get('BOGOID', function (err, data) {
                      // code: 'exit' tells page.onCallback below to shut phantom down
                      window.callPhantom({ code: 'exit', msg: 'got a data item', error: err, data: data });
                    });
                  });
                });
              }, 100);
            });
          }.toString(), function () {
          });
        });
      });
    }

    // Basically this is window.onerror for the page
    page.onError = function (err) {
      console.error(err);
      ph.exit();
    };

    // Fired on every window.callPhantom call except the first one made from evaluateAsync
    page.onCallback = function (data) {
      if (data && 'exit' === data.code) {
        console.log('Kill Code Received');
        ph.exit();
      }
      console.log('CALLBACK: ' + JSON.stringify(data));
    };

    page.open('https://gist.github.com/', function (err, status) {
      console.log('opening page...');
      if (err || 'success' !== status) {
        console.log('err, status', err, status);
        ph.exit();
        return;
      }
    });

    page.onLoadFinished = function () {
      // frequently never fires
      console.log('load finished');
      testMessagePassing();
      loadPouchDb();
    };

    page.onUrlChanged = function (targetUrl) {
      console.log('New URL: ' + targetUrl);
    };

    function testMessagePassing() {
      // IMPORTANT: you must call `.toString()` on the function you pass to evaluateAsync
      // IMPORTANT: closures won't work; pass variables in with `replace(/'TPL'/g, JSON.stringify(data))`
      // The first call to window.callPhantom fires the evaluateAsync callback; all later calls fire page.onCallback
      page.evaluateAsync(function () {
        window.callPhantom("this is ignored" + 'MESSAGE'); // ends the evaluateAsync, but its argument is not passed along
        window.callPhantom("this goes to 'onCallback'" + 'MESSAGE');
        window.callPhantom("this also goes to 'onCallback'" + 'MESSAGE');
      }.toString().replace(/'MESSAGE'/g, JSON.stringify("hello world")), function (err, data) {
        // this callback receives neither err on a script error nor the data passed to callPhantom
        console.log('window.callPhantom called the first time:', data);
      });
    }
  });
}, { parameters: { 'local-storage-path': './html5-storage' } }); // extra phantomjs command-line parameters
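
For reference, here is a minimal sketch that isolates the closure-free variable-passing pattern described in testMessagePassing, separate from the PouchDB test. It assumes the same node-phantom-simple API used above; the 'TPL' placeholder, the payload object, and the about:blank page are illustrative only, not part of any library.

'use strict';

var phantom = require('node-phantom-simple');

phantom.create(function (err, ph) {
  ph.createPage(function (err, page) {
    // Every callPhantom call after the first one made from evaluateAsync lands here
    page.onCallback = function (data) {
      console.log('from page:', JSON.stringify(data));
      ph.exit();
    };

    page.open('about:blank', function (/*err, status*/) {
      var payload = { greeting: 'hello world' }; // illustrative data to pass into the page

      // Closures do not survive the trip into the page, so serialize the
      // function and splice the data in as a string before evaluating it.
      page.evaluateAsync(function () {
        var data = 'TPL'; // the quoted placeholder is replaced with JSON below
        window.callPhantom('first call: ends evaluateAsync'); // fires the evaluateAsync callback
        window.callPhantom({ received: data });               // fires page.onCallback
      }.toString().replace(/'TPL'/g, JSON.stringify(payload)), function () {
        console.log('evaluateAsync callback fired');
      });
    });
  });
});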
IndexedDBShim.min.js
/*! IndexedDBShim - v0.1.2 - 2013-07-11 */ | |
"use strict";var idbModules={};(function(e){function t(e,t,n,o){n.target=t,"function"==typeof t[e]&&t[e].apply(t,[n]),"function"==typeof o&&o()}function n(t,n,o){var i=new DOMException.constructor(0,n);throw i.name=t,i.message=n,e.DEBUG&&(console.log(t,n,o,i),console.trace&&console.trace()),i}var o=function(){this.length=0,this._items=[],Object.defineProperty&&Object.defineProperty(this,"_items",{enumerable:!1})};if(o.prototype={contains:function(e){return-1!==this._items.indexOf(e)},item:function(e){return this._items[e]},indexOf:function(e){return this._items.indexOf(e)},push:function(e){this._items.push(e),this.length+=1;for(var t=0;this._items.length>t;t++)this[t]=this._items[t]},splice:function(){this._items.splice.apply(this._items,arguments),this.length=this._items.length;for(var e in this)e===parseInt(e,10)+""&&delete this[e];for(e=0;this._items.length>e;e++)this[e]=this._items[e]}},Object.defineProperty)for(var i in{indexOf:!1,push:!1,splice:!1})Object.defineProperty(o.prototype,i,{enumerable:!1});e.util={throwDOMException:n,callback:t,quote:function(e){return"'"+e+"'"},StringList:o}})(idbModules),function(idbModules){var Sca=function(){return{decycle:function(object,callback){function checkForCompletion(){0===queuedObjects.length&&returnCallback(derezObj)}function readBlobAsDataURL(e,t){var n=new FileReader;n.onloadend=function(e){var n=e.target.result,o="blob";updateEncodedBlob(n,t,o)},n.readAsDataURL(e)}function updateEncodedBlob(dataURL,path,blobtype){var encoded=queuedObjects.indexOf(path);path=path.replace("$","derezObj"),eval(path+'.$enc="'+dataURL+'"'),eval(path+'.$type="'+blobtype+'"'),queuedObjects.splice(encoded,1),checkForCompletion()}function derez(e,t){var n,o,i;if(!("object"!=typeof e||null===e||e instanceof Boolean||e instanceof Date||e instanceof Number||e instanceof RegExp||e instanceof Blob||e instanceof String)){for(n=0;objects.length>n;n+=1)if(objects[n]===e)return{$ref:paths[n]};if(objects.push(e),paths.push(t),"[object Array]"===Object.prototype.toString.apply(e))for(i=[],n=0;e.length>n;n+=1)i[n]=derez(e[n],t+"["+n+"]");else{i={};for(o in e)Object.prototype.hasOwnProperty.call(e,o)&&(i[o]=derez(e[o],t+"["+JSON.stringify(o)+"]"))}return i}return e instanceof Blob?(queuedObjects.push(t),readBlobAsDataURL(e,t)):e instanceof Boolean?e={$type:"bool",$enc:""+e}:e instanceof Date?e={$type:"date",$enc:e.getTime()}:e instanceof Number?e={$type:"num",$enc:""+e}:e instanceof RegExp&&(e={$type:"regex",$enc:""+e}),e}var objects=[],paths=[],queuedObjects=[],returnCallback=callback,derezObj=derez(object,"$");checkForCompletion()},retrocycle:function retrocycle($){function dataURLToBlob(e){var t,n,o,i=";base64,";if(-1===e.indexOf(i))return n=e.split(","),t=n[0].split(":")[1],o=n[1],new Blob([o],{type:t});n=e.split(i),t=n[0].split(":")[1],o=window.atob(n[1]);for(var r=o.length,a=new Uint8Array(r),s=0;r>s;++s)a[s]=o.charCodeAt(s);return new Blob([a.buffer],{type:t})}function rez(value){var i,item,name,path;if(value&&"object"==typeof value)if("[object Array]"===Object.prototype.toString.apply(value))for(i=0;value.length>i;i+=1)item=value[i],item&&"object"==typeof item&&(path=item.$ref,value[i]="string"==typeof path&&px.test(path)?eval(path):rez(item));else if(void 0!==value.$type)switch(value.$type){case"blob":case"file":value=dataURLToBlob(value.$enc);break;case"bool":value=Boolean("true"===value.$enc);break;case"date":value=new Date(value.$enc);break;case"num":value=Number(value.$enc);break;case"regex":value=eval(value.$enc)}else for(name in value)"object"==typeof 
value[name]&&(item=value[name],item&&(path=item.$ref,value[name]="string"==typeof path&&px.test(path)?eval(path):rez(item)));return value}var px=/^\$(?:\[(?:\d+|\"(?:[^\\\"\u0000-\u001f]|\\([\\\"\/bfnrt]|u[0-9a-zA-Z]{4}))*\")\])*$/;return rez($),$},encode:function(e,t){function n(e){t(JSON.stringify(e))}this.decycle(e,n)},decode:function(e){return this.retrocycle(JSON.parse(e))}}}();idbModules.Sca=Sca}(idbModules),function(e){var t=["","number","string","boolean","object","undefined"],n=function(){return{encode:function(e){return t.indexOf(typeof e)+"-"+JSON.stringify(e)},decode:function(e){return e===void 0?void 0:JSON.parse(e.substring(2))}}},o={number:n("number"),"boolean":n(),object:n(),string:{encode:function(e){return t.indexOf("string")+"-"+e},decode:function(e){return""+e.substring(2)}},undefined:{encode:function(){return t.indexOf("undefined")+"-undefined"},decode:function(){return void 0}}},i=function(){return{encode:function(e){return o[typeof e].encode(e)},decode:function(e){return o[t[e.substring(0,1)]].decode(e)}}}();e.Key=i}(idbModules),function(e){var t=function(e,t){return{type:e,debug:t,bubbles:!1,cancelable:!1,eventPhase:0,timeStamp:new Date}};e.Event=t}(idbModules),function(e){var t=function(){this.onsuccess=this.onerror=this.result=this.error=this.source=this.transaction=null,this.readyState="pending"},n=function(){this.onblocked=this.onupgradeneeded=null};n.prototype=t,e.IDBRequest=t,e.IDBOpenRequest=n}(idbModules),function(e,t){var n=function(e,t,n,o){this.lower=e,this.upper=t,this.lowerOpen=n,this.upperOpen=o};n.only=function(e){return new n(e,e,!0,!0)},n.lowerBound=function(e,o){return new n(e,t,o,t)},n.upperBound=function(e){return new n(t,e,t,open)},n.bound=function(e,t,o,i){return new n(e,t,o,i)},e.IDBKeyRange=n}(idbModules),function(e,t){function n(n,o,i,r,a,s){this.__range=n,this.source=this.__idbObjectStore=i,this.__req=r,this.key=t,this.direction=o,this.__keyColumnName=a,this.__valueColumnName=s,this.source.transaction.__active||e.util.throwDOMException("TransactionInactiveError - The transaction this IDBObjectStore belongs to is not active."),this.__offset=-1,this.__lastKeyContinued=t,this["continue"]()}n.prototype.__find=function(n,o,i,r){var a=this,s=["SELECT * FROM ",e.util.quote(a.__idbObjectStore.name)],u=[];s.push("WHERE ",a.__keyColumnName," NOT NULL"),a.__range&&(a.__range.lower||a.__range.upper)&&(s.push("AND"),a.__range.lower&&(s.push(a.__keyColumnName+(a.__range.lowerOpen?" >":" >= ")+" ?"),u.push(e.Key.encode(a.__range.lower))),a.__range.lower&&a.__range.upper&&s.push("AND"),a.__range.upper&&(s.push(a.__keyColumnName+(a.__range.upperOpen?" 
< ":" <= ")+" ?"),u.push(e.Key.encode(a.__range.upper)))),n!==t&&(a.__lastKeyContinued=n,a.__offset=0),a.__lastKeyContinued!==t&&(s.push("AND "+a.__keyColumnName+" >= ?"),u.push(e.Key.encode(a.__lastKeyContinued))),s.push("ORDER BY ",a.__keyColumnName),s.push("LIMIT 1 OFFSET "+a.__offset),e.DEBUG&&console.log(s.join(" "),u),o.executeSql(s.join(" "),u,function(n,o){if(1===o.rows.length){var r=e.Key.decode(o.rows.item(0)[a.__keyColumnName]),s="value"===a.__valueColumnName?e.Sca.decode(o.rows.item(0)[a.__valueColumnName]):e.Key.decode(o.rows.item(0)[a.__valueColumnName]);i(r,s)}else e.DEBUG&&console.log("Reached end of cursors"),i(t,t)},function(t,n){e.DEBUG&&console.log("Could not execute Cursor.continue"),r(n)})},n.prototype["continue"]=function(e){var n=this;this.__idbObjectStore.transaction.__addToTransactionQueue(function(o,i,r,a){n.__offset++,n.__find(e,o,function(e,o){n.key=e,n.value=o,r(n.key!==t?n:t,n.__req)},function(e){a(e)})})},n.prototype.advance=function(n){0>=n&&e.util.throwDOMException("Type Error - Count is invalid - 0 or negative",n);var o=this;this.__idbObjectStore.transaction.__addToTransactionQueue(function(e,i,r,a){o.__offset+=n,o.__find(t,e,function(e,n){o.key=e,o.value=n,r(o.key!==t?o:t,o.__req)},function(e){a(e)})})},n.prototype.update=function(n){var o=this,i=this.__idbObjectStore.transaction.__createRequest(function(){});return e.Sca.encode(n,function(n){this.__idbObjectStore.__pushToQueue(i,function(i,r,a,s){o.__find(t,i,function(t){var r="UPDATE "+e.util.quote(o.__idbObjectStore.name)+" SET value = ? WHERE key = ?";e.DEBUG&&console.log(r,n,t),i.executeSql(r,[e.Sca.encode(n),e.Key.encode(t)],function(e,n){1===n.rowsAffected?a(t):s("No rowns with key found"+t)},function(e,t){s(t)})},function(e){s(e)})})}),i},n.prototype["delete"]=function(){var n=this;return this.__idbObjectStore.transaction.__addToTransactionQueue(function(o,i,r,a){n.__find(t,o,function(i){var s="DELETE FROM "+e.util.quote(n.__idbObjectStore.name)+" WHERE key = ?";e.DEBUG&&console.log(s,i),o.executeSql(s,[e.Key.encode(i)],function(e,n){1===n.rowsAffected?r(t):a("No rowns with key found"+i)},function(e,t){a(t)})},function(e){a(e)})})},e.IDBCursor=n}(idbModules),function(idbModules,undefined){function IDBIndex(e,t){this.indexName=this.name=e,this.__idbObjectStore=this.objectStore=this.source=t;var n=t.__storeProps&&t.__storeProps.indexList;n&&(n=JSON.parse(n)),this.keyPath=n&&n[e]&&n[e].keyPath||e,["multiEntry","unique"].forEach(function(t){this[t]=!!(n&&n[e]&&n[e].optionalParams&&n[e].optionalParams[t])},this)}IDBIndex.prototype.__createIndex=function(indexName,keyPath,optionalParameters){var me=this,transaction=me.__idbObjectStore.transaction;transaction.__addToTransactionQueue(function(tx,args,success,failure){me.__idbObjectStore.__getStoreProps(tx,function(){function error(){idbModules.util.throwDOMException(0,"Could not create new index",arguments)}2!==transaction.mode&&idbModules.util.throwDOMException(0,"Invalid State error, not a version transaction",me.transaction);var idxList=JSON.parse(me.__idbObjectStore.__storeProps.indexList);idxList[indexName]!==undefined&&idbModules.util.throwDOMException(0,"Index already exists on store",idxList);var columnName=indexName;idxList[indexName]={columnName:columnName,keyPath:keyPath,optionalParams:optionalParameters},me.__idbObjectStore.__storeProps.indexList=JSON.stringify(idxList);var sql=["ALTER TABLE",idbModules.util.quote(me.__idbObjectStore.name),"ADD",columnName,"BLOB"].join(" 
");idbModules.DEBUG&&console.log(sql),tx.executeSql(sql,[],function(tx,data){tx.executeSql("SELECT * FROM "+idbModules.util.quote(me.__idbObjectStore.name),[],function(tx,data){(function initIndexForRow(i){if(data.rows.length>i)try{var value=idbModules.Sca.decode(data.rows.item(i).value),indexKey=eval("value['"+keyPath+"']");tx.executeSql("UPDATE "+idbModules.util.quote(me.__idbObjectStore.name)+" set "+columnName+" = ? where key = ?",[idbModules.Key.encode(indexKey),data.rows.item(i).key],function(){initIndexForRow(i+1)},error)}catch(e){initIndexForRow(i+1)}else idbModules.DEBUG&&console.log("Updating the indexes in table",me.__idbObjectStore.__storeProps),tx.executeSql("UPDATE __sys__ set indexList = ? where name = ?",[me.__idbObjectStore.__storeProps.indexList,me.__idbObjectStore.name],function(){me.__idbObjectStore.__setReadyState("createIndex",!0),success(me)},error)})(0)},error)},error)},"createObjectStore")})},IDBIndex.prototype.openCursor=function(e,t){var n=new idbModules.IDBRequest;return new idbModules.IDBCursor(e,t,this.source,n,this.indexName,"value"),n},IDBIndex.prototype.openKeyCursor=function(e,t){var n=new idbModules.IDBRequest;return new idbModules.IDBCursor(e,t,this.source,n,this.indexName,"key"),n},IDBIndex.prototype.__fetchIndexData=function(e,t){var n=this;return n.__idbObjectStore.transaction.__addToTransactionQueue(function(o,i,r,a){var s=["SELECT * FROM ",idbModules.util.quote(n.__idbObjectStore.name)," WHERE",n.indexName,"NOT NULL"],u=[];e!==undefined&&(s.push("AND",n.indexName," = ?"),u.push(idbModules.Key.encode(e))),idbModules.DEBUG&&console.log("Trying to fetch data for Index",s.join(" "),u),o.executeSql(s.join(" "),u,function(e,n){var o;o="count"==typeof t?n.rows.length:0===n.rows.length?undefined:"key"===t?idbModules.Key.decode(n.rows.item(0).key):idbModules.Sca.decode(n.rows.item(0).value),r(o)},a)})},IDBIndex.prototype.get=function(e){return this.__fetchIndexData(e,"value")},IDBIndex.prototype.getKey=function(e){return this.__fetchIndexData(e,"key")},IDBIndex.prototype.count=function(e){return this.__fetchIndexData(e,"count")},idbModules.IDBIndex=IDBIndex}(idbModules),function(idbModules){var IDBObjectStore=function(e,t,n){this.name=e,this.transaction=t,this.__ready={},this.__setReadyState("createObjectStore",n===void 0?!0:n),this.indexNames=new idbModules.util.StringList};IDBObjectStore.prototype.__setReadyState=function(e,t){this.__ready[e]=t},IDBObjectStore.prototype.__waitForReady=function(e,t){var n=!0;if(t!==void 0)n=this.__ready[t]===void 0?!0:this.__ready[t];else for(var o in this.__ready)this.__ready[o]||(n=!1);if(n)e();else{idbModules.DEBUG&&console.log("Waiting for to be ready",t);var i=this;window.setTimeout(function(){i.__waitForReady(e,t)},100)}},IDBObjectStore.prototype.__getStoreProps=function(e,t,n){var o=this;this.__waitForReady(function(){o.__storeProps?(idbModules.DEBUG&&console.log("Store properties - cached",o.__storeProps),t(o.__storeProps)):e.executeSql("SELECT * FROM __sys__ where name = ?",[o.name],function(e,n){1!==n.rows.length?t():(o.__storeProps={name:n.rows.item(0).name,indexList:n.rows.item(0).indexList,autoInc:n.rows.item(0).autoInc,keyPath:n.rows.item(0).keyPath},idbModules.DEBUG&&console.log("Store properties",o.__storeProps),t(o.__storeProps))},function(){t()})},n)},IDBObjectStore.prototype.__deriveKey=function(tx,value,key,callback){function getNextAutoIncKey(){tx.executeSql("SELECT * FROM sqlite_sequence where name like 
?",[me.name],function(e,t){1!==t.rows.length?callback(0):callback(t.rows.item(0).seq)},function(e,t){idbModules.util.throwDOMException(0,"Data Error - Could not get the auto increment value for key",t)})}var me=this;me.__getStoreProps(tx,function(props){if(props||idbModules.util.throwDOMException(0,"Data Error - Could not locate defination for this table",props),props.keyPath)if(key!==void 0&&idbModules.util.throwDOMException(0,"Data Error - The object store uses in-line keys and the key parameter was provided",props),value)try{var primaryKey=eval("value['"+props.keyPath+"']");primaryKey?callback(primaryKey):"true"===props.autoInc?getNextAutoIncKey():idbModules.util.throwDOMException(0,"Data Error - Could not eval key from keyPath")}catch(e){idbModules.util.throwDOMException(0,"Data Error - Could not eval key from keyPath",e)}else idbModules.util.throwDOMException(0,"Data Error - KeyPath was specified, but value was not");else key!==void 0?callback(key):"false"===props.autoInc?idbModules.util.throwDOMException(0,"Data Error - The object store uses out-of-line keys and has no key generator and the key parameter was not provided. ",props):getNextAutoIncKey()})},IDBObjectStore.prototype.__insertData=function(tx,value,primaryKey,success,error){var paramMap={};primaryKey!==void 0&&(paramMap.key=idbModules.Key.encode(primaryKey));var indexes=JSON.parse(this.__storeProps.indexList);for(var key in indexes)try{paramMap[indexes[key].columnName]=idbModules.Key.encode(eval("value['"+indexes[key].keyPath+"']"))}catch(e){error(e)}var sqlStart=["INSERT INTO ",idbModules.util.quote(this.name),"("],sqlEnd=[" VALUES ("],sqlValues=[];for(key in paramMap)sqlStart.push(key+","),sqlEnd.push("?,"),sqlValues.push(paramMap[key]);sqlStart.push("value )"),sqlEnd.push("?)"),sqlValues.push(value);var sql=sqlStart.join(" ")+sqlEnd.join(" ");idbModules.DEBUG&&console.log("SQL for adding",sql,sqlValues),tx.executeSql(sql,sqlValues,function(){success(primaryKey)},function(e,t){error(t)})},IDBObjectStore.prototype.add=function(e,t){var n=this,o=n.transaction.__createRequest(function(){});return idbModules.Sca.encode(e,function(i){n.transaction.__pushToQueue(o,function(o,r,a,s){n.__deriveKey(o,e,t,function(e){n.__insertData(o,i,e,a,s)})})}),o},IDBObjectStore.prototype.put=function(e,t){var n=this,o=n.transaction.__createRequest(function(){});return idbModules.Sca.encode(e,function(i){n.transaction.__pushToQueue(o,function(o,r,a,s){n.__deriveKey(o,e,t,function(e){var t="DELETE FROM "+idbModules.util.quote(n.name)+" where key = ?";o.executeSql(t,[idbModules.Key.encode(e)],function(t,o){idbModules.DEBUG&&console.log("Did the row with the",e,"exist? 
",o.rowsAffected),n.__insertData(t,i,e,a,s)},function(e,t){s(t)})})})}),o},IDBObjectStore.prototype.get=function(e){var t=this;return t.transaction.__addToTransactionQueue(function(n,o,i,r){t.__waitForReady(function(){var o=idbModules.Key.encode(e);idbModules.DEBUG&&console.log("Fetching",t.name,o),n.executeSql("SELECT * FROM "+idbModules.util.quote(t.name)+" where key = ?",[o],function(e,t){idbModules.DEBUG&&console.log("Fetched data",t);try{if(0===t.rows.length)return i();i(idbModules.Sca.decode(t.rows.item(0).value))}catch(n){idbModules.DEBUG&&console.log(n),i(void 0)}},function(e,t){r(t)})})})},IDBObjectStore.prototype["delete"]=function(e){var t=this;return t.transaction.__addToTransactionQueue(function(n,o,i,r){t.__waitForReady(function(){var o=idbModules.Key.encode(e);idbModules.DEBUG&&console.log("Fetching",t.name,o),n.executeSql("DELETE FROM "+idbModules.util.quote(t.name)+" where key = ?",[o],function(e,t){idbModules.DEBUG&&console.log("Deleted from database",t.rowsAffected),i()},function(e,t){r(t)})})})},IDBObjectStore.prototype.clear=function(){var e=this;return e.transaction.__addToTransactionQueue(function(t,n,o,i){e.__waitForReady(function(){t.executeSql("DELETE FROM "+idbModules.util.quote(e.name),[],function(e,t){idbModules.DEBUG&&console.log("Cleared all records from database",t.rowsAffected),o()},function(e,t){i(t)})})})},IDBObjectStore.prototype.count=function(e){var t=this;return t.transaction.__addToTransactionQueue(function(n,o,i,r){t.__waitForReady(function(){var o="SELECT * FROM "+idbModules.util.quote(t.name)+(e!==void 0?" WHERE key = ?":""),a=[];e!==void 0&&a.push(idbModules.Key.encode(e)),n.executeSql(o,a,function(e,t){i(t.rows.length)},function(e,t){r(t)})})})},IDBObjectStore.prototype.openCursor=function(e,t){var n=new idbModules.IDBRequest;return new idbModules.IDBCursor(e,t,this,n,"key","value"),n},IDBObjectStore.prototype.index=function(e){var t=new idbModules.IDBIndex(e,this);return t},IDBObjectStore.prototype.createIndex=function(e,t,n){var o=this;n=n||{},o.__setReadyState("createIndex",!1);var i=new idbModules.IDBIndex(e,o);return o.__waitForReady(function(){i.__createIndex(e,t,n)},"createObjectStore"),o.indexNames.push(e),i},IDBObjectStore.prototype.deleteIndex=function(e){var t=new idbModules.IDBIndex(e,this,!1);return t.__deleteIndex(e),t},idbModules.IDBObjectStore=IDBObjectStore}(idbModules),function(e){var t=0,n=1,o=2,i=function(o,i,r){if("number"==typeof i)this.mode=i,2!==i&&e.DEBUG&&console.log("Mode should be a string, but was specified as ",i);else if("string"==typeof i)switch(i){case"readwrite":this.mode=n;break;case"readonly":this.mode=t;break;default:this.mode=t}this.storeNames="string"==typeof o?[o]:o;for(var a=0;this.storeNames.length>a;a++)r.objectStoreNames.contains(this.storeNames[a])||e.util.throwDOMException(0,"The operation failed because the requested database object could not be found. 
For example, an object store did not exist but was being opened.",this.storeNames[a]);this.__active=!0,this.__running=!1,this.__requests=[],this.__aborted=!1,this.db=r,this.error=null,this.onabort=this.onerror=this.oncomplete=null};i.prototype.__executeRequests=function(){if(this.__running&&this.mode!==o)return e.DEBUG&&console.log("Looks like the request set is already running",this.mode),void 0;this.__running=!0;var t=this;window.setTimeout(function(){2===t.mode||t.__active||e.util.throwDOMException(0,"A request was placed against a transaction which is currently not active, or which is finished",t.__active),t.db.__db.transaction(function(n){function o(t,n){n&&(a.req=n),a.req.readyState="done",a.req.result=t,delete a.req.error;var o=e.Event("success");e.util.callback("onsuccess",a.req,o),s++,r()}function i(){a.req.readyState="done",a.req.error="DOMError";var t=e.Event("error",arguments);e.util.callback("onerror",a.req,t),s++,r()}function r(){return s>=t.__requests.length?(t.__active=!1,t.__requests=[],void 0):(a=t.__requests[s],a.op(n,a.args,o,i),void 0)}t.__tx=n;var a=null,s=0;try{r()}catch(u){e.DEBUG&&console.log("An exception occured in transaction",arguments),"function"==typeof t.onerror&&t.onerror()}},function(){e.DEBUG&&console.log("An error in transaction",arguments),"function"==typeof t.onerror&&t.onerror()},function(){e.DEBUG&&console.log("Transaction completed",arguments),"function"==typeof t.oncomplete&&t.oncomplete()})},1)},i.prototype.__addToTransactionQueue=function(t,n){this.__active||this.mode===o||e.util.throwDOMException(0,"A request was placed against a transaction which is currently not active, or which is finished.",this.__mode);var i=this.__createRequest();return this.__pushToQueue(i,t,n),i},i.prototype.__createRequest=function(){var t=new e.IDBRequest;return t.source=this.db,t},i.prototype.__pushToQueue=function(e,t,n){this.__requests.push({op:t,args:n,req:e}),this.__executeRequests()},i.prototype.objectStore=function(t){return new e.IDBObjectStore(t,this)},i.prototype.abort=function(){!this.__active&&e.util.throwDOMException(0,"A request was placed against a transaction which is currently not active, or which is finished",this.__active)},i.prototype.READ_ONLY=0,i.prototype.READ_WRITE=1,i.prototype.VERSION_CHANGE=2,e.IDBTransaction=i}(idbModules),function(e){var t=function(t,n,o,i){this.__db=t,this.version=o,this.__storeProperties=i,this.objectStoreNames=new e.util.StringList;for(var r=0;i.rows.length>r;r++)this.objectStoreNames.push(i.rows.item(r).name);this.name=n,this.onabort=this.onerror=this.onversionchange=null};t.prototype.createObjectStore=function(t,n){var o=this;n=n||{},n.keyPath=n.keyPath||null;var i=new e.IDBObjectStore(t,o.__versionTransaction,!1),r=o.__versionTransaction;return r.__addToTransactionQueue(function(r,a,s){function u(){e.util.throwDOMException(0,"Could not create new object store",arguments)}o.__versionTransaction||e.util.throwDOMException(0,"Invalid State error",o.transaction);var c=["CREATE TABLE",e.util.quote(t),"(key BLOB",n.autoIncrement?", inc INTEGER PRIMARY KEY AUTOINCREMENT":"PRIMARY KEY",", value BLOB)"].join(" ");e.DEBUG&&console.log(c),r.executeSql(c,[],function(e){e.executeSql("INSERT INTO __sys__ VALUES (?,?,?,?)",[t,n.keyPath,n.autoIncrement?!0:!1,"{}"],function(){i.__setReadyState("createObjectStore",!0),s(i)},u)},u)}),o.objectStoreNames.push(t),i},t.prototype.deleteObjectStore=function(t){var n=function(){e.util.throwDOMException(0,"Could not delete 
ObjectStore",arguments)},o=this;!o.objectStoreNames.contains(t)&&n("Object Store does not exist"),o.objectStoreNames.splice(o.objectStoreNames.indexOf(t),1);var i=o.__versionTransaction;i.__addToTransactionQueue(function(){o.__versionTransaction||e.util.throwDOMException(0,"Invalid State error",o.transaction),o.__db.transaction(function(o){o.executeSql("SELECT * FROM __sys__ where name = ?",[t],function(o,i){i.rows.length>0&&o.executeSql("DROP TABLE "+e.util.quote(t),[],function(){o.executeSql("DELETE FROM __sys__ WHERE name = ?",[t],function(){},n)},n)})})})},t.prototype.close=function(){},t.prototype.transaction=function(t,n){var o=new e.IDBTransaction(t,n||1,this);return o},e.IDBDatabase=t}(idbModules),function(e){var t=4194304;if(window.openDatabase){var n=window.openDatabase("__sysdb__",1,"System Database",t);n.transaction(function(t){t.executeSql("SELECT * FROM dbVersions",[],function(){},function(){n.transaction(function(t){t.executeSql("CREATE TABLE IF NOT EXISTS dbVersions (name VARCHAR(255), version INT);",[],function(){},function(){e.util.throwDOMException("Could not create table __sysdb__ to save DB versions")})})})},function(){e.DEBUG&&console.log("Error in sysdb transaction - when selecting from dbVersions",arguments)});var o={open:function(o,i){function r(){if(!u){var t=e.Event("error",arguments);s.readyState="done",s.error="DOMError",e.util.callback("onerror",s,t),u=!0}}function a(a){var u=window.openDatabase(o,1,o,t);s.readyState="done",i===void 0&&(i=a||1),(0>=i||a>i)&&e.util.throwDOMException(0,"An attempt was made to open a database using a lower version than the existing version.",i),u.transaction(function(t){t.executeSql("CREATE TABLE IF NOT EXISTS __sys__ (name VARCHAR(255), keyPath VARCHAR(255), autoInc BOOLEAN, indexList BLOB)",[],function(){t.executeSql("SELECT * FROM __sys__",[],function(t,c){var d=e.Event("success");s.source=s.result=new e.IDBDatabase(u,o,i,c),i>a?n.transaction(function(t){t.executeSql("UPDATE dbVersions set version = ? where name = ?",[i,o],function(){var t=e.Event("upgradeneeded");t.oldVersion=a,t.newVersion=i,s.transaction=s.result.__versionTransaction=new e.IDBTransaction([],2,s.source),e.util.callback("onupgradeneeded",s,t,function(){var t=e.Event("success");e.util.callback("onsuccess",s,t)})},r)},r):e.util.callback("onsuccess",s,d)},r)},r)},r)}var s=new e.IDBOpenRequest,u=!1;return n.transaction(function(e){e.executeSql("SELECT * FROM dbVersions where name = ?",[o],function(e,t){0===t.rows.length?e.executeSql("INSERT INTO dbVersions VALUES (?,?)",[o,i||1],function(){a(0)},r):a(t.rows.item(0).version)},r)},r),s},deleteDatabase:function(o){function i(t){if(!s){a.readyState="done",a.error="DOMError";var n=e.Event("error");n.message=t,n.debug=arguments,e.util.callback("onerror",a,n),s=!0}}function r(){n.transaction(function(t){t.executeSql("DELETE FROM dbVersions where name = ? 
",[o],function(){a.result=void 0;var t=e.Event("success");t.newVersion=null,t.oldVersion=u,e.util.callback("onsuccess",a,t)},i)},i)}var a=new e.IDBOpenRequest,s=!1,u=null;return n.transaction(function(n){n.executeSql("SELECT * FROM dbVersions where name = ?",[o],function(n,s){if(0===s.rows.length){a.result=void 0;var c=e.Event("success");return c.newVersion=null,c.oldVersion=u,e.util.callback("onsuccess",a,c),void 0}u=s.rows.item(0).version;var d=window.openDatabase(o,1,o,t);d.transaction(function(t){t.executeSql("SELECT * FROM __sys__",[],function(t,n){var o=n.rows;(function a(n){n>=o.length?t.executeSql("DROP TABLE __sys__",[],function(){r()},i):t.executeSql("DROP TABLE "+e.util.quote(o.item(n).name),[],function(){a(n+1)},function(){a(n+1)})})(0)},function(){r()})},i)})},i),a},cmp:function(t,n){return e.Key.encode(t)>e.Key.encode(n)?1:t===n?0:-1}};e.shimIndexedDB=o}}(idbModules),function(e,t){e.openDatabase!==void 0&&(e.shimIndexedDB=t.shimIndexedDB,e.shimIndexedDB&&(e.shimIndexedDB.__useShim=function(){e.indexedDB=t.shimIndexedDB,e.IDBDatabase=t.IDBDatabase,e.IDBTransaction=t.IDBTransaction,e.IDBCursor=t.IDBCursor,e.IDBKeyRange=t.IDBKeyRange},e.shimIndexedDB.__debug=function(e){t.DEBUG=e})),e.indexedDB=e.indexedDB||e.webkitIndexedDB||e.mozIndexedDB||e.oIndexedDB||e.msIndexedDB,e.indexedDB===void 0&&e.openDatabase!==void 0?e.shimIndexedDB.__useShim():(e.IDBDatabase=e.IDBDatabase||e.webkitIDBDatabase,e.IDBTransaction=e.IDBTransaction||e.webkitIDBTransaction,e.IDBCursor=e.IDBCursor||e.webkitIDBCursor,e.IDBKeyRange=e.IDBKeyRange||e.webkitIDBKeyRange,e.IDBTransaction||(e.IDBTransaction={}),e.IDBTransaction.READ_ONLY=e.IDBTransaction.READ_ONLY||"readonly",e.IDBTransaction.READ_WRITE=e.IDBTransaction.READ_WRITE||"readwrite")}(window,idbModules); | |
//@ sourceMappingURL=http://nparashuram.com/IndexedDBShim/dist/IndexedDBShim.min.map |
pouchdb-nightly.min.js
/*PouchDB*/ | |
(function() { | |
// BEGIN Math.uuid.js | |
/*! | |
Math.uuid.js (v1.4) | |
http://www.broofa.com | |
mailto:[email protected] | |
Copyright (c) 2010 Robert Kieffer | |
Dual licensed under the MIT and GPL licenses. | |
*/ | |
/* | |
* Generate a random uuid. | |
* | |
* USAGE: Math.uuid(length, radix) | |
* length - the desired number of characters | |
* radix - the number of allowable values for each character. | |
* | |
* EXAMPLES: | |
* // No arguments - returns RFC4122, version 4 ID | |
* >>> Math.uuid() | |
* "92329D39-6F5C-4520-ABFC-AAB64544E172" | |
* | |
* // One argument - returns ID of the specified length | |
* >>> Math.uuid(15) // 15 character ID (default base=62) | |
* "VcydxgltxrVZSTV" | |
* | |
* // Two arguments - returns ID of the specified length, and radix. (Radix must be <= 62) | |
* >>> Math.uuid(8, 2) // 8 character ID (base=2) | |
* "01001010" | |
* >>> Math.uuid(8, 10) // 8 character ID (base=10) | |
* "47473046" | |
* >>> Math.uuid(8, 16) // 8 character ID (base=16) | |
* "098F4D35" | |
*/ | |
(function() { | |
// Private array of chars to use | |
var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split(''); | |
Math.uuid = function (len, radix) { | |
var chars = CHARS, uuid = []; | |
radix = radix || chars.length; | |
if (len) { | |
// Compact form | |
for (var i = 0; i < len; i++) uuid[i] = chars[0 | Math.random()*radix]; | |
} else { | |
// rfc4122, version 4 form | |
var r; | |
// rfc4122 requires these characters | |
uuid[8] = uuid[13] = uuid[18] = uuid[23] = '-'; | |
uuid[14] = '4'; | |
// Fill in random data. At i==19 set the high bits of clock sequence as | |
// per rfc4122, sec. 4.1.5 | |
for (var i = 0; i < 36; i++) { | |
if (!uuid[i]) { | |
r = 0 | Math.random()*16; | |
uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r]; | |
} | |
} | |
} | |
return uuid.join(''); | |
}; | |
// A more performant, but slightly bulkier, RFC4122v4 solution. We boost performance | |
// by minimizing calls to random() | |
Math.uuidFast = function() { | |
var chars = CHARS, uuid = new Array(36), rnd=0, r; | |
for (var i = 0; i < 36; i++) { | |
if (i==8 || i==13 || i==18 || i==23) { | |
uuid[i] = '-'; | |
} else if (i==14) { | |
uuid[i] = '4'; | |
} else { | |
if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0; | |
r = rnd & 0xf; | |
rnd = rnd >> 4; | |
uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r]; | |
} | |
} | |
return uuid.join(''); | |
}; | |
// A more compact, but less performant, RFC4122v4 solution: | |
Math.uuidCompact = function() { | |
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) { | |
var r = Math.random()*16|0, v = c == 'x' ? r : (r&0x3|0x8); | |
return v.toString(16); | |
}).toUpperCase(); | |
}; | |
})(); | |
// END Math.uuid.js | |
/** | |
* | |
* MD5 (Message-Digest Algorithm) | |
* | |
* For original source see http://www.webtoolkit.info/ | |
* Download: 15.02.2009 from http://www.webtoolkit.info/javascript-md5.html | |
* | |
* Licensed under CC-BY 2.0 License | |
* (http://creativecommons.org/licenses/by/2.0/uk/) | |
* | |
**/ | |
var Crypto = {}; | |
(function() { | |
Crypto.MD5 = function(string) { | |
function RotateLeft(lValue, iShiftBits) { | |
return (lValue<<iShiftBits) | (lValue>>>(32-iShiftBits)); | |
} | |
function AddUnsigned(lX,lY) { | |
var lX4,lY4,lX8,lY8,lResult; | |
lX8 = (lX & 0x80000000); | |
lY8 = (lY & 0x80000000); | |
lX4 = (lX & 0x40000000); | |
lY4 = (lY & 0x40000000); | |
lResult = (lX & 0x3FFFFFFF)+(lY & 0x3FFFFFFF); | |
if (lX4 & lY4) { | |
return (lResult ^ 0x80000000 ^ lX8 ^ lY8); | |
} | |
if (lX4 | lY4) { | |
if (lResult & 0x40000000) { | |
return (lResult ^ 0xC0000000 ^ lX8 ^ lY8); | |
} else { | |
return (lResult ^ 0x40000000 ^ lX8 ^ lY8); | |
} | |
} else { | |
return (lResult ^ lX8 ^ lY8); | |
} | |
} | |
function F(x,y,z) { return (x & y) | ((~x) & z); } | |
function G(x,y,z) { return (x & z) | (y & (~z)); } | |
function H(x,y,z) { return (x ^ y ^ z); } | |
function I(x,y,z) { return (y ^ (x | (~z))); } | |
function FF(a,b,c,d,x,s,ac) { | |
a = AddUnsigned(a, AddUnsigned(AddUnsigned(F(b, c, d), x), ac)); | |
return AddUnsigned(RotateLeft(a, s), b); | |
}; | |
function GG(a,b,c,d,x,s,ac) { | |
a = AddUnsigned(a, AddUnsigned(AddUnsigned(G(b, c, d), x), ac)); | |
return AddUnsigned(RotateLeft(a, s), b); | |
}; | |
function HH(a,b,c,d,x,s,ac) { | |
a = AddUnsigned(a, AddUnsigned(AddUnsigned(H(b, c, d), x), ac)); | |
return AddUnsigned(RotateLeft(a, s), b); | |
}; | |
function II(a,b,c,d,x,s,ac) { | |
a = AddUnsigned(a, AddUnsigned(AddUnsigned(I(b, c, d), x), ac)); | |
return AddUnsigned(RotateLeft(a, s), b); | |
}; | |
function ConvertToWordArray(string) { | |
var lWordCount; | |
var lMessageLength = string.length; | |
var lNumberOfWords_temp1=lMessageLength + 8; | |
var lNumberOfWords_temp2=(lNumberOfWords_temp1-(lNumberOfWords_temp1 % 64))/64; | |
var lNumberOfWords = (lNumberOfWords_temp2+1)*16; | |
var lWordArray=Array(lNumberOfWords-1); | |
var lBytePosition = 0; | |
var lByteCount = 0; | |
while ( lByteCount < lMessageLength ) { | |
lWordCount = (lByteCount-(lByteCount % 4))/4; | |
lBytePosition = (lByteCount % 4)*8; | |
lWordArray[lWordCount] = (lWordArray[lWordCount] | (string.charCodeAt(lByteCount)<<lBytePosition)); | |
lByteCount++; | |
} | |
lWordCount = (lByteCount-(lByteCount % 4))/4; | |
lBytePosition = (lByteCount % 4)*8; | |
lWordArray[lWordCount] = lWordArray[lWordCount] | (0x80<<lBytePosition); | |
lWordArray[lNumberOfWords-2] = lMessageLength<<3; | |
lWordArray[lNumberOfWords-1] = lMessageLength>>>29; | |
return lWordArray; | |
}; | |
function WordToHex(lValue) { | |
var WordToHexValue="",WordToHexValue_temp="",lByte,lCount; | |
for (lCount = 0;lCount<=3;lCount++) { | |
lByte = (lValue>>>(lCount*8)) & 255; | |
WordToHexValue_temp = "0" + lByte.toString(16); | |
WordToHexValue = WordToHexValue + WordToHexValue_temp.substr(WordToHexValue_temp.length-2,2); | |
} | |
return WordToHexValue; | |
}; | |
//** function Utf8Encode(string) removed. Aready defined in pidcrypt_utils.js | |
var x=Array(); | |
var k,AA,BB,CC,DD,a,b,c,d; | |
var S11=7, S12=12, S13=17, S14=22; | |
var S21=5, S22=9 , S23=14, S24=20; | |
var S31=4, S32=11, S33=16, S34=23; | |
var S41=6, S42=10, S43=15, S44=21; | |
// string = Utf8Encode(string); #function call removed | |
x = ConvertToWordArray(string); | |
a = 0x67452301; b = 0xEFCDAB89; c = 0x98BADCFE; d = 0x10325476; | |
for (k=0;k<x.length;k+=16) { | |
AA=a; BB=b; CC=c; DD=d; | |
a=FF(a,b,c,d,x[k+0], S11,0xD76AA478); | |
d=FF(d,a,b,c,x[k+1], S12,0xE8C7B756); | |
c=FF(c,d,a,b,x[k+2], S13,0x242070DB); | |
b=FF(b,c,d,a,x[k+3], S14,0xC1BDCEEE); | |
a=FF(a,b,c,d,x[k+4], S11,0xF57C0FAF); | |
d=FF(d,a,b,c,x[k+5], S12,0x4787C62A); | |
c=FF(c,d,a,b,x[k+6], S13,0xA8304613); | |
b=FF(b,c,d,a,x[k+7], S14,0xFD469501); | |
a=FF(a,b,c,d,x[k+8], S11,0x698098D8); | |
d=FF(d,a,b,c,x[k+9], S12,0x8B44F7AF); | |
c=FF(c,d,a,b,x[k+10],S13,0xFFFF5BB1); | |
b=FF(b,c,d,a,x[k+11],S14,0x895CD7BE); | |
a=FF(a,b,c,d,x[k+12],S11,0x6B901122); | |
d=FF(d,a,b,c,x[k+13],S12,0xFD987193); | |
c=FF(c,d,a,b,x[k+14],S13,0xA679438E); | |
b=FF(b,c,d,a,x[k+15],S14,0x49B40821); | |
a=GG(a,b,c,d,x[k+1], S21,0xF61E2562); | |
d=GG(d,a,b,c,x[k+6], S22,0xC040B340); | |
c=GG(c,d,a,b,x[k+11],S23,0x265E5A51); | |
b=GG(b,c,d,a,x[k+0], S24,0xE9B6C7AA); | |
a=GG(a,b,c,d,x[k+5], S21,0xD62F105D); | |
d=GG(d,a,b,c,x[k+10],S22,0x2441453); | |
c=GG(c,d,a,b,x[k+15],S23,0xD8A1E681); | |
b=GG(b,c,d,a,x[k+4], S24,0xE7D3FBC8); | |
a=GG(a,b,c,d,x[k+9], S21,0x21E1CDE6); | |
d=GG(d,a,b,c,x[k+14],S22,0xC33707D6); | |
c=GG(c,d,a,b,x[k+3], S23,0xF4D50D87); | |
b=GG(b,c,d,a,x[k+8], S24,0x455A14ED); | |
a=GG(a,b,c,d,x[k+13],S21,0xA9E3E905); | |
d=GG(d,a,b,c,x[k+2], S22,0xFCEFA3F8); | |
c=GG(c,d,a,b,x[k+7], S23,0x676F02D9); | |
b=GG(b,c,d,a,x[k+12],S24,0x8D2A4C8A); | |
a=HH(a,b,c,d,x[k+5], S31,0xFFFA3942); | |
d=HH(d,a,b,c,x[k+8], S32,0x8771F681); | |
c=HH(c,d,a,b,x[k+11],S33,0x6D9D6122); | |
b=HH(b,c,d,a,x[k+14],S34,0xFDE5380C); | |
a=HH(a,b,c,d,x[k+1], S31,0xA4BEEA44); | |
d=HH(d,a,b,c,x[k+4], S32,0x4BDECFA9); | |
c=HH(c,d,a,b,x[k+7], S33,0xF6BB4B60); | |
b=HH(b,c,d,a,x[k+10],S34,0xBEBFBC70); | |
a=HH(a,b,c,d,x[k+13],S31,0x289B7EC6); | |
d=HH(d,a,b,c,x[k+0], S32,0xEAA127FA); | |
c=HH(c,d,a,b,x[k+3], S33,0xD4EF3085); | |
b=HH(b,c,d,a,x[k+6], S34,0x4881D05); | |
a=HH(a,b,c,d,x[k+9], S31,0xD9D4D039); | |
d=HH(d,a,b,c,x[k+12],S32,0xE6DB99E5); | |
c=HH(c,d,a,b,x[k+15],S33,0x1FA27CF8); | |
b=HH(b,c,d,a,x[k+2], S34,0xC4AC5665); | |
a=II(a,b,c,d,x[k+0], S41,0xF4292244); | |
d=II(d,a,b,c,x[k+7], S42,0x432AFF97); | |
c=II(c,d,a,b,x[k+14],S43,0xAB9423A7); | |
b=II(b,c,d,a,x[k+5], S44,0xFC93A039); | |
a=II(a,b,c,d,x[k+12],S41,0x655B59C3); | |
d=II(d,a,b,c,x[k+3], S42,0x8F0CCC92); | |
c=II(c,d,a,b,x[k+10],S43,0xFFEFF47D); | |
b=II(b,c,d,a,x[k+1], S44,0x85845DD1); | |
a=II(a,b,c,d,x[k+8], S41,0x6FA87E4F); | |
d=II(d,a,b,c,x[k+15],S42,0xFE2CE6E0); | |
c=II(c,d,a,b,x[k+6], S43,0xA3014314); | |
b=II(b,c,d,a,x[k+13],S44,0x4E0811A1); | |
a=II(a,b,c,d,x[k+4], S41,0xF7537E82); | |
d=II(d,a,b,c,x[k+11],S42,0xBD3AF235); | |
c=II(c,d,a,b,x[k+2], S43,0x2AD7D2BB); | |
b=II(b,c,d,a,x[k+9], S44,0xEB86D391); | |
a=AddUnsigned(a,AA); | |
b=AddUnsigned(b,BB); | |
c=AddUnsigned(c,CC); | |
d=AddUnsigned(d,DD); | |
} | |
var temp = WordToHex(a)+WordToHex(b)+WordToHex(c)+WordToHex(d); | |
return temp.toLowerCase(); | |
} | |
})(); | |
// END Crypto.md5.js | |
//---------------------------------------------------------------------- | |
// | |
// ECMAScript 5 Polyfills | |
// from www.calocomrmen./polyfill/ | |
// | |
//---------------------------------------------------------------------- | |
//---------------------------------------------------------------------- | |
// ES5 15.2 Object Objects | |
//---------------------------------------------------------------------- | |
// ES 15.2.3.6 Object.defineProperty ( O, P, Attributes ) | |
// Partial support for most common case - getters, setters, and values | |
(function() { | |
if (!Object.defineProperty || | |
!(function () { try { Object.defineProperty({}, 'x', {}); return true; } catch (e) { return false; } } ())) { | |
var orig = Object.defineProperty; | |
Object.defineProperty = function (o, prop, desc) { | |
"use strict"; | |
// In IE8 try built-in implementation for defining properties on DOM prototypes. | |
if (orig) { try { return orig(o, prop, desc); } catch (e) {} } | |
if (o !== Object(o)) { throw new TypeError("Object.defineProperty called on non-object"); } | |
if (Object.prototype.__defineGetter__ && ('get' in desc)) { | |
Object.prototype.__defineGetter__.call(o, prop, desc.get); | |
} | |
if (Object.prototype.__defineSetter__ && ('set' in desc)) { | |
Object.prototype.__defineSetter__.call(o, prop, desc.set); | |
} | |
if ('value' in desc) { | |
o[prop] = desc.value; | |
} | |
return o; | |
}; | |
} | |
}()); | |
// ES5 15.2.3.14 Object.keys ( O ) | |
// https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Object/keys | |
if (!Object.keys) { | |
Object.keys = function (o) { | |
if (o !== Object(o)) { throw new TypeError('Object.keys called on non-object'); } | |
var ret = [], p; | |
for (p in o) { | |
if (Object.prototype.hasOwnProperty.call(o, p)) { | |
ret.push(p); | |
} | |
} | |
return ret; | |
}; | |
} | |
//---------------------------------------------------------------------- | |
// ES5 15.4 Array Objects | |
//---------------------------------------------------------------------- | |
// ES5 15.4.4.18 Array.prototype.forEach ( callbackfn [ , thisArg ] ) | |
// From https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Array/forEach | |
if (!Array.prototype.forEach) { | |
Array.prototype.forEach = function (fun /*, thisp */) { | |
"use strict"; | |
if (this === void 0 || this === null) { throw new TypeError(); } | |
var t = Object(this); | |
var len = t.length >>> 0; | |
if (typeof fun !== "function") { throw new TypeError(); } | |
var thisp = arguments[1], i; | |
for (i = 0; i < len; i++) { | |
if (i in t) { | |
fun.call(thisp, t[i], i, t); | |
} | |
} | |
}; | |
} | |
// ES5 15.4.4.19 Array.prototype.map ( callbackfn [ , thisArg ] ) | |
// From https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Array/Map | |
if (!Array.prototype.map) { | |
Array.prototype.map = function (fun /*, thisp */) { | |
"use strict"; | |
if (this === void 0 || this === null) { throw new TypeError(); } | |
var t = Object(this); | |
var len = t.length >>> 0; | |
if (typeof fun !== "function") { throw new TypeError(); } | |
var res = []; res.length = len; | |
var thisp = arguments[1], i; | |
for (i = 0; i < len; i++) { | |
if (i in t) { | |
res[i] = fun.call(thisp, t[i], i, t); | |
} | |
} | |
return res; | |
}; | |
} | |
// Extends method | |
// (taken from http://code.jquery.com/jquery-1.9.0.js) | |
// Populate the class2type map | |
var class2type = {}; | |
var types = ["Boolean", "Number", "String", "Function", "Array", "Date", "RegExp", "Object", "Error"]; | |
for (var i = 0; i < types.length; i++) { | |
var typename = types[i]; | |
class2type[ "[object " + typename + "]" ] = typename.toLowerCase(); | |
} | |
var core_toString = class2type.toString; | |
var core_hasOwn = class2type.hasOwnProperty; | |
var type = function(obj) { | |
if (obj === null) { | |
return String( obj ); | |
} | |
return typeof obj === "object" || typeof obj === "function" ? | |
class2type[core_toString.call(obj)] || "object" : | |
typeof obj; | |
}; | |
var isWindow = function(obj) { | |
return obj !== null && obj === obj.window; | |
}; | |
var isPlainObject = function( obj ) { | |
// Must be an Object. | |
// Because of IE, we also have to check the presence of the constructor property. | |
// Make sure that DOM nodes and window objects don't pass through, as well | |
if ( !obj || type(obj) !== "object" || obj.nodeType || isWindow( obj ) ) { | |
return false; | |
} | |
try { | |
// Not own constructor property must be Object | |
if ( obj.constructor && | |
!core_hasOwn.call(obj, "constructor") && | |
!core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) { | |
return false; | |
} | |
} catch ( e ) { | |
// IE8,9 Will throw exceptions on certain host objects #9897 | |
return false; | |
} | |
// Own properties are enumerated firstly, so to speed up, | |
// if last one is own, then all properties are own. | |
var key; | |
for ( key in obj ) {} | |
return key === undefined || core_hasOwn.call( obj, key ); | |
}; | |
var isFunction = function(obj) { | |
return type(obj) === "function"; | |
}; | |
var isArray = Array.isArray || function(obj) { | |
return type(obj) === "array"; | |
}; | |
var extend = function() { | |
var options, name, src, copy, copyIsArray, clone, | |
target = arguments[0] || {}, | |
i = 1, | |
length = arguments.length, | |
deep = false; | |
// Handle a deep copy situation | |
if ( typeof target === "boolean" ) { | |
deep = target; | |
target = arguments[1] || {}; | |
// skip the boolean and the target | |
i = 2; | |
} | |
// Handle case when target is a string or something (possible in deep copy) | |
if ( typeof target !== "object" && !isFunction(target) ) { | |
target = {}; | |
} | |
// extend jQuery itself if only one argument is passed | |
if ( length === i ) { | |
target = this; | |
--i; | |
} | |
for ( ; i < length; i++ ) { | |
// Only deal with non-null/undefined values | |
if ((options = arguments[ i ]) != null) { | |
// Extend the base object | |
for ( name in options ) { | |
src = target[ name ]; | |
copy = options[ name ]; | |
// Prevent never-ending loop | |
if ( target === copy ) { | |
continue; | |
} | |
// Recurse if we're merging plain objects or arrays | |
if ( deep && copy && ( isPlainObject(copy) || (copyIsArray = isArray(copy)) ) ) { | |
if ( copyIsArray ) { | |
copyIsArray = false; | |
clone = src && isArray(src) ? src : []; | |
} else { | |
clone = src && isPlainObject(src) ? src : {}; | |
} | |
// Never move original objects, clone them | |
target[ name ] = extend( deep, clone, copy ); | |
// Don't bring in undefined values | |
} else if ( copy !== undefined ) { | |
target[ name ] = copy; | |
} | |
} | |
} | |
} | |
// Return the modified object | |
return target; | |
}; | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = extend; | |
} | |
var ajax = function ajax(options, callback) { | |
if (typeof options === "function") { | |
callback = options; | |
options = {}; | |
} | |
var call = function(fun) { | |
var args = Array.prototype.slice.call(arguments, 1); | |
if (typeof fun === typeof Function) { | |
fun.apply(this, args); | |
} | |
}; | |
var defaultOptions = { | |
method : "GET", | |
headers: {}, | |
json: true, | |
processData: true, | |
timeout: 10000 | |
}; | |
options = extend(true, defaultOptions, options); | |
if (options.auth) { | |
var token = btoa(options.auth.username + ':' + options.auth.password); | |
options.headers.Authorization = 'Basic ' + token; | |
} | |
var onSuccess = function(obj, resp, cb){ | |
if (!options.binary && !options.json && options.processData && typeof obj !== 'string') { | |
obj = JSON.stringify(obj); | |
} else if (!options.binary && options.json && typeof obj === 'string') { | |
try { | |
obj = JSON.parse(obj); | |
} catch (e) { | |
// Probably a malformed JSON from server | |
call(cb, e); | |
return; | |
} | |
} | |
call(cb, null, obj, resp); | |
}; | |
var onError = function(err, cb){ | |
var errParsed; | |
var errObj = err.responseText ? {status: err.status} : err; //this seems too clever | |
try{ | |
errParsed = JSON.parse(err.responseText); //would prefer not to have a try/catch clause | |
errObj = extend(true, {}, errObj, errParsed); | |
} catch(e){} | |
call(cb, errObj); | |
}; | |
if (typeof window !== 'undefined' && window.XMLHttpRequest) { | |
var timer,timedout = false; | |
var xhr = new XMLHttpRequest(); | |
xhr.open(options.method, options.url); | |
if (options.json) { | |
options.headers.Accept = 'application/json'; | |
options.headers['Content-Type'] = options.headers['Content-Type'] || 'application/json'; | |
if (options.body && options.processData && typeof options.body !== "string") { | |
options.body = JSON.stringify(options.body); | |
} | |
} | |
if (options.binary) { | |
xhr.responseType = 'arraybuffer'; | |
} | |
for (var key in options.headers){ | |
xhr.setRequestHeader(key, options.headers[key]); | |
} | |
if (!("body" in options)) { | |
options.body = null; | |
} | |
var abortReq = function() { | |
timedout=true; | |
xhr.abort(); | |
call(onError, xhr, callback); | |
}; | |
xhr.onreadystatechange = function() { | |
if (xhr.readyState !== 4 || timedout) { | |
return; | |
} | |
clearTimeout(timer); | |
if (xhr.status >= 200 && xhr.status < 300) { | |
var data; | |
if (options.binary) { | |
data = new Blob([xhr.response || ''], {type: xhr.getResponseHeader('Content-Type')}); | |
} else { | |
data = xhr.responseText; | |
} | |
call(onSuccess, data, xhr, callback); | |
} else { | |
call(onError, xhr, callback); | |
} | |
}; | |
if (options.timeout > 0) { | |
timer = setTimeout(abortReq, options.timeout); | |
} | |
xhr.send(options.body); | |
return {abort:abortReq}; | |
} else { | |
if (options.json) { | |
if (!options.binary) { | |
options.headers.Accept = 'application/json'; | |
} | |
options.headers['Content-Type'] = options.headers['Content-Type'] || 'application/json'; | |
} | |
if (options.binary) { | |
options.encoding = null; | |
options.json = false; | |
} | |
if (!options.processData) { | |
options.json = false; | |
} | |
return request(options, function(err, response, body) { | |
if (err) { | |
err.status = response ? response.statusCode : 400; | |
return call(onError, err, callback); | |
} | |
var content_type = response.headers['content-type']; | |
var data = (body || ''); | |
// CouchDB doesn't always return the right content-type for JSON data, so | |
// we check for ^{ and }$ (ignoring leading/trailing whitespace) | |
if (!options.binary && (options.json || !options.processData) && typeof data !== 'object' && | |
(/json/.test(content_type) || | |
(/^[\s]*\{/.test(data) && /\}[\s]*$/.test(data)))) { | |
data = JSON.parse(data); | |
} | |
if (data.error) { | |
data.status = response.statusCode; | |
call(onError, data, callback); | |
} | |
else { | |
call(onSuccess, data, response, callback); | |
} | |
}); | |
} | |
}; | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = ajax; | |
} | |
/*globals PouchAdapter: true */ | |
"use strict"; | |
var Pouch = function Pouch(name, opts, callback) { | |
if (!(this instanceof Pouch)) { | |
return new Pouch(name, opts, callback); | |
} | |
if (typeof opts === 'function' || typeof opts === 'undefined') { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof name === 'object') { | |
opts = name; | |
name = undefined; | |
} | |
var backend = Pouch.parseAdapter(opts.name || name); | |
opts.name = opts.name || backend.name; | |
opts.adapter = opts.adapter || backend.adapter; | |
if (!Pouch.adapters[opts.adapter]) { | |
throw 'Adapter is missing'; | |
} | |
if (!Pouch.adapters[opts.adapter].valid()) { | |
throw 'Invalid Adapter'; | |
} | |
var adapter = new PouchAdapter(opts, function(err, db) { | |
if (err) { | |
if (callback) { | |
callback(err); | |
} | |
return; | |
} | |
for (var plugin in Pouch.plugins) { | |
// In future these will likely need to be async to allow the plugin | |
// to initialise | |
var pluginObj = Pouch.plugins[plugin](db); | |
for (var api in pluginObj) { | |
// We let things like the http adapter use its own implementation | |
// as it shares a lot of code | |
if (!(api in db)) { | |
db[api] = pluginObj[api]; | |
} | |
} | |
} | |
db.taskqueue.ready(true); | |
db.taskqueue.execute(db); | |
callback(null, db); | |
}); | |
for (var j in adapter) { | |
this[j] = adapter[j]; | |
} | |
for (var plugin in Pouch.plugins) { | |
// In future these will likely need to be async to allow the plugin | |
// to initialise | |
var pluginObj = Pouch.plugins[plugin](this); | |
for (var api in pluginObj) { | |
// We let things like the http adapter use its own implementation | |
// as it shares a lot of code | |
if (!(api in this)) { | |
this[api] = pluginObj[api]; | |
} | |
} | |
} | |
}; | |
Pouch.DEBUG = false; | |
Pouch.adapters = {}; | |
Pouch.plugins = {}; | |
Pouch.parseAdapter = function(name) { | |
var match = name.match(/([a-z\-]*):\/\/(.*)/); | |
if (match) { | |
// the http adapter expects the fully qualified name | |
name = /http(s?)/.test(match[1]) ? match[1] + '://' + match[2] : match[2]; | |
var adapter = match[1]; | |
if (!Pouch.adapters[adapter].valid()) { | |
throw 'Invalid adapter'; | |
} | |
return {name: name, adapter: match[1]}; | |
} | |
var preferredAdapters = ['idb', 'leveldb', 'websql']; | |
for (var i = 0; i < preferredAdapters.length; ++i) { | |
if (preferredAdapters[i] in Pouch.adapters) { | |
return { | |
name: name, | |
adapter: preferredAdapters[i] | |
}; | |
} | |
} | |
throw 'No valid adapter found'; | |
}; | |
Pouch.destroy = function(name, callback) { | |
var opts = Pouch.parseAdapter(name); | |
var cb = function(err, response) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
for (var plugin in Pouch.plugins) { | |
Pouch.plugins[plugin]._delete(name); | |
} | |
if (Pouch.DEBUG) { | |
console.log(name + ': Delete Database'); | |
} | |
// call destroy method of the particular adaptor | |
Pouch.adapters[opts.adapter].destroy(opts.name, callback); | |
}; | |
// skip http and https adaptors for _all_dbs | |
var adapter = opts.adapter; | |
if (adapter === "http" || adapter === "https") { | |
cb(); | |
return; | |
} | |
// remove db from Pouch.ALL_DBS | |
new Pouch(Pouch.allDBName(opts.adapter), function(err, db) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
// check if db has been registered in Pouch.ALL_DBS | |
var dbname = Pouch.DBName(opts.adapter, opts.name); | |
db.get(dbname, function(err, doc) { | |
if (err) { | |
if (err.status === 404) { | |
cb(); | |
} else { | |
cb(err); | |
} | |
} else { | |
db.remove(doc, function(err, response) { | |
cb(err); | |
}); | |
} | |
}); | |
}); | |
}; | |
Pouch.adapter = function (id, obj) { | |
if (obj.valid()) { | |
Pouch.adapters[id] = obj; | |
} | |
}; | |
Pouch.plugin = function(id, obj) { | |
Pouch.plugins[id] = obj; | |
}; | |
// name of database used to keep track of databases | |
Pouch.ALL_DBS = "_all_dbs"; | |
Pouch.DBName = function(adapter, name) { | |
return [adapter, "-", name].join(''); | |
}; | |
Pouch.allDBName = function(adapter) { | |
return [adapter, "://", Pouch.ALL_DBS].join(''); | |
}; | |
Pouch.open = function(adapter, name, callback) { | |
// skip http and https adaptors for _all_dbs | |
if (adapter === "http" || adapter === "https") { | |
callback(); | |
return; | |
} | |
new Pouch(Pouch.allDBName(adapter), function(err, db) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
// check if db has been registered in Pouch.ALL_DBS | |
var dbname = Pouch.DBName(adapter, name); | |
db.get(dbname, function(err, response) { | |
if (err) { | |
if (err.status === 404) { | |
db.put({ | |
_id: dbname | |
}, callback); | |
} else { | |
callback(err); | |
} | |
} else { | |
callback(); | |
} | |
}); | |
}); | |
}; | |
Pouch._all_dbs = function(callback) { | |
var accumulate = function(adapters, all_dbs) { | |
if (adapters.length === 0) { | |
// remove duplicates | |
var result = []; | |
all_dbs.forEach(function(doc) { | |
var exists = result.some(function(db) { | |
return db.id === doc.id; | |
}); | |
if (!exists) { | |
result.push(doc); | |
} | |
}); | |
callback(null, result); | |
return; | |
} | |
var adapter = adapters.shift(); | |
// skip http and https adaptors for _all_dbs | |
if (adapter === "http" || adapter === "https") { | |
accumulate(adapters, all_dbs); | |
return; | |
} | |
new Pouch(Pouch.allDBName(adapter), function(err, db) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
db.allDocs({include_docs: true}, function(err, response) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
// append rows from the current adapter
all_dbs.unshift.apply(all_dbs, response.rows); | |
// code to clear _all_dbs. | |
// response.rows.forEach(function(row) { | |
// db.remove(row.doc, function() { | |
// console.log(arguments); | |
// }); | |
// }); | |
// recurse | |
accumulate(adapters, all_dbs); | |
}); | |
}); | |
}; | |
var adapters = Object.keys(Pouch.adapters); | |
accumulate(adapters, []); | |
}; | |
// Enumerate errors, add the status code so we can reflect the HTTP api | |
// in future | |
Pouch.Errors = { | |
MISSING_BULK_DOCS: { | |
status: 400, | |
error: 'bad_request', | |
reason: "Missing JSON list of 'docs'" | |
}, | |
MISSING_DOC: { | |
status: 404, | |
error: 'not_found', | |
reason: 'missing' | |
}, | |
REV_CONFLICT: { | |
status: 409, | |
error: 'conflict', | |
reason: 'Document update conflict' | |
}, | |
INVALID_ID: { | |
status: 400, | |
error: 'invalid_id', | |
reason: '_id field must contain a string' | |
}, | |
MISSING_ID: { | |
status: 412, | |
error: 'missing_id', | |
reason: '_id is required for puts' | |
}, | |
RESERVED_ID: { | |
status: 400, | |
error: 'bad_request', | |
reason: 'Only reserved document ids may start with underscore.' | |
}, | |
NOT_OPEN: { | |
status: 412, | |
error: 'precondition_failed', | |
reason: 'Database not open so cannot close' | |
}, | |
UNKNOWN_ERROR: { | |
status: 500, | |
error: 'unknown_error', | |
reason: 'Database encountered an unknown error' | |
}, | |
INVALID_REQUEST: { | |
status: 400, | |
error: 'invalid_request', | |
reason: 'Request was invalid' | |
}, | |
QUERY_PARSE_ERROR: { | |
status: 400, | |
error: 'query_parse_error', | |
reason: 'Some query parameter is invalid' | |
}, | |
BAD_REQUEST: { | |
status: 400, | |
error: 'bad_request', | |
reason: 'Something wrong with the request' | |
} | |
}; | |
if (typeof module !== 'undefined' && module.exports) { | |
global.Pouch = Pouch; | |
Pouch.merge = require('./pouch.merge.js').merge; | |
Pouch.collate = require('./pouch.collate.js').collate; | |
Pouch.replicate = require('./pouch.replicate.js').replicate; | |
Pouch.utils = require('./pouch.utils.js'); | |
module.exports = Pouch; | |
var PouchAdapter = require('./pouch.adapter.js'); | |
// load adapters known to work under node | |
var adapters = ['leveldb', 'http']; | |
adapters.map(function(adapter) { | |
var adapter_path = './adapters/pouch.'+adapter+'.js'; | |
require(adapter_path); | |
}); | |
require('./plugins/pouchdb.mapreduce.js'); | |
} else { | |
window.Pouch = Pouch; | |
} | |
'use strict'; | |
// a few hacks to get things in the right place for node.js | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = Pouch; | |
} | |
var stringCollate = function(a, b) { | |
// See: https://github.com/daleharvey/pouchdb/issues/40 | |
// This is incompatible with the CouchDB implementation, but it's the
// best we can do for now
return (a === b) ? 0 : ((a > b) ? 1 : -1); | |
}; | |
var objectCollate = function(a, b) { | |
var ak = Object.keys(a), bk = Object.keys(b); | |
var len = Math.min(ak.length, bk.length); | |
for (var i = 0; i < len; i++) { | |
// First sort the keys | |
var sort = Pouch.collate(ak[i], bk[i]); | |
if (sort !== 0) { | |
return sort; | |
} | |
// if the keys are equal sort the values | |
sort = Pouch.collate(a[ak[i]], b[bk[i]]); | |
if (sort !== 0) { | |
return sort; | |
} | |
} | |
return (ak.length === bk.length) ? 0 : | |
(ak.length > bk.length) ? 1 : -1; | |
}; | |
var arrayCollate = function(a, b) { | |
var len = Math.min(a.length, b.length); | |
for (var i = 0; i < len; i++) { | |
var sort = Pouch.collate(a[i], b[i]); | |
if (sort !== 0) { | |
return sort; | |
} | |
} | |
return (a.length === b.length) ? 0 : | |
(a.length > b.length) ? 1 : -1; | |
}; | |
// The collation is defined by Erlang's ordered terms:
// the atoms null, true and false come first, then numbers, strings,
// arrays, then objects
var collationIndex = function(x) { | |
var id = ['boolean', 'number', 'string', 'object']; | |
if (id.indexOf(typeof x) !== -1) { | |
if (x === null) { | |
return 1; | |
} | |
return id.indexOf(typeof x) + 2; | |
} | |
if (Array.isArray(x)) { | |
return 4.5; | |
} | |
}; | |
Pouch.collate = function(a, b) { | |
var ai = collationIndex(a); | |
var bi = collationIndex(b); | |
if ((ai - bi) !== 0) { | |
return ai - bi; | |
} | |
if (a === null) { | |
return 0; | |
} | |
if (typeof a === 'number') { | |
return a - b; | |
} | |
if (typeof a === 'boolean') { | |
return a < b ? -1 : 1; | |
} | |
if (typeof a === 'string') { | |
return stringCollate(a, b); | |
} | |
if (Array.isArray(a)) { | |
return arrayCollate(a, b); | |
} | |
if (typeof a === 'object') { | |
return objectCollate(a, b); | |
} | |
}; | |
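// Illustrative examples of the resulting collation order (not in the original
// source); values of different types compare by their collation index, so:
//   Pouch.collate(null, true)     // => negative (null sorts before booleans)
//   Pouch.collate(false, true)    // => -1       (false sorts before true)
//   Pouch.collate(['a'], {a: 1})  // => negative (arrays sort before objects)
//   Pouch.collate('10', '9')      // => -1       (plain string comparison)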
/*globals rootToLeaf: false, extend: false, traverseRevTree: false */ | |
'use strict'; | |
// a few hacks to get things in the right place for node.js | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = Pouch; | |
var utils = require('./pouch.utils.js'); | |
for (var k in utils) { | |
global[k] = utils[k]; | |
} | |
} | |
// for a better overview of what this is doing, read: | |
// https://github.com/apache/couchdb/blob/master/src/couchdb/couch_key_tree.erl | |
// | |
// But for a quick intro: CouchDB uses a revision tree to store a document's
// history, A -> B -> C. When a document has conflicts, that is a branch in the
// tree, A -> (B1 | B2 -> C). We store these as a nested array in the format
// | |
// KeyTree = [Path ... ] | |
// Path = {pos: position_from_root, ids: Tree} | |
// Tree = [Key, [Tree, ...]], in particular single node: [Key, []] | |
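// For example (illustrative, not from the original source), a document with
// revision 1-a, a child revision 2-b and a conflicting revision 2-c would be
// stored as:
//   [{pos: 1, ids: ['a', [['b', []], ['c', []]]]}]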
// Turn a path as a flat array into a tree with a single branch | |
function pathToTree(path) { | |
var root = [path.shift(), []]; | |
var leaf = root; | |
var nleaf; | |
while (path.length) { | |
nleaf = [path.shift(), []]; | |
leaf[1].push(nleaf); | |
leaf = nleaf; | |
} | |
return root; | |
} | |
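// Illustrative example (not in the original source); note that pathToTree
// consumes its argument via shift():
//   pathToTree(['a', 'b', 'c'])
//   // => ['a', [['b', [['c', []]]]]]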
// Merge two trees together | |
// The roots of tree1 and tree2 must be the same revision | |
function mergeTree(in_tree1, in_tree2) { | |
var queue = [{tree1: in_tree1, tree2: in_tree2}]; | |
var conflicts = false; | |
while (queue.length > 0) { | |
var item = queue.pop(); | |
var tree1 = item.tree1; | |
var tree2 = item.tree2; | |
for (var i = 0; i < tree2[1].length; i++) { | |
if (!tree1[1][0]) { | |
conflicts = 'new_leaf'; | |
tree1[1][0] = tree2[1][i]; | |
continue; | |
} | |
var merged = false; | |
for (var j = 0; j < tree1[1].length; j++) { | |
if (tree1[1][j][0] === tree2[1][i][0]) { | |
queue.push({tree1: tree1[1][j], tree2: tree2[1][i]}); | |
merged = true; | |
} | |
} | |
if (!merged) { | |
conflicts = 'new_branch'; | |
tree1[1].push(tree2[1][i]); | |
tree1[1].sort(); | |
} | |
} | |
} | |
return {conflicts: conflicts, tree: in_tree1}; | |
} | |
function doMerge(tree, path, dontExpand) { | |
var restree = []; | |
var conflicts = false; | |
var merged = false; | |
var res, branch; | |
if (!tree.length) { | |
return {tree: [path], conflicts: 'new_leaf'}; | |
} | |
tree.forEach(function(branch) { | |
if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) { | |
// Paths start at the same position and have the same root, so they need
// to be merged
res = mergeTree(branch.ids, path.ids); | |
restree.push({pos: branch.pos, ids: res.tree}); | |
conflicts = conflicts || res.conflicts; | |
merged = true; | |
} else if (dontExpand !== true) { | |
// The paths start at a different position: take the earliest path and
// traverse it until it is at the same point from the root as the path we want
// to merge. If the keys match we return the longer path with the other merged.
// After stemming we don't want to expand the trees
var t1 = branch.pos < path.pos ? branch : path; | |
var t2 = branch.pos < path.pos ? path : branch; | |
var diff = t2.pos - t1.pos; | |
var candidateParents = []; | |
var trees = []; | |
trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null}); | |
while (trees.length > 0) { | |
var item = trees.pop(); | |
if (item.diff === 0) { | |
if (item.ids[0] === t2.ids[0]) { | |
candidateParents.push(item); | |
} | |
continue; | |
} | |
if (!item.ids) { | |
continue; | |
} | |
/*jshint loopfunc:true */ | |
item.ids[1].forEach(function(el, idx) { | |
trees.push({ids: el, diff: item.diff-1, parent: item.ids, parentIdx: idx}); | |
}); | |
} | |
var el = candidateParents[0]; | |
if (!el) { | |
restree.push(branch); | |
} else { | |
res = mergeTree(el.ids, t2.ids); | |
el.parent[1][el.parentIdx] = res.tree; | |
restree.push({pos: t1.pos, ids: t1.ids}); | |
conflicts = conflicts || res.conflicts; | |
merged = true; | |
} | |
} else { | |
restree.push(branch); | |
} | |
}); | |
// We didn't find a branch to merge into, so add the path as a new branch
if (!merged) { | |
restree.push(path); | |
} | |
restree.sort(function(a, b) { | |
return a.pos - b.pos; | |
}); | |
return { | |
tree: restree, | |
conflicts: conflicts || 'internal_node' | |
}; | |
} | |
// To ensure we don't grow the revision tree infinitely, we stem old revisions
function stem(tree, depth) { | |
// First we break out the tree into a complete list of root-to-leaf paths,
// then cut off the start of each path and generate a new set of flat trees
var stemmedPaths = rootToLeaf(tree).map(function(path) { | |
var stemmed = path.ids.slice(-depth); | |
return { | |
pos: path.pos + (path.ids.length - stemmed.length), | |
ids: pathToTree(stemmed) | |
}; | |
}); | |
// Then we remerge all those flat trees together, ensuring that we don't
// connect trees that would go beyond the depth limit | |
return stemmedPaths.reduce(function(prev, current, i, arr) { | |
return doMerge(prev, current, true).tree; | |
}, [stemmedPaths.shift()]); | |
} | |
Pouch.merge = function(tree, path, depth) { | |
// Ugh, nicer way to not modify arguments in place? | |
tree = extend(true, [], tree); | |
path = extend(true, {}, path); | |
var newTree = doMerge(tree, path); | |
return { | |
tree: stem(newTree.tree, depth), | |
conflicts: newTree.conflicts | |
}; | |
}; | |
// We fetch all leaves of the revision tree and sort them based on tree length
// and whether they were deleted; undeleted documents with the longest revision
// tree (most edits) win
// The final sort algorithm is slightly documented in a sidebar here: | |
// http://guide.couchdb.org/draft/conflicts.html | |
Pouch.merge.winningRev = function(metadata) { | |
var deletions = metadata.deletions || {}; | |
var leafs = []; | |
traverseRevTree(metadata.rev_tree, function(isLeaf, pos, id) { | |
if (isLeaf) { | |
leafs.push({pos: pos, id: id}); | |
} | |
}); | |
leafs.forEach(function(leaf) { | |
leaf.deleted = leaf.id in deletions; | |
}); | |
leafs.sort(function(a, b) { | |
if (a.deleted !== b.deleted) { | |
return a.deleted > b.deleted ? 1 : -1; | |
} | |
if (a.pos !== b.pos) { | |
return b.pos - a.pos; | |
} | |
return a.id < b.id ? 1 : -1; | |
}); | |
return leafs[0].pos + '-' + leafs[0].id; | |
}; | |
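// Illustrative example (not in the original source): for a rev tree with
// revision 1-a and two leaves 2-b and 2-c, where 2-b has been deleted, the
// undeleted leaf wins:
//   Pouch.merge.winningRev({
//     rev_tree: [{pos: 1, ids: ['a', [['b', []], ['c', []]]]}],
//     deletions: {b: true}
//   })
//   // => '2-c'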
/*globals call: false, Crypto: false*/ | |
'use strict'; | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = Pouch; | |
} | |
// We create a basic promise so the caller can cancel the replication, possibly
// before we have actually started listening for changes, etc.
var Promise = function() { | |
this.cancelled = false; | |
this.cancel = function() { | |
this.cancelled = true; | |
}; | |
}; | |
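// Illustrative usage (not in the original source; the database names and URL
// are hypothetical): Pouch.replicate returns one of these promises, so a
// continuous replication can be stopped with cancel():
//   var rep = Pouch.replicate('local-db', 'http://127.0.0.1:5984/remote-db',
//                             {continuous: true});
//   rep.cancel();  // stops the replication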
// The RequestManager ensures that only one database request is active at
// a time; it ensures we don't max out simultaneous HTTP requests and makes
// the replication process easier to reason about
var RequestManager = function() { | |
var queue = []; | |
var api = {}; | |
var processing = false; | |
// Add a new request to the queue; if we aren't currently processing anything
// then process it immediately
api.enqueue = function(fun, args) { | |
queue.push({fun: fun, args: args}); | |
if (!processing) { | |
api.process(); | |
} | |
}; | |
// Process the next request | |
api.process = function() { | |
if (processing || !queue.length) { | |
return; | |
} | |
processing = true; | |
var task = queue.shift(); | |
task.fun.apply(null, task.args); | |
}; | |
// We need to be notified whenever a request is complete to process | |
// the next request | |
api.notifyRequestComplete = function() { | |
processing = false; | |
api.process(); | |
}; | |
return api; | |
}; | |
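// Illustrative usage (not in the original source; fetchDoc and its arguments
// are hypothetical): queued functions run one at a time and must call
// notifyRequestComplete() when they finish so the next task can start:
//   var requests = new RequestManager();
//   requests.enqueue(fetchDoc, ['some-id']);     // runs immediately if idle
//   requests.enqueue(fetchDoc, ['another-id']);  // waits for the first to finish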
// TODO: check CouchDB's replication id generation, generate a unique id particular | |
// to this replication | |
var genReplicationId = function(src, target, opts) { | |
var filterFun = opts.filter ? opts.filter.toString() : ''; | |
return '_local/' + Crypto.MD5(src.id() + target.id() + filterFun); | |
}; | |
// A checkpoint lets us restart replications from when they were last cancelled | |
var fetchCheckpoint = function(src, id, callback) { | |
src.get(id, function(err, doc) { | |
if (err && err.status === 404) { | |
callback(null, 0); | |
} else { | |
callback(null, doc.last_seq); | |
} | |
}); | |
}; | |
var writeCheckpoint = function(src, id, checkpoint, callback) { | |
var check = { | |
_id: id, | |
last_seq: checkpoint | |
}; | |
src.get(check._id, function(err, doc) { | |
if (doc && doc._rev) { | |
check._rev = doc._rev; | |
} | |
src.put(check, function(err, doc) { | |
callback(); | |
}); | |
}); | |
}; | |
function replicate(src, target, opts, promise) { | |
var requests = new RequestManager(); | |
var writeQueue = []; | |
var repId = genReplicationId(src, target, opts); | |
var results = []; | |
var completed = false; | |
var pending = 0; | |
var last_seq = 0; | |
var continuous = opts.continuous || false; | |
var result = { | |
ok: true, | |
start_time: new Date(), | |
docs_read: 0, | |
docs_written: 0 | |
}; | |
function docsWritten(err, res, len) { | |
requests.notifyRequestComplete(); | |
if (opts.onChange) { | |
for (var i = 0; i < len; i++) { | |
/*jshint validthis:true */ | |
opts.onChange.apply(this, [result]); | |
} | |
} | |
pending -= len; | |
result.docs_written += len; | |
isCompleted(); | |
} | |
function writeDocs() { | |
if (!writeQueue.length) { | |
return requests.notifyRequestComplete(); | |
} | |
var len = writeQueue.length; | |
target.bulkDocs({docs: writeQueue}, {new_edits: false}, function(err, res) { | |
docsWritten(err, res, len); | |
}); | |
writeQueue = []; | |
} | |
function eachRev(id, rev) { | |
src.get(id, {revs: true, rev: rev, attachments: true}, function(err, doc) { | |
requests.notifyRequestComplete(); | |
writeQueue.push(doc); | |
requests.enqueue(writeDocs); | |
}); | |
} | |
function onRevsDiff(err, diffs) { | |
requests.notifyRequestComplete(); | |
if (err) { | |
if (continuous) { | |
promise.cancel(); | |
} | |
call(opts.complete, err, null); | |
return; | |
} | |
// We already have the full document stored | |
if (Object.keys(diffs).length === 0) { | |
pending--; | |
isCompleted(); | |
return; | |
} | |
var _enqueuer = function (rev) { | |
requests.enqueue(eachRev, [id, rev]); | |
}; | |
for (var id in diffs) { | |
diffs[id].missing.forEach(_enqueuer); | |
} | |
} | |
function fetchRevsDiff(diff) { | |
target.revsDiff(diff, onRevsDiff); | |
} | |
function onChange(change) { | |
last_seq = change.seq; | |
results.push(change); | |
result.docs_read++; | |
pending++; | |
var diff = {}; | |
diff[change.id] = change.changes.map(function(x) { return x.rev; }); | |
requests.enqueue(fetchRevsDiff, [diff]); | |
} | |
function complete() { | |
completed = true; | |
isCompleted(); | |
} | |
function isCompleted() { | |
if (completed && pending === 0) { | |
result.end_time = Date.now(); | |
writeCheckpoint(src, repId, last_seq, function(err, res) { | |
call(opts.complete, err, result); | |
}); | |
} | |
} | |
fetchCheckpoint(src, repId, function(err, checkpoint) { | |
if (err) { | |
return call(opts.complete, err); | |
} | |
last_seq = checkpoint; | |
// Was the replication cancelled by the caller before it had a chance
// to start? Shouldn't we be calling complete?
if (promise.cancelled) { | |
return; | |
} | |
var repOpts = { | |
limit: 25, | |
continuous: continuous, | |
since: last_seq, | |
style: 'all_docs', | |
onChange: onChange, | |
complete: complete | |
}; | |
if (opts.filter) { | |
repOpts.filter = opts.filter; | |
} | |
if (opts.query_params) { | |
repOpts.query_params = opts.query_params; | |
} | |
var changes = src.changes(repOpts); | |
if (opts.continuous) { | |
promise.cancel = changes.cancel; | |
} | |
}); | |
} | |
function toPouch(db, callback) { | |
if (typeof db === 'string') { | |
return new Pouch(db, callback); | |
} | |
callback(null, db); | |
} | |
Pouch.replicate = function(src, target, opts, callback) { | |
if (opts instanceof Function) { | |
callback = opts; | |
opts = {}; | |
} | |
if (opts === undefined) { | |
opts = {}; | |
} | |
opts.complete = callback; | |
var replicateRet = new Promise(); | |
toPouch(src, function(err, src) { | |
if (err) { | |
return call(callback, err); | |
} | |
toPouch(target, function(err, target) { | |
if (err) { | |
return call(callback, err); | |
} | |
replicate(src, target, opts, replicateRet); | |
}); | |
}); | |
return replicateRet; | |
}; | |
/*jshint strict: false */ | |
/*global request: true, Buffer: true, escape: true, $:true */ | |
/*global extend: true, Crypto: true */ | |
// Pretty dumb name for a function, just wraps callback calls so we don't
// have to write if (callback) callback() everywhere
var call = function(fun) { | |
var args = Array.prototype.slice.call(arguments, 1); | |
if (typeof fun === typeof Function) { | |
fun.apply(this, args); | |
} | |
}; | |
// Wrapper for functions that call the bulkDocs API with a single doc;
// if the first result is an error, return that error
var yankError = function(callback) { | |
return function(err, results) { | |
if (err || results[0].error) { | |
call(callback, err || results[0]); | |
} else { | |
call(callback, null, results[0]); | |
} | |
}; | |
}; | |
var isLocalId = function(id) { | |
return (/^_local/).test(id); | |
}; | |
var isAttachmentId = function(id) { | |
return (/\//.test(id) && !isLocalId(id) && !/^_design/.test(id)); | |
}; | |
// Parse document ids: docid[/attachid] | |
// - /attachid is optional, and can have slashes in it too | |
// - int ids and strings beginning with _design or _local are not split | |
// returns an object: { docId: docid, attachmentId: attachid } | |
var parseDocId = function(id) { | |
var ids = (typeof id === 'string') && !(/^_(design|local)\//.test(id)) ? | |
id.split('/') : [id]; | |
return { | |
docId: ids[0], | |
attachmentId: ids.splice(1).join('/').replace(/^\/+/, '') | |
}; | |
}; | |
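// Illustrative examples (not in the original source):
//   parseDocId('mydoc/notes/a.txt')  // => {docId: 'mydoc', attachmentId: 'notes/a.txt'}
//   parseDocId('_design/app')        // => {docId: '_design/app', attachmentId: ''}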
// check if a specific revision of a doc has been deleted | |
// - metadata: the metadata object from the doc store | |
// - rev: (optional) the revision to check. defaults to metadata.rev | |
var isDeleted = function(metadata, rev) { | |
if (!metadata || !metadata.deletions) { | |
return false; | |
} | |
if (!rev) { | |
rev = Pouch.merge.winningRev(metadata); | |
} | |
if (rev.indexOf('-') >= 0) { | |
rev = rev.split('-')[1]; | |
} | |
return metadata.deletions[rev] === true; | |
}; | |
// Determine if an ID is valid
// - invalid IDs begin with an underscore but do not begin with '_design' or '_local'
// - any other string value is a valid id | |
var isValidId = function(id) { | |
if (/^_/.test(id)) { | |
return (/^_(design|local)/).test(id); | |
} | |
return true; | |
}; | |
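// Illustrative examples (not in the original source):
//   isValidId('mydoc')        // => true
//   isValidId('_design/app')  // => true
//   isValidId('_bogus')       // => false (reserved prefix)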
// Preprocess documents, parse their revisions, assign an id and a | |
// revision for new writes that are missing them, etc | |
var parseDoc = function(doc, newEdits) { | |
var error = null; | |
// check for an attachment id and add attachments as needed | |
if (doc._id) { | |
var id = parseDocId(doc._id); | |
if (id.attachmentId !== '') { | |
var attachment = btoa(JSON.stringify(doc)); | |
doc = {_id: id.docId}; | |
if (!doc._attachments) { | |
doc._attachments = {}; | |
} | |
doc._attachments[id.attachmentId] = { | |
content_type: 'application/json', | |
data: attachment | |
}; | |
} | |
} | |
var nRevNum; | |
var newRevId; | |
var revInfo; | |
if (newEdits) { | |
if (!doc._id) { | |
doc._id = Math.uuid(); | |
} | |
newRevId = Math.uuid(32, 16).toLowerCase(); | |
if (doc._rev) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
if (!revInfo) { | |
throw "invalid value for property '_rev'"; | |
} | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], [[newRevId, []]]] | |
}]; | |
nRevNum = parseInt(revInfo[1], 10) + 1; | |
} else { | |
doc._rev_tree = [{ | |
pos: 1, | |
ids : [newRevId, []] | |
}]; | |
nRevNum = 1; | |
} | |
} else { | |
if (doc._revisions) { | |
doc._rev_tree = [{ | |
pos: doc._revisions.start - doc._revisions.ids.length + 1, | |
ids: doc._revisions.ids.reduce(function(acc, x) { | |
if (acc === null) { | |
return [x, []]; | |
} else { | |
return [x, [acc]]; | |
} | |
}, null) | |
}]; | |
nRevNum = doc._revisions.start; | |
newRevId = doc._revisions.ids[0]; | |
} | |
if (!doc._rev_tree) { | |
revInfo = /^(\d+)-(.+)$/.exec(doc._rev); | |
nRevNum = parseInt(revInfo[1], 10); | |
newRevId = revInfo[2]; | |
doc._rev_tree = [{ | |
pos: parseInt(revInfo[1], 10), | |
ids: [revInfo[2], []] | |
}]; | |
} | |
} | |
if (typeof doc._id !== 'string') { | |
error = Pouch.Errors.INVALID_ID; | |
} | |
else if (!isValidId(doc._id)) { | |
error = Pouch.Errors.RESERVED_ID; | |
} | |
doc._id = decodeURIComponent(doc._id); | |
doc._rev = [nRevNum, newRevId].join('-'); | |
if (error) { | |
return error; | |
} | |
return Object.keys(doc).reduce(function(acc, key) { | |
if (/^_/.test(key) && key !== '_attachments') { | |
acc.metadata[key.slice(1)] = doc[key]; | |
} else { | |
acc.data[key] = doc[key]; | |
} | |
return acc; | |
}, {metadata : {}, data : {}}); | |
}; | |
var compareRevs = function(a, b) { | |
// Sort by id | |
if (a.id !== b.id) { | |
return (a.id < b.id ? -1 : 1); | |
} | |
// Then by deleted | |
if (a.deleted ^ b.deleted) { | |
return (a.deleted ? -1 : 1); | |
} | |
// Then by rev id | |
if (a.rev_tree[0].pos === b.rev_tree[0].pos) { | |
return (a.rev_tree[0].ids < b.rev_tree[0].ids ? -1 : 1); | |
} | |
// Then by depth of edits | |
return (a.rev_tree[0].start < b.rev_tree[0].start ? -1 : 1); | |
}; | |
// Pretty much all below can be combined into a higher order function to | |
// traverse revisions | |
// Callback has signature function(isLeaf, pos, id, [context]) | |
// The return value from the callback will be passed as context to all children of that node | |
var traverseRevTree = function(revs, callback) { | |
var toVisit = []; | |
revs.forEach(function(tree) { | |
toVisit.push({pos: tree.pos, ids: tree.ids}); | |
}); | |
while (toVisit.length > 0) { | |
var node = toVisit.pop(); | |
var pos = node.pos; | |
var tree = node.ids; | |
var newCtx = callback(tree[1].length === 0, pos, tree[0], node.ctx); | |
/*jshint loopfunc: true */ | |
tree[1].forEach(function(branch) { | |
toVisit.push({pos: pos+1, ids: branch, ctx: newCtx}); | |
}); | |
} | |
}; | |
var collectRevs = function(path) { | |
var revs = []; | |
traverseRevTree([path], function(isLeaf, pos, id) { | |
revs.push({rev: pos + "-" + id, status: 'available'}); | |
}); | |
return revs; | |
}; | |
var collectLeaves = function(revs) { | |
var leaves = []; | |
traverseRevTree(revs, function(isLeaf, pos, id) { | |
if (isLeaf) { | |
leaves.unshift({rev: pos + "-" + id, pos: pos}); | |
} | |
}); | |
leaves.sort(function(a, b) { | |
return b.pos - a.pos; | |
}); | |
leaves.map(function(leaf) { delete leaf.pos; }); | |
return leaves; | |
}; | |
var collectConflicts = function(revs, deletions) { | |
// Remove all deleted leaves | |
var leaves = collectLeaves(revs); | |
for(var i = 0; i < leaves.length; i++){ | |
var leaf = leaves.shift(); | |
var rev = leaf.rev.split("-")[1]; | |
if(deletions && !deletions[rev]){ | |
leaves.push(leaf); | |
} | |
} | |
// First is current rev | |
leaves.shift(); | |
return leaves.map(function(x) { return x.rev; }); | |
}; | |
// returns first element of arr satisfying callback predicate | |
var arrayFirst = function(arr, callback) { | |
for (var i = 0; i < arr.length; i++) { | |
if (callback(arr[i], i) === true) { | |
return arr[i]; | |
} | |
} | |
return false; | |
}; | |
var filterChange = function(opts) { | |
return function(change) { | |
if (opts.filter && !opts.filter.call(this, change.doc)) { | |
return; | |
} | |
if (!opts.include_docs) { | |
delete change.doc; | |
} | |
call(opts.onChange, change); | |
}; | |
}; | |
// returns an array of all branches from root to leaf, each in the form:
// {pos: position_of_root, ids: [id, ...]}
var rootToLeaf = function(tree) { | |
var paths = []; | |
traverseRevTree(tree, function(isLeaf, pos, id, history) { | |
history = history ? history.slice(0) : []; | |
history.push(id); | |
if (isLeaf) { | |
var rootPos = pos + 1 - history.length; | |
paths.unshift({pos: rootPos, ids: history}); | |
} | |
return history; | |
}); | |
return paths; | |
}; | |
// Basic wrapper for localStorage | |
var win = this; | |
var localJSON = (function(){ | |
if (!win.localStorage) { | |
return false; | |
} | |
return { | |
set: function(prop, val) { | |
localStorage.setItem(prop, JSON.stringify(val)); | |
}, | |
get: function(prop, def) { | |
try { | |
if (localStorage.getItem(prop) === null) { | |
return def; | |
} | |
return JSON.parse((localStorage.getItem(prop) || 'false')); | |
} catch(err) { | |
return def; | |
} | |
}, | |
remove: function(prop) { | |
localStorage.removeItem(prop); | |
} | |
}; | |
})(); | |
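// Illustrative usage (not in the original source; the key and value are
// hypothetical):
//   localJSON.set('pouch-test', {seq: 5});
//   localJSON.get('pouch-test', null);  // => {seq: 5}
//   localJSON.remove('pouch-test');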
if (typeof module !== 'undefined' && module.exports) { | |
// use node.js's crypto library instead of the Crypto object created by deps/uuid.js | |
var crypto = require('crypto'); | |
var Crypto = { | |
MD5: function(str) { | |
return crypto.createHash('md5').update(str).digest('hex'); | |
} | |
}; | |
var extend = require('./deps/extend'); | |
var ajax = require('./deps/ajax'); | |
request = require('request'); | |
_ = require('underscore'); | |
$ = _; | |
module.exports = { | |
Crypto: Crypto, | |
call: call, | |
yankError: yankError, | |
isLocalId: isLocalId, | |
isAttachmentId: isAttachmentId, | |
parseDocId: parseDocId, | |
parseDoc: parseDoc, | |
isDeleted: isDeleted, | |
compareRevs: compareRevs, | |
collectRevs: collectRevs, | |
collectLeaves: collectLeaves, | |
collectConflicts: collectConflicts, | |
arrayFirst: arrayFirst, | |
filterChange: filterChange, | |
atob: function(str) { | |
return decodeURIComponent(escape(new Buffer(str, 'base64').toString('binary'))); | |
}, | |
btoa: function(str) { | |
return new Buffer(unescape(encodeURIComponent(str)), 'binary').toString('base64'); | |
}, | |
extend: extend, | |
ajax: ajax, | |
traverseRevTree: traverseRevTree, | |
rootToLeaf: rootToLeaf | |
}; | |
} | |
var Changes = function() { | |
var api = {}; | |
var listeners = {}; | |
window.addEventListener("storage", function(e) { | |
api.notify(e.key); | |
}); | |
api.addListener = function(db_name, id, db, opts) { | |
if (!listeners[db_name]) { | |
listeners[db_name] = {}; | |
} | |
listeners[db_name][id] = { | |
db: db, | |
opts: opts | |
}; | |
}; | |
api.removeListener = function(db_name, id) { | |
delete listeners[db_name][id]; | |
}; | |
api.clearListeners = function(db_name) { | |
delete listeners[db_name]; | |
}; | |
api.notify = function(db_name) { | |
if (!listeners[db_name]) { return; } | |
Object.keys(listeners[db_name]).forEach(function (i) { | |
var opts = listeners[db_name][i].opts; | |
listeners[db_name][i].db.changes({ | |
include_docs: opts.include_docs, | |
conflicts: opts.conflicts, | |
continuous: false, | |
descending: false, | |
filter: opts.filter, | |
since: opts.since, | |
onChange: function(c) { | |
if (c.seq > opts.since && !opts.cancelled) { | |
opts.since = c.seq; | |
call(opts.onChange, c); | |
} | |
} | |
}); | |
}); | |
}; | |
return api; | |
}; | |
/*globals yankError: false, extend: false, call: false, parseDocId: false */ | |
"use strict"; | |
/* | |
* A generic pouch adapter | |
*/ | |
var PouchAdapter = function(opts, callback) { | |
var api = {}; | |
var customApi = Pouch.adapters[opts.adapter](opts, function(err, db) { | |
if (err) { | |
if (callback) { | |
callback(err); | |
} | |
return; | |
} | |
for (var j in api) { | |
if (!db.hasOwnProperty(j)) { | |
db[j] = api[j]; | |
} | |
} | |
// Don't call Pouch.open for ALL_DBS | |
// Pouch.open saves the db's name into ALL_DBS | |
if (opts.name === Pouch.ALL_DBS) { | |
callback(err, db); | |
} else { | |
Pouch.open(opts.adapter, opts.name, function(err) { callback(err, db); }); | |
} | |
}); | |
api.post = function (doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
return customApi.bulkDocs({docs: [doc]}, opts, yankError(callback)); | |
}; | |
api.put = function(doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (!doc || !('_id' in doc)) { | |
return call(callback, Pouch.Errors.MISSING_ID); | |
} | |
return customApi.bulkDocs({docs: [doc]}, opts, yankError(callback)); | |
}; | |
api.putAttachment = function (id, rev, blob, type, callback) { | |
if (typeof type === 'function') { | |
callback = type; | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
if (typeof type === 'undefined') { | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
id = parseDocId(id); | |
function createAttachment(doc) { | |
doc._attachments = doc._attachments || {}; | |
doc._attachments[id.attachmentId] = { | |
content_type: type, | |
data: blob | |
}; | |
api.put(doc, callback); | |
} | |
api.get(id.docId, function(err, doc) { | |
// create new doc | |
if (err && err.error === Pouch.Errors.MISSING_DOC.error) { | |
createAttachment({_id: id.docId}); | |
return; | |
} | |
if (err) { | |
call(callback, err); | |
return; | |
} | |
if (doc._rev !== rev) { | |
call(callback, Pouch.Errors.REV_CONFLICT); | |
return; | |
} | |
createAttachment(doc); | |
}); | |
}; | |
api.removeAttachment = function (id, rev, callback) { | |
id = parseDocId(id); | |
api.get(id.docId, function(err, obj) { | |
if (err) { | |
call(callback, err); | |
return; | |
} | |
if (obj._rev !== rev) { | |
call(callback, Pouch.Errors.REV_CONFLICT); | |
return; | |
} | |
obj._attachments = obj._attachments || {}; | |
delete obj._attachments[id.attachmentId]; | |
api.put(obj, callback); | |
}); | |
}; | |
api.remove = function (doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (opts === undefined) { | |
opts = {}; | |
} | |
opts.was_delete = true; | |
var newDoc = extend(true, {}, doc); | |
newDoc._deleted = true; | |
return customApi.bulkDocs({docs: [newDoc]}, opts, yankError(callback)); | |
}; | |
api.revsDiff = function (req, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
var ids = Object.keys(req); | |
var count = 0; | |
var missing = {}; | |
function readDoc(err, doc, id) { | |
req[id].map(function(revId) { | |
var matches = function(x) { return x.rev !== revId; }; | |
if (!doc || doc._revs_info.every(matches)) { | |
if (!missing[id]) { | |
missing[id] = {missing: []}; | |
} | |
missing[id].missing.push(revId); | |
} | |
}); | |
if (++count === ids.length) { | |
return call(callback, null, missing); | |
} | |
} | |
ids.map(function(id) { | |
api.get(id, {revs_info: true}, function(err, doc) { | |
readDoc(err, doc, id); | |
}); | |
}); | |
}; | |
/* Begin api wrappers. Specific functionality to storage belongs in the _[method] */ | |
api.get = function (id, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('get', arguments); | |
return; | |
} | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
id = parseDocId(id); | |
if (id.attachmentId !== '') { | |
return customApi.getAttachment(id, callback); | |
} | |
return customApi._get(id, opts, callback); | |
}; | |
api.getAttachment = function(id, opts, callback) { | |
if (opts instanceof Function) { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof id === 'string') { | |
id = parseDocId(id); | |
} | |
return customApi._getAttachment(id, opts, callback); | |
}; | |
api.allDocs = function(opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('allDocs', arguments); | |
return; | |
} | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if ('keys' in opts) { | |
if ('startkey' in opts) { | |
call(callback, extend({ | |
reason: 'Query parameter `start_key` is not compatible with multi-get' | |
}, Pouch.Errors.QUERY_PARSE_ERROR)); | |
return; | |
} | |
if ('endkey' in opts) { | |
call(callback, extend({ | |
reason: 'Query parameter `end_key` is not compatible with multi-get' | |
}, Pouch.Errors.QUERY_PARSE_ERROR)); | |
return; | |
} | |
} | |
return customApi._allDocs(opts, callback); | |
}; | |
api.changes = function(opts) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('changes', arguments); | |
return; | |
} | |
return customApi._changes(opts); | |
}; | |
api.close = function(callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('close', arguments); | |
return; | |
} | |
return customApi._close(callback); | |
}; | |
api.info = function(callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('info', arguments); | |
return; | |
} | |
return customApi._info(callback); | |
}; | |
api.id = function() { | |
return customApi._id(); | |
}; | |
api.type = function() { | |
return (typeof customApi._type === 'function') ? customApi._type() : opts.adapter; | |
}; | |
api.bulkDocs = function(req, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('bulkDocs', arguments); | |
return; | |
} | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (!opts) { | |
opts = {}; | |
} | |
if (!req || !req.docs || req.docs.length < 1) { | |
return call(callback, Pouch.Errors.MISSING_BULK_DOCS); | |
} | |
if (!Array.isArray(req.docs)) { | |
return call(callback, Pouch.Errors.QUERY_PARSE_ERROR); | |
} | |
if (!('new_edits' in opts)) { | |
opts.new_edits = true; | |
} | |
return customApi._bulkDocs(req, opts, callback); | |
}; | |
/* End Wrappers */ | |
var taskqueue = {}; | |
taskqueue.ready = false; | |
taskqueue.queue = []; | |
api.taskqueue = {}; | |
api.taskqueue.execute = function (db) { | |
if (taskqueue.ready) { | |
taskqueue.queue.forEach(function(d) { | |
db[d.task].apply(null, d.parameters); | |
}); | |
} | |
}; | |
api.taskqueue.ready = function() { | |
if (arguments.length === 0) { | |
return taskqueue.ready; | |
} | |
taskqueue.ready = arguments[0]; | |
}; | |
api.taskqueue.addTask = function(task, parameters) { | |
taskqueue.queue.push({ task: task, parameters: parameters }); | |
}; | |
api.replicate = {}; | |
api.replicate.from = function (url, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
return Pouch.replicate(url, customApi, opts, callback); | |
}; | |
api.replicate.to = function (dbName, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
return Pouch.replicate(customApi, dbName, opts, callback); | |
}; | |
for (var j in api) { | |
if (!customApi.hasOwnProperty(j)) { | |
customApi[j] = api[j]; | |
} | |
} | |
// Http adapter can skip setup so we force the db to be ready and execute any jobs | |
if (opts.skipSetup) { | |
api.taskqueue.ready(true); | |
api.taskqueue.execute(api); | |
} | |
return customApi; | |
}; | |
if (typeof module !== 'undefined' && module.exports) { | |
module.exports = PouchAdapter; | |
} | |
/*globals Pouch: true, call: false, ajax: true */ | |
/*globals require: false, console: false */ | |
"use strict"; | |
var HTTP_TIMEOUT = 10000; | |
// parseUri 1.2.2 | |
// (c) Steven Levithan <stevenlevithan.com> | |
// MIT License | |
function parseUri (str) { | |
var o = parseUri.options; | |
var m = o.parser[o.strictMode ? "strict" : "loose"].exec(str); | |
var uri = {}; | |
var i = 14; | |
while (i--) { | |
uri[o.key[i]] = m[i] || ""; | |
} | |
uri[o.q.name] = {}; | |
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) { | |
if ($1) { | |
uri[o.q.name][$1] = $2; | |
} | |
}); | |
return uri; | |
} | |
parseUri.options = { | |
strictMode: false, | |
key: ["source","protocol","authority","userInfo","user","password","host", | |
"port","relative","path","directory","file","query","anchor"], | |
q: { | |
name: "queryKey", | |
parser: /(?:^|&)([^&=]*)=?([^&]*)/g | |
}, | |
parser: { | |
strict: /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/, | |
loose: /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/ | |
} | |
}; | |
// Get all the information you possibly can about the URI given by name and | |
// return it as a suitable object. | |
function getHost(name) { | |
// If the given name contains "http:" | |
if (/http(s?):/.test(name)) { | |
// Parse the URI into all its little bits
var uri = parseUri(name); | |
// Store the fact that it is a remote URI | |
uri.remote = true; | |
// Store the user and password as a separate auth object | |
if (uri.user || uri.password) { | |
uri.auth = {username: uri.user, password: uri.password}; | |
} | |
// Split the path part of the URI into parts using '/' as the delimiter | |
// after removing any leading '/' and any trailing '/' | |
var parts = uri.path.replace(/(^\/|\/$)/g, '').split('/'); | |
// Store the first part as the database name and remove it from the parts | |
// array | |
uri.db = parts.pop(); | |
// Restore the path by joining all the remaining parts (all the parts | |
// except for the database name) with '/'s | |
uri.path = parts.join('/'); | |
return uri; | |
} | |
// If the given name does not contain 'http:' then return a very basic object | |
// with no host, the current path, the given name as the database name and no | |
// username/password | |
return {host: '', path: '/', db: name, auth: false}; | |
} | |
// Generate a URL with the host data given by opts and the given path | |
function genDBUrl(opts, path) { | |
// If the host is remote | |
if (opts.remote) { | |
// If the host already has a path, then we need to have a path delimiter | |
// Otherwise, the path delimiter is the empty string | |
var pathDel = !opts.path ? '' : '/'; | |
// Return the URL made up of all the host's information and the given path | |
return opts.protocol + '://' + opts.host + ':' + opts.port + '/' + | |
opts.path + pathDel + opts.db + '/' + path; | |
} | |
// If the host is not remote, then return the URL made up of just the | |
// database name and the given path | |
return '/' + opts.db + '/' + path; | |
} | |
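// Illustrative example (not in the original source; the URL is hypothetical):
//   var host = getHost('http://localhost:5984/testdb');
//   // => {protocol: 'http', host: 'localhost', port: '5984',
//   //     path: '', db: 'testdb', remote: true, ...}
//   genDBUrl(host, '_all_docs');
//   // => 'http://localhost:5984/testdb/_all_docs'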
// Generate a URL with the host data given by opts and the given path | |
function genUrl(opts, path) { | |
if (opts.remote) { | |
return opts.protocol + '://' + opts.host + ':' + opts.port + '/' + path; | |
} | |
return '/' + path; | |
} | |
// Implements the PouchDB API for dealing with CouchDB instances over HTTP | |
var HttpPouch = function(opts, callback) { | |
// Parse the URI given by opts.name into an easy-to-use object | |
var host = getHost(opts.name); | |
if (opts.auth) { | |
host.auth = opts.auth; | |
} | |
// Generate the database URL based on the host | |
var db_url = genDBUrl(host, ''); | |
// The functions that will be publicly available for HttpPouch
var api = {}; | |
var uuids = { | |
list: [], | |
get: function(opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {count: 10}; | |
} | |
var cb = function(err, body) { | |
if (err || !('uuids' in body)) { | |
call(callback, err || Pouch.Errors.UNKNOWN_ERROR); | |
} else { | |
uuids.list = uuids.list.concat(body.uuids); | |
call(callback, null, "OK"); | |
} | |
}; | |
var params = '?count=' + opts.count; | |
ajax({ | |
auth: host.auth, | |
method: 'GET', | |
url: genUrl(host, '_uuids') + params | |
}, cb); | |
} | |
}; | |
// Create a new CouchDB database based on the given opts | |
var createDB = function(){ | |
ajax({auth: host.auth, method: 'PUT', url: db_url}, function(err, ret) { | |
// If we get an "Unauthorized" error | |
if (err && err.status === 401) { | |
// Test if the database already exists | |
ajax({auth: host.auth, method: 'HEAD', url: db_url}, function (err, ret) { | |
// If there is still an error | |
if (err) { | |
// Give the error to the callback to deal with | |
call(callback, err); | |
} else { | |
// Continue as if there had been no errors | |
call(callback, null, api); | |
} | |
}); | |
// If there were no errors or if the only error is "Precondition Failed"
// (note: "Precondition Failed" occurs when we try to create a database | |
// that already exists) | |
} else if (!err || err.status === 412) { | |
// Continue as if there had been no errors | |
call(callback, null, api); | |
} else { | |
call(callback, Pouch.Errors.UNKNOWN_ERROR); | |
} | |
}); | |
}; | |
if (!opts.skipSetup) { | |
ajax({auth: host.auth, method: 'GET', url: db_url}, function(err, ret) { | |
//check if the db exists | |
if (err) { | |
if (err.status === 404) { | |
//if it doesn't, create it | |
createDB(); | |
} else { | |
call(callback, err); | |
} | |
} else { | |
//go do stuff with the db | |
call(callback, null, api); | |
} | |
}); | |
} | |
api.type = function() { | |
return 'http'; | |
}; | |
// The HttpPouch's ID is its URL | |
api.id = function() { | |
return genDBUrl(host, ''); | |
}; | |
api.request = function(options, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('request', arguments); | |
return; | |
} | |
options.auth = host.auth; | |
options.url = genDBUrl(host, options.url); | |
ajax(options, callback); | |
}; | |
// Sends a POST request to the host calling the CouchDB _compact function
api.compact = function(callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('compact', arguments); | |
return; | |
} | |
ajax({ | |
auth: host.auth, | |
url: genDBUrl(host, '_compact'), | |
method: 'POST' | |
}, callback); | |
}; | |
// Calls GET on the host, which gets back a JSON string containing | |
// couchdb: A welcome string | |
// version: The version of CouchDB it is running | |
api.info = function(callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('info', arguments); | |
return; | |
} | |
ajax({ | |
auth: host.auth, | |
method:'GET', | |
url: genDBUrl(host, '') | |
}, callback); | |
}; | |
// Get the document with the given id from the database given by host. | |
// The id could be solely the _id in the database, or it may be a | |
// _design/ID or _local/ID path | |
api.get = function(id, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('get', arguments); | |
return; | |
} | |
// If no options were given, set the callback to the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
// List of parameters to add to the GET request | |
var params = []; | |
// If it exists, add the opts.revs value to the list of parameters. | |
// If revs=true then the resulting JSON will include a field | |
// _revisions containing an array of the revision IDs. | |
if (opts.revs) { | |
params.push('revs=true'); | |
} | |
// If it exists, add the opts.revs_info value to the list of parameters. | |
// If revs_info=true then the resulting JSON will include the field | |
// _revs_info containing an array of objects in which each object
// represents an available revision.
if (opts.revs_info) { | |
params.push('revs_info=true'); | |
} | |
// If it exists, add the opts.open_revs value to the list of parameters. | |
// If open_revs=all then the resulting JSON will include all the leaf | |
// revisions. If open_revs=["rev1", "rev2",...] then the resulting JSON | |
// will contain an array of objects containing data of all revisions | |
if (opts.open_revs) { | |
if (opts.open_revs !== "all") { | |
opts.open_revs = JSON.stringify(opts.open_revs); | |
} | |
params.push('open_revs=' + opts.open_revs); | |
} | |
// If it exists, add the opts.attachments value to the list of parameters. | |
// If attachments=true the resulting JSON will include the base64-encoded | |
// contents in the "data" property of each attachment. | |
if (opts.attachments) { | |
params.push('attachments=true'); | |
} | |
// If it exists, add the opts.rev value to the list of parameters. | |
// If rev is given a revision number then get the specified revision. | |
if (opts.rev) { | |
params.push('rev=' + opts.rev); | |
} | |
// If it exists, add the opts.conflicts value to the list of parameters. | |
// If conflicts=true then the resulting JSON will include the field | |
// _conflicts containing all the conflicting revisions. | |
if (opts.conflicts) { | |
params.push('conflicts=' + opts.conflicts); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
params = params === '' ? '' : '?' + params; | |
// Set the options for the ajax call | |
var options = { | |
auth: host.auth, | |
method: 'GET', | |
url: genDBUrl(host, id + params) | |
}; | |
// If the given id contains at least one '/' and the part before the '/' | |
// is NOT "_design" and is NOT "_local" | |
// OR | |
// If the given id contains at least two '/' and the part before the first | |
// '/' is "_design". | |
// TODO This second condition seems strange since if parts[0] === '_design' | |
// then we already know that parts[0] !== '_local'. | |
var parts = id.split('/'); | |
if ((parts.length > 1 && parts[0] !== '_design' && parts[0] !== '_local') || | |
(parts.length > 2 && parts[0] === '_design' && parts[0] !== '_local')) { | |
// Binary is expected back from the server | |
options.binary = true; | |
} | |
// Get the document | |
ajax(options, function(err, doc, xhr) { | |
// If the document does not exist, send an error to the callback | |
if (err) { | |
return call(callback, Pouch.Errors.MISSING_DOC); | |
} | |
// Send the document to the callback | |
call(callback, null, doc, xhr); | |
}); | |
}; | |
// Delete the document given by doc from the database given by host. | |
api.remove = function(doc, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('remove', arguments); | |
return; | |
} | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
// Delete the document | |
ajax({ | |
auth: host.auth, | |
method:'DELETE', | |
url: genDBUrl(host, doc._id) + '?rev=' + doc._rev | |
}, callback); | |
}; | |
// Remove the attachment given by the id and rev | |
api.removeAttachment = function idb_removeAttachment(id, rev, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('removeAttachment', arguments); | |
return; | |
} | |
ajax({ | |
auth: host.auth, | |
method: 'DELETE', | |
url: genDBUrl(host, id) + '?rev=' + rev | |
}, callback); | |
}; | |
// Add the attachment given by blob and its contentType property | |
// to the document with the given id, the revision given by rev, and | |
// add it to the database given by host. | |
api.putAttachment = function(id, rev, blob, type, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('putAttachment', arguments); | |
return; | |
} | |
if (typeof type === 'function') { | |
callback = type; | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
if (typeof type === 'undefined') { | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
var url = genDBUrl(host, id); | |
if (rev) { | |
url += '?rev=' + rev; | |
} | |
// Add the attachment | |
ajax({ | |
auth: host.auth, | |
method:'PUT', | |
url: url, | |
headers: {'Content-Type': type}, | |
processData: false, | |
body: blob | |
}, callback); | |
}; | |
// Add the document given by doc (in JSON string format) to the database | |
// given by host. This fails if the doc has no _id field. | |
api.put = function(doc, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('put', arguments); | |
return; | |
} | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (!doc || !('_id' in doc)) { | |
return call(callback, Pouch.Errors.MISSING_ID); | |
} | |
// List of parameters to add to the PUT request
var params = []; | |
// If it exists, add the opts.new_edits value to the list of parameters. | |
// If new_edits = false then the database will NOT assign this document a | |
// new revision number | |
if (opts && typeof opts.new_edits !== 'undefined') { | |
params.push('new_edits=' + opts.new_edits); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
if (params !== '') { | |
params = '?' + params; | |
} | |
// Add the document | |
ajax({ | |
auth: host.auth, | |
method: 'PUT', | |
url: genDBUrl(host, doc._id) + params, | |
body: doc | |
}, callback); | |
}; | |
// Add the document given by doc (in JSON string format) to the database | |
// given by host. This does not assume that doc is a new document (i.e. it does
// not need to have an _id or a _rev field).
api.post = function(doc, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('post', arguments); | |
return; | |
} | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (! ("_id" in doc)) { | |
if (uuids.list.length > 0) { | |
doc._id = uuids.list.pop(); | |
api.put(doc, opts, callback); | |
} else {
uuids.get(function(err, resp) { | |
if (err) { | |
return call(callback, Pouch.Errors.UNKNOWN_ERROR); | |
} | |
doc._id = uuids.list.pop(); | |
api.put(doc, opts, callback); | |
}); | |
} | |
} else { | |
api.put(doc, opts, callback); | |
} | |
}; | |
// Update/create multiple documents given by req in the database | |
// given by host. | |
api.bulkDocs = function(req, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('bulkDocs', arguments); | |
return; | |
} | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (!opts) { | |
opts = {}; | |
} | |
// If opts.new_edits exists, add it to the document data to be
// sent to the database.
// If new_edits=false then it prevents the database from creating | |
// new revision numbers for the documents. Instead it just uses | |
// the old ones. This is used in database replication. | |
if (typeof opts.new_edits !== 'undefined') { | |
req.new_edits = opts.new_edits; | |
} | |
// Update/create the documents | |
ajax({ | |
auth: host.auth, | |
method:'POST', | |
url: genDBUrl(host, '_bulk_docs'), | |
body: req | |
}, callback); | |
}; | |
// Get a listing of the documents in the database given | |
// by host and ordered by increasing id. | |
api.allDocs = function(opts, callback) { | |
// If no options were given, set the callback to be the second parameter | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('allDocs', arguments); | |
return; | |
} | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
// List of parameters to add to the GET request | |
var params = []; | |
var body; | |
var method = 'GET'; | |
// TODO I don't see conflicts as a valid parameter for a | |
// _all_docs request (see http://wiki.apache.org/couchdb/HTTP_Document_API#all_docs) | |
if (opts.conflicts) { | |
params.push('conflicts=true'); | |
} | |
// If opts.descending is truthy add it to params | |
if (opts.descending) { | |
params.push('descending=true'); | |
} | |
// If opts.include_docs exists, add the include_docs value to the | |
// list of parameters. | |
// If include_docs=true then include the associated document with each | |
// result. | |
if (opts.include_docs) { | |
params.push('include_docs=true'); | |
} | |
// If opts.startkey exists, add the startkey value to the list of | |
// parameters. | |
// If startkey is given then the returned list of documents will | |
// start with the document whose id is startkey. | |
if (opts.startkey) { | |
params.push('startkey=' + | |
encodeURIComponent(JSON.stringify(opts.startkey))); | |
} | |
// If opts.endkey exists, add the endkey value to the list of parameters. | |
// If endkey is given then the returned list of documents will
// end with the document whose id is endkey. | |
if (opts.endkey) { | |
params.push('endkey=' + encodeURIComponent(JSON.stringify(opts.endkey))); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
if (params !== '') { | |
params = '?' + params; | |
} | |
// If keys are supplied, issue a POST request to circumvent GET query string limits | |
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options | |
if (typeof opts.keys !== 'undefined') { | |
method = 'POST'; | |
body = JSON.stringify({keys:opts.keys}); | |
} | |
// Get the document listing | |
ajax({ | |
auth: host.auth, | |
method: method, | |
url: genDBUrl(host, '_all_docs' + params), | |
body: body | |
}, callback); | |
}; | |
// Get a list of changes made to documents in the database given by host. | |
// TODO According to the README, there should be two other methods here, | |
// api.changes.addListener and api.changes.removeListener. | |
api.changes = function(opts) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('changes', arguments); | |
return; | |
} | |
if (Pouch.DEBUG) { | |
console.log(db_url + ': Start Changes Feed: continuous=' + opts.continuous); | |
} | |
// Query string of all the parameters to add to the GET request | |
var params = [], | |
paramsStr; | |
if (opts.style) { | |
params.push('style='+opts.style); | |
} | |
// If opts.include_docs is truthy, or opts.filter exists and is a
// function, add include_docs=true to the query string.
// If include_docs=true then include the associated document with each | |
// result. | |
if (opts.include_docs || opts.filter && typeof opts.filter === 'function') { | |
params.push('include_docs=true'); | |
} | |
// If opts.continuous exists, add the feed value to the query string. | |
// If feed=longpoll then it waits for either a timeout or a change to | |
// occur before returning. | |
if (opts.continuous) { | |
params.push('feed=longpoll'); | |
} | |
// If opts.conflicts exists, add the conflicts value to the query string. | |
// TODO I can't find documentation of what conflicts=true does. See | |
// http://wiki.apache.org/couchdb/HTTP_database_API#Changes | |
if (opts.conflicts) { | |
params.push('conflicts=true'); | |
} | |
// If opts.descending exists, add the descending value to the query string. | |
// if descending=true then the change results are returned in | |
// descending order (most recent change first). | |
if (opts.descending) { | |
params.push('descending=true'); | |
} | |
// If opts.filter exists and is a string then add the filter value | |
// to the query string. | |
// If filter is given a string containing the name of a filter in | |
// the design, then only documents passing through the filter will | |
// be returned. | |
if (opts.filter && typeof opts.filter === 'string') { | |
params.push('filter=' + opts.filter); | |
} | |
// If opts.query_params exists, pass it through to the changes request. | |
// These parameters may be used by the filter on the source database. | |
if (opts.query_params && typeof opts.query_params === 'object') { | |
for (var param_name in opts.query_params) { | |
if (opts.query_params.hasOwnProperty(param_name)) { | |
params.push(param_name+'='+opts.query_params[param_name]); | |
} | |
} | |
} | |
paramsStr = '?'; | |
if (params.length > 0) { | |
paramsStr += params.join('&'); | |
} | |
var xhr; | |
var last_seq; | |
// Get all the changes starting with the one immediately after the
// sequence number given by since. | |
var fetch = function(since, callback) { | |
// Set the options for the ajax call | |
var xhrOpts = { | |
auth: host.auth, method:'GET', | |
url: genDBUrl(host, '_changes' + paramsStr + '&since=' + since), | |
timeout: null // _changes can take a long time to generate, especially when filtered | |
}; | |
last_seq = since; | |
if (opts.aborted) { | |
return; | |
} | |
// Get the changes | |
xhr = ajax(xhrOpts, callback); | |
}; | |
// If opts.since exists, get all the changes from the sequence | |
// number given by opts.since. Otherwise, get all the changes | |
// from the sequence number 0. | |
var fetchTimeout = 10; | |
var fetchRetryCount = 0; | |
var fetched = function(err, res) { | |
// If the result of the ajax call (res) contains changes (res.results) | |
if (res && res.results) { | |
// For each change | |
res.results.forEach(function(c) { | |
var hasFilter = opts.filter && typeof opts.filter === 'function'; | |
if (opts.aborted || hasFilter && !opts.filter.apply(this, [c.doc])) { | |
return; | |
} | |
// Process the change | |
call(opts.onChange, c); | |
}); | |
} | |
// The changes feed may have timed out with no results | |
// if so reuse last update sequence | |
if (res && res.last_seq) { | |
last_seq = res.last_seq; | |
} | |
if (opts.continuous) { | |
// Increase retry delay exponentially as long as errors persist | |
if (err) { | |
fetchRetryCount += 1; | |
} else { | |
fetchRetryCount = 0; | |
} | |
var timeoutMultiplier = 1 << fetchRetryCount; | |
// i.e. Math.pow(2, fetchRetryCount) | |
var retryWait = fetchTimeout * timeoutMultiplier; | |
var maximumWait = opts.maximumWait || 30000; | |
          if (retryWait > maximumWait) {
            call(opts.complete, err || Pouch.Errors.UNKNOWN_ERROR, null);
            // give up instead of queueing yet another fetch after completing
            return;
          }
// Queue a call to fetch again with the newest sequence number | |
setTimeout(function () { | |
fetch(last_seq, fetched); | |
}, retryWait); | |
} else { | |
// We're done, call the callback | |
call(opts.complete, null, res); | |
} | |
}; | |
fetch(opts.since || 0, fetched); | |
// Return a method to cancel this method from processing any more | |
return { | |
cancel: function() { | |
if (Pouch.DEBUG) { | |
console.log(db_url + ': Cancel Changes Feed'); | |
} | |
opts.aborted = true; | |
xhr.abort(); | |
} | |
}; | |
}; | |
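  // Illustrative sketch of how a caller might consume this changes feed
  // (assumes `db` is a Pouch instance backed by this http adapter):
  //
  //   var feed = db.changes({
  //     continuous: true,
  //     since: 0,
  //     include_docs: true,
  //     onChange: function(change) { console.log(change.id, change.seq); },
  //     complete: function(err, resp) { /* fires on error or when cancelled */ }
  //   });
  //   // later, to stop long-polling:
  //   feed.cancel();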
  // Given a set of document/revision IDs in req, returns the subset of
  // those that do NOT correspond to revisions stored in the database.
// See http://wiki.apache.org/couchdb/HttpPostRevsDiff | |
api.revsDiff = function(req, opts, callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('revsDiff', arguments); | |
return; | |
} | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
// Get the missing document/revision IDs | |
ajax({ | |
auth: host.auth, | |
method:'POST', | |
url: genDBUrl(host, '_revs_diff'), | |
body: req | |
}, function(err, res) { | |
call(callback, err, res); | |
}); | |
}; | |
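  // Illustrative _revs_diff exchange (request/response shape per the CouchDB
  // wiki linked above; the ids and revs below are made up):
  //
  //   db.revsDiff({'doc1': ['1-a', '2-b']}, function(err, res) {
  //     // res => {'doc1': {missing: ['2-b']}}
  //   });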
api.close = function(callback) { | |
if (!api.taskqueue.ready()) { | |
api.taskqueue.addTask('close', arguments); | |
return; | |
} | |
call(callback, null); | |
}; | |
return api; | |
}; | |
// Delete the HttpPouch specified by the given name. | |
HttpPouch.destroy = function(name, callback) { | |
var host = getHost(name); | |
ajax({auth: host.auth, method: 'DELETE', url: genDBUrl(host, '')}, callback); | |
}; | |
// HttpPouch is a valid adapter. | |
HttpPouch.valid = function() { | |
return true; | |
}; | |
if (typeof module !== 'undefined' && module.exports) { | |
// running in node | |
var pouchdir = '../'; | |
Pouch = require(pouchdir + 'pouch.js'); | |
ajax = Pouch.utils.ajax; | |
} | |
// Set HttpPouch to be the adapter used with the http scheme. | |
Pouch.adapter('http', HttpPouch); | |
Pouch.adapter('https', HttpPouch); | |
// While most of the IDB behaviors match between implementations a | |
// lot of the names still differ. This section tries to normalize the | |
// different objects & methods. | |
window.indexedDB = window.indexedDB || | |
window.mozIndexedDB || | |
window.webkitIndexedDB; | |
// still needed for R/W transactions in Android Chrome. follow MDN example: | |
// https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#transaction | |
// note though that Chrome Canary fails on undefined READ_WRITE constants | |
// on the native IDBTransaction object | |
window.IDBTransaction = (window.IDBTransaction && window.IDBTransaction.READ_WRITE) | |
? window.IDBTransaction | |
: (window.webkitIDBTransaction && window.webkitIDBTransaction.READ_WRITE) | |
? window.webkitIDBTransaction | |
: { READ_WRITE: 'readwrite' }; | |
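// For example, after the normalization above a read/write transaction can be
// opened the same way everywhere, whether READ_WRITE ends up being the legacy
// numeric constant or the fallback string 'readwrite':
//   var txn = idb.transaction(['document-store'], IDBTransaction.READ_WRITE);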
window.IDBKeyRange = window.IDBKeyRange || | |
window.webkitIDBKeyRange; | |
window.storageInfo = window.storageInfo || | |
window.webkitStorageInfo; | |
window.requestFileSystem = window.requestFileSystem || | |
window.webkitRequestFileSystem; | |
var idbError = function(callback) { | |
return function(event) { | |
call(callback, { | |
status: 500, | |
error: event.type, | |
reason: event.target | |
}); | |
}; | |
}; | |
var IdbPouch = function(opts, callback) { | |
// IndexedDB requires a versioned database structure, this is going to make | |
// it hard to dynamically create object stores if we needed to for things | |
// like views | |
var POUCH_VERSION = 1; | |
// The object stores created for each database | |
// DOC_STORE stores the document meta data, its revision history and state | |
var DOC_STORE = 'document-store'; | |
// BY_SEQ_STORE stores a particular version of a document, keyed by its | |
// sequence id | |
var BY_SEQ_STORE = 'by-sequence'; | |
// Where we store attachments | |
var ATTACH_STORE = 'attach-store'; | |
// Where we store meta data | |
var META_STORE = 'meta-store'; | |
// Where we detect blob support | |
  var DETECT_BLOB_SUPPORT_STORE = 'detect-blob-support';
var name = opts.name; | |
var req = indexedDB.open(name, POUCH_VERSION); | |
var meta = { | |
id: 'meta-store', | |
updateSeq: 0, | |
}; | |
var blobSupport = null; | |
var instanceId = null; | |
var api = {}; | |
var idb = null; | |
if (Pouch.DEBUG) | |
console.log(name + ': Open Database'); | |
// TODO: before we release, make sure we write upgrade needed | |
// in a way that supports a future upgrade path | |
req.onupgradeneeded = function(e) { | |
var db = e.target.result; | |
db.createObjectStore(DOC_STORE, {keyPath : 'id'}) | |
.createIndex('seq', 'seq', {unique: true}); | |
db.createObjectStore(BY_SEQ_STORE, {autoIncrement : true}) | |
.createIndex('_rev', '_rev', {unique: true}); | |
db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'}); | |
db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false}); | |
db.createObjectStore(DETECT_BLOB_SUPPORT_STORE); | |
}; | |
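  // Object-store layout created above:
  //   document-store       doc metadata keyed by id, unique 'seq' index
  //   by-sequence          doc revisions under an auto-incrementing key,
  //                        unique '_rev' index
  //   attach-store         attachment bodies keyed by digest
  //   meta-store           adapter metadata keyed by id (updateSeq, instance id)
  //   detect-blob-support  scratch store used only for Blob feature detection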
req.onsuccess = function(e) { | |
idb = e.target.result; | |
var txn = idb.transaction([META_STORE, DETECT_BLOB_SUPPORT_STORE], IDBTransaction.READ_WRITE); | |
idb.onversionchange = function() { | |
idb.close(); | |
}; | |
// polyfill the new onupgradeneeded api for chrome. can get rid of when | |
// saucelabs moves to chrome 23 | |
if (idb.setVersion && Number(idb.version) !== POUCH_VERSION) { | |
var versionReq = idb.setVersion(POUCH_VERSION); | |
versionReq.onsuccess = function(evt) { | |
function setVersionComplete() { | |
req.onsuccess(e); | |
} | |
evt.target.result.oncomplete = setVersionComplete; | |
req.onupgradeneeded(e); | |
}; | |
return; | |
} | |
var req = txn.objectStore(META_STORE).get('meta-store'); | |
req.onsuccess = function(e) { | |
var reqDBId, | |
result; | |
if (e.target.result) { | |
meta = e.target.result; | |
} | |
if (name + '_id' in meta) { | |
instanceId = meta[name + '_id']; | |
} else { | |
instanceId = Math.uuid(); | |
meta[name + '_id'] = instanceId; | |
reqDBId = txn.objectStore(META_STORE).put(meta); | |
} | |
// detect blob support | |
try { | |
txn.objectStore(DETECT_BLOB_SUPPORT_STORE).put(new Blob(), "key"); | |
blobSupport = true; | |
} catch (e) { | |
blobSupport = false; | |
} finally { | |
call(callback, null, api); | |
} | |
    };
}; | |
req.onerror = idbError(callback); | |
api.type = function() { | |
return 'idb'; | |
}; | |
  // Each database needs a unique id so that we can store the sequence
  // checkpoint without other databases confusing it.
api.id = function idb_id() { | |
return instanceId; | |
}; | |
api._bulkDocs = function idb_bulkDocs(req, opts, callback) { | |
var newEdits = opts.new_edits; | |
var userDocs = extend(true, [], req.docs); | |
// Parse the docs, give them a sequence number for the result | |
var docInfos = userDocs.map(function(doc, i) { | |
var newDoc = parseDoc(doc, newEdits); | |
newDoc._bulk_seq = i; | |
if (doc._deleted) { | |
if (!newDoc.metadata.deletions) { | |
newDoc.metadata.deletions = {}; | |
} | |
newDoc.metadata.deletions[doc._rev.split('-')[1]] = true; | |
} | |
return newDoc; | |
}); | |
var docInfoErrors = docInfos.filter(function(docInfo) { | |
return docInfo.error; | |
}); | |
if (docInfoErrors.length) { | |
return call(callback, docInfoErrors[0]); | |
} | |
var results = []; | |
var docs = []; | |
// Group multiple edits to the same document | |
docInfos.forEach(function(docInfo) { | |
if (docInfo.error) { | |
return results.push(docInfo); | |
} | |
if (!docs.length || !newEdits || docInfo.metadata.id !== docs[0].metadata.id) { | |
return docs.unshift(docInfo); | |
} | |
// We mark subsequent bulk docs with a duplicate id as conflicts | |
results.push(makeErr(Pouch.Errors.REV_CONFLICT, docInfo._bulk_seq)); | |
}); | |
function processDocs() { | |
if (!docs.length) { | |
return; | |
} | |
var currentDoc = docs.shift(); | |
var req = txn.objectStore(DOC_STORE).get(currentDoc.metadata.id); | |
req.onsuccess = function process_docRead(event) { | |
var oldDoc = event.target.result; | |
if (!oldDoc) { | |
insertDoc(currentDoc); | |
} else { | |
updateDoc(oldDoc, currentDoc); | |
} | |
}; | |
} | |
function complete(event) { | |
var aresults = []; | |
results.sort(sortByBulkSeq); | |
results.forEach(function(result) { | |
delete result._bulk_seq; | |
if (result.error) { | |
aresults.push(result); | |
return; | |
} | |
var metadata = result.metadata; | |
var rev = Pouch.merge.winningRev(metadata); | |
aresults.push({ | |
ok: true, | |
id: metadata.id, | |
rev: rev | |
}); | |
if (isLocalId(metadata.id)) { | |
return; | |
} | |
IdbPouch.Changes.notify(name); | |
localStorage[name] = (localStorage[name] === "a") ? "b" : "a"; | |
}); | |
call(callback, null, aresults); | |
} | |
function preprocessAttachment(att, callback) { | |
if (att.stub) { | |
return callback(); | |
} | |
if (typeof att.data === 'string') { | |
var data = atob(att.data); | |
att.digest = 'md5-' + Crypto.MD5(data); | |
if (blobSupport) { | |
var type = att.content_type; | |
att.data = new Blob([data], {type: type}); | |
} | |
return callback(); | |
} | |
var reader = new FileReader(); | |
reader.onloadend = function(e) { | |
att.digest = 'md5-' + Crypto.MD5(this.result); | |
if (!blobSupport) { | |
att.data = btoa(this.result); | |
} | |
callback(); | |
}; | |
reader.readAsBinaryString(att.data); | |
} | |
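    // Illustrative before/after for preprocessAttachment (values made up):
    //
    //   // supplied inline as base64:
    //   {content_type: 'text/plain', data: 'aGVsbG8='}
    //   // after preprocessing with blob support:
    //   {content_type: 'text/plain', data: Blob, digest: 'md5-...'}
    //   // without blob support the data stays as a (base64) string and only
    //   // the digest is added.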
function preprocessAttachments(callback) { | |
if (!docInfos.length) { | |
return callback(); | |
} | |
var docv = 0; | |
docInfos.forEach(function(docInfo) { | |
var attachments = docInfo.data && docInfo.data._attachments ? | |
Object.keys(docInfo.data._attachments) : []; | |
if (!attachments.length) { | |
return done(); | |
} | |
var recv = 0; | |
for (var key in docInfo.data._attachments) { | |
preprocessAttachment(docInfo.data._attachments[key], function() { | |
recv++; | |
if (recv == attachments.length) { | |
done(); | |
} | |
}); | |
} | |
}); | |
function done() { | |
docv++; | |
if (docInfos.length === docv) { | |
callback(); | |
} | |
} | |
} | |
function writeDoc(docInfo, callback) { | |
var err = null; | |
var recv = 0; | |
docInfo.data._id = docInfo.metadata.id; | |
docInfo.data._rev = docInfo.metadata.rev; | |
meta.updateSeq++; | |
var req = txn.objectStore(META_STORE).put(meta); | |
if (isDeleted(docInfo.metadata, docInfo.metadata.rev)) { | |
docInfo.data._deleted = true; | |
} | |
var attachments = docInfo.data._attachments ? | |
Object.keys(docInfo.data._attachments) : []; | |
for (var key in docInfo.data._attachments) { | |
if (!docInfo.data._attachments[key].stub) { | |
var data = docInfo.data._attachments[key].data; | |
delete docInfo.data._attachments[key].data; | |
var digest = docInfo.data._attachments[key].digest; | |
saveAttachment(docInfo, digest, data, function(err) { | |
recv++; | |
collectResults(err); | |
}); | |
} else { | |
recv++; | |
collectResults(); | |
} | |
} | |
if (!attachments.length) { | |
finish(); | |
} | |
function collectResults(attachmentErr) { | |
if (!err) { | |
if (attachmentErr) { | |
err = attachmentErr; | |
call(callback, err); | |
} else if (recv == attachments.length) { | |
finish(); | |
} | |
} | |
} | |
function finish() { | |
var dataReq = txn.objectStore(BY_SEQ_STORE).put(docInfo.data); | |
dataReq.onsuccess = function(e) { | |
if (Pouch.DEBUG) | |
console.log(name + ': Wrote Document ', docInfo.metadata.id); | |
docInfo.metadata.seq = e.target.result; | |
// Current _rev is calculated from _rev_tree on read | |
delete docInfo.metadata.rev; | |
var metaDataReq = txn.objectStore(DOC_STORE).put(docInfo.metadata); | |
metaDataReq.onsuccess = function() { | |
results.push(docInfo); | |
call(callback); | |
}; | |
}; | |
} | |
} | |
function updateDoc(oldDoc, docInfo) { | |
docInfo.metadata.deletions = extend(docInfo.metadata.deletions, oldDoc.deletions); | |
var merged = Pouch.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000); | |
var inConflict = (isDeleted(oldDoc) && isDeleted(docInfo.metadata)) || | |
(!isDeleted(oldDoc) && newEdits && merged.conflicts !== 'new_leaf'); | |
if (inConflict) { | |
results.push(makeErr(Pouch.Errors.REV_CONFLICT, docInfo._bulk_seq)); | |
return processDocs(); | |
} | |
docInfo.metadata.rev_tree = merged.tree; | |
writeDoc(docInfo, processDocs); | |
} | |
function insertDoc(docInfo) { | |
      // Can't insert new deleted documents
if ('was_delete' in opts && isDeleted(docInfo.metadata)) { | |
results.push(Pouch.Errors.MISSING_DOC); | |
return processDocs(); | |
} | |
writeDoc(docInfo, processDocs); | |
} | |
// Insert sequence number into the error so we can sort later | |
function makeErr(err, seq) { | |
err._bulk_seq = seq; | |
return err; | |
} | |
function saveAttachment(docInfo, digest, data, callback) { | |
var objectStore = txn.objectStore(ATTACH_STORE); | |
      var getReq = objectStore.get(digest);
      getReq.onsuccess = function(e) {
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@'); | |
var newAtt = {digest: digest, body: data}; | |
if (e.target.result) { | |
if (e.target.result.refs) { | |
// only update references if this attachment already has them | |
// since we cannot migrate old style attachments here without | |
// doing a full db scan for references | |
newAtt.refs = e.target.result.refs; | |
newAtt.refs[ref] = true; | |
} | |
} else { | |
newAtt.refs = {}; | |
newAtt.refs[ref] = true; | |
} | |
        var putReq = objectStore.put(newAtt);
        putReq.onsuccess = function(e) {
          call(callback);
        };
        putReq.onerror = putReq.ontimeout = idbError(callback);
}; | |
getReq.onerror = getReq.ontimeout = idbError(callback); | |
} | |
var txn; | |
preprocessAttachments(function() { | |
txn = idb.transaction([DOC_STORE, BY_SEQ_STORE, ATTACH_STORE, META_STORE], IDBTransaction.READ_WRITE); | |
txn.onerror = idbError(callback); | |
txn.ontimeout = idbError(callback); | |
txn.oncomplete = complete; | |
processDocs(); | |
}); | |
}; | |
function sortByBulkSeq(a, b) { | |
return a._bulk_seq - b._bulk_seq; | |
} | |
// First we look up the metadata in the ids database, then we fetch the | |
// current revision(s) from the by sequence store | |
api._get = function idb_get(id, opts, callback) { | |
var result; | |
var txn = idb.transaction([DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly'); | |
txn.oncomplete = function() { | |
// Leaves are set when we ask about open_revs | |
// Using this approach they can be quite easily abstracted out to some | |
// generic api.get | |
if (leaves) { | |
result = []; | |
var count = leaves.length; | |
leaves.forEach(function(leaf){ | |
api.get(id.docId, {rev: leaf}, function(err, doc){ | |
if (!err) { | |
result.push({ok: doc}); | |
} else { | |
result.push({missing: leaf}); | |
} | |
count--; | |
if(!count) { | |
finish(); | |
} | |
}); | |
}); | |
} else { | |
finish(); | |
} | |
}; | |
function finish() { | |
if ('error' in result) { | |
call(callback, result); | |
} else { | |
call(callback, null, result); | |
} | |
} | |
var leaves; | |
txn.objectStore(DOC_STORE).get(id.docId).onsuccess = function(e) { | |
var metadata = e.target.result; | |
if (!e.target.result || (isDeleted(metadata, opts.rev) && !opts.rev)) { | |
result = Pouch.Errors.MISSING_DOC; | |
return; | |
} | |
if (opts.open_revs) { | |
if (opts.open_revs === "all") { | |
leaves = collectLeaves(metadata.rev_tree).map(function(leaf){ | |
return leaf.rev; | |
}); | |
} else { | |
leaves = opts.open_revs; // should be some validation here | |
} | |
return; // open_revs can be used only with revs | |
} | |
var rev = Pouch.merge.winningRev(metadata); | |
var key = opts.rev ? opts.rev : rev; | |
var index = txn.objectStore(BY_SEQ_STORE).index('_rev'); | |
index.get(key).onsuccess = function(e) { | |
var doc = e.target.result; | |
if (!doc) { | |
result = Pouch.Errors.MISSING_DOC; | |
return; | |
} | |
if (opts.revs) { // FIXME: if rev is given it should return ids from root to rev (don't include newer) | |
var path = arrayFirst(rootToLeaf(metadata.rev_tree), function(arr) { | |
return arr.ids.indexOf(doc._rev.split('-')[1]) !== -1; | |
}); | |
path.ids.reverse(); | |
doc._revisions = { | |
start: (path.pos + path.ids.length) - 1, | |
ids: path.ids | |
}; | |
} | |
if (opts.revs_info) { // FIXME: this returns revs for whole tree and should return only branch for winner | |
doc._revs_info = metadata.rev_tree.reduce(function(prev, current) { | |
return prev.concat(collectRevs(current)); | |
}, []); | |
} | |
if (opts.conflicts) { | |
var conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
if (conflicts.length) { | |
doc._conflicts = conflicts; | |
} | |
} | |
if (opts.attachments && doc._attachments) { | |
var attachments = Object.keys(doc._attachments); | |
var recv = 0; | |
attachments.forEach(function(key) { | |
api.getAttachment(doc._id + '/' + key, {encode: true, txn: txn}, function(err, data) { | |
doc._attachments[key].data = data; | |
if (++recv === attachments.length) { | |
result = doc; | |
} | |
}); | |
}); | |
} else { | |
if (doc._attachments){ | |
for (var key in doc._attachments) { | |
doc._attachments[key].stub = true; | |
} | |
} | |
result = doc; | |
} | |
}; | |
}; | |
}; | |
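  // Illustrative read paths through the public get() that ends up here
  // (assumes `db` is a Pouch instance using this idb adapter):
  //
  //   db.get('mydoc', function(err, doc) { ... });                // winning rev
  //   db.get('mydoc', {rev: '2-b'}, function(err, doc) { ... });  // specific rev
  //   db.get('mydoc', {open_revs: 'all'}, function(err, docs) {
  //     // docs => [{ok: {...leaf rev...}}, {missing: '3-c'}, ...]
  //   });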
api._getAttachment = function(id, opts, callback) { | |
var result; | |
var txn; | |
// This can be called while we are in a current transaction, pass the context | |
// along and dont wait for the transaction to complete here. | |
if ('txn' in opts) { | |
txn = opts.txn; | |
} else { | |
txn = idb.transaction([DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly'); | |
txn.oncomplete = function() { call(callback, null, result); } | |
} | |
txn.objectStore(DOC_STORE).get(id.docId).onsuccess = function(e) { | |
var metadata = e.target.result; | |
var bySeq = txn.objectStore(BY_SEQ_STORE); | |
bySeq.get(metadata.seq).onsuccess = function(e) { | |
var attachment = e.target.result._attachments[id.attachmentId]; | |
var digest = attachment.digest; | |
        var type = attachment.content_type;
txn.objectStore(ATTACH_STORE).get(digest).onsuccess = function(e) { | |
var data = e.target.result.body; | |
if (opts.encode) { | |
if (blobSupport) { | |
var reader = new FileReader(); | |
reader.onloadend = function(e) { | |
result = btoa(this.result); | |
if ('txn' in opts) { | |
call(callback, null, result); | |
} | |
} | |
reader.readAsBinaryString(data); | |
} else { | |
result = data; | |
if ('txn' in opts) { | |
call(callback, null, result); | |
} | |
} | |
} else { | |
if (blobSupport) { | |
result = data; | |
} else { | |
result = new Blob([atob(data)], {type: type}); | |
} | |
if ('txn' in opts) { | |
call(callback, null, result); | |
} | |
} | |
        };
      };
    };
    return;
  };
api._allDocs = function idb_allDocs(opts, callback) { | |
var start = 'startkey' in opts ? opts.startkey : false; | |
var end = 'endkey' in opts ? opts.endkey : false; | |
var descending = 'descending' in opts ? opts.descending : false; | |
descending = descending ? 'prev' : null; | |
var keyRange = start && end ? IDBKeyRange.bound(start, end) | |
: start ? IDBKeyRange.lowerBound(start) | |
: end ? IDBKeyRange.upperBound(end) : null; | |
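    // e.g. {startkey: 'a', endkey: 'z'} -> IDBKeyRange.bound('a', 'z')
    //      {startkey: 'a'}              -> IDBKeyRange.lowerBound('a')
    //      {endkey: 'z'}                -> IDBKeyRange.upperBound('z')
    //      neither                      -> null (cursor over the whole store)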
var transaction = idb.transaction([DOC_STORE, BY_SEQ_STORE], 'readonly'); | |
transaction.oncomplete = function() { | |
if ('keys' in opts) { | |
opts.keys.forEach(function(key) { | |
if (key in resultsMap) { | |
results.push(resultsMap[key]); | |
} else { | |
results.push({"key": key, "error": "not_found"}); | |
} | |
}); | |
if (opts.descending) { | |
results.reverse(); | |
} | |
} | |
call(callback, null, { | |
total_rows: results.length, | |
rows: results | |
}); | |
}; | |
var oStore = transaction.objectStore(DOC_STORE); | |
var oCursor = descending ? oStore.openCursor(keyRange, descending) | |
: oStore.openCursor(keyRange); | |
var results = []; | |
var resultsMap = {}; | |
oCursor.onsuccess = function(e) { | |
if (!e.target.result) { | |
return; | |
} | |
var cursor = e.target.result; | |
var metadata = cursor.value; | |
// If opts.keys is set we want to filter here only those docs with | |
// key in opts.keys. With no performance tests it is difficult to | |
// guess if iteration with filter is faster than many single requests | |
function allDocsInner(metadata, data) { | |
if (isLocalId(metadata.id)) { | |
return cursor['continue'](); | |
} | |
var doc = { | |
id: metadata.id, | |
key: metadata.id, | |
value: { | |
rev: Pouch.merge.winningRev(metadata) | |
} | |
}; | |
if (opts.include_docs) { | |
doc.doc = data; | |
doc.doc._rev = Pouch.merge.winningRev(metadata); | |
if (opts.conflicts) { | |
doc.doc._conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
} | |
} | |
if ('keys' in opts) { | |
if (opts.keys.indexOf(metadata.id) > -1) { | |
if (isDeleted(metadata)) { | |
doc.value.deleted = true; | |
doc.doc = null; | |
} | |
resultsMap[doc.id] = doc; | |
} | |
} else { | |
if(!isDeleted(metadata)) { | |
results.push(doc); | |
} | |
} | |
cursor['continue'](); | |
} | |
if (!opts.include_docs) { | |
allDocsInner(metadata); | |
} else { | |
var index = transaction.objectStore(BY_SEQ_STORE).index('_rev'); | |
var mainRev = Pouch.merge.winningRev(metadata); | |
index.get(mainRev).onsuccess = function(event) { | |
allDocsInner(cursor.value, event.target.result); | |
}; | |
} | |
} | |
}; | |
  // Looping through all the documents in the database is a terrible idea,
  // but it is the easiest to implement; we should probably keep a counter.
api._info = function idb_info(callback) { | |
var count = 0; | |
var result; | |
var txn = idb.transaction([DOC_STORE], 'readonly'); | |
txn.oncomplete = function() { | |
callback(null, result); | |
}; | |
txn.objectStore(DOC_STORE).openCursor().onsuccess = function(e) { | |
var cursor = e.target.result; | |
if (!cursor) { | |
result = { | |
db_name: name, | |
doc_count: count, | |
update_seq: meta.updateSeq | |
}; | |
return; | |
} | |
if (cursor.value.deleted !== true) { | |
count++; | |
} | |
cursor['continue'](); | |
}; | |
}; | |
api._changes = function idb_changes(opts) { | |
if (Pouch.DEBUG) | |
console.log(name + ': Start Changes Feed: continuous=' + opts.continuous); | |
opts = extend(true, {}, opts); | |
if (!opts.since) opts.since = 0; | |
if (opts.continuous) { | |
var id = name + ':' + Math.uuid(); | |
opts.cancelled = false; | |
IdbPouch.Changes.addListener(name, id, api, opts); | |
IdbPouch.Changes.notify(name); | |
return { | |
cancel: function() { | |
if (Pouch.DEBUG) console.log(name + ': Cancel Changes Feed'); | |
opts.cancelled = true; | |
IdbPouch.Changes.removeListener(name, id); | |
} | |
}; | |
} | |
var descending = 'descending' in opts ? opts.descending : false; | |
descending = descending ? 'prev' : null; | |
// Ignore the `since` parameter when `descending` is true | |
opts.since = opts.since && !descending ? opts.since : 0; | |
var results = [], resultIndices = {}, dedupResults = []; | |
var txn; | |
if (opts.filter && typeof opts.filter === 'string') { | |
var filterName = opts.filter.split('/'); | |
api.get('_design/' + filterName[0], function(err, ddoc) { | |
var filter = eval('(function() { return ' + | |
ddoc.filters[filterName[1]] + ' })()'); | |
opts.filter = filter; | |
fetchChanges(); | |
}); | |
} else { | |
fetchChanges(); | |
} | |
function fetchChanges() { | |
txn = idb.transaction([DOC_STORE, BY_SEQ_STORE]); | |
txn.oncomplete = onTxnComplete; | |
var req = descending | |
? txn.objectStore(BY_SEQ_STORE) | |
.openCursor(IDBKeyRange.lowerBound(opts.since, true), descending) | |
: txn.objectStore(BY_SEQ_STORE) | |
.openCursor(IDBKeyRange.lowerBound(opts.since, true)); | |
req.onsuccess = onsuccess; | |
req.onerror = onerror; | |
} | |
function onsuccess(event) { | |
if (!event.target.result) { | |
        // Filter out null results caused by deduping
for (var i = 0, l = results.length; i < l; i++ ) { | |
var result = results[i]; | |
if (result) dedupResults.push(result); | |
} | |
return false; | |
} | |
var cursor = event.target.result; | |
// Try to pre-emptively dedup to save us a bunch of idb calls | |
var changeId = cursor.value._id, changeIdIndex = resultIndices[changeId]; | |
if (changeIdIndex !== undefined) { | |
results[changeIdIndex].seq = cursor.key; // update so it has the later sequence number | |
results.push(results[changeIdIndex]); | |
results[changeIdIndex] = null; | |
resultIndices[changeId] = results.length - 1; | |
return cursor['continue'](); | |
} | |
var index = txn.objectStore(DOC_STORE); | |
index.get(cursor.value._id).onsuccess = function(event) { | |
var metadata = event.target.result; | |
if (isLocalId(metadata.id)) { | |
return cursor['continue'](); | |
} | |
var mainRev = Pouch.merge.winningRev(metadata); | |
var index = txn.objectStore(BY_SEQ_STORE).index('_rev'); | |
index.get(mainRev).onsuccess = function(docevent) { | |
var doc = docevent.target.result; | |
          var changeList = [{rev: mainRev}];
if (opts.style === 'all_docs') { | |
// console.log('all docs', changeList, collectLeaves(metadata.rev_tree)); | |
changeList = collectLeaves(metadata.rev_tree); | |
} | |
var change = { | |
id: metadata.id, | |
seq: cursor.key, | |
changes: changeList, | |
doc: doc, | |
}; | |
if (isDeleted(metadata, mainRev)) { | |
change.deleted = true; | |
} | |
if (opts.conflicts) { | |
change.doc._conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
} | |
// Dedupe the changes feed | |
var changeId = change.id, changeIdIndex = resultIndices[changeId]; | |
if (changeIdIndex !== undefined) { | |
results[changeIdIndex] = null; | |
} | |
results.push(change); | |
resultIndices[changeId] = results.length - 1; | |
cursor['continue'](); | |
} | |
}; | |
}; | |
function onTxnComplete() { | |
dedupResults.map(function(c) { | |
if (opts.filter && !opts.filter.apply(this, [c.doc])) { | |
return; | |
} | |
if (!opts.include_docs) { | |
delete c.doc; | |
} | |
call(opts.onChange, c); | |
}); | |
call(opts.complete, null, {results: dedupResults}); | |
}; | |
function onerror(error) { | |
// TODO: shouldn't we pass some params here? | |
call(opts.complete); | |
}; | |
}; | |
api._close = function(callback) { | |
if (idb === null) { | |
return call(callback, Pouch.Errors.NOT_OPEN); | |
} | |
// https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#close | |
// "Returns immediately and closes the connection in a separate thread..." | |
idb.close(); | |
call(callback, null); | |
}; | |
return api; | |
}; | |
IdbPouch.valid = function idb_valid() { | |
if (!document.location.host) { | |
console.error('indexedDB cannot be used in pages served from the filesystem'); | |
} | |
return !!window.indexedDB && !!document.location.host; | |
}; | |
IdbPouch.destroy = function idb_destroy(name, callback) { | |
if (Pouch.DEBUG) | |
console.log(name + ': Delete Database'); | |
IdbPouch.Changes.clearListeners(name); | |
var req = indexedDB.deleteDatabase(name); | |
req.onsuccess = function() { | |
call(callback, null); | |
}; | |
req.onerror = idbError(callback); | |
}; | |
IdbPouch.Changes = Changes(); | |
Pouch.adapter('idb', IdbPouch); | |
"use strict"; | |
function quote(str) { | |
return "'" + str + "'"; | |
} | |
var POUCH_VERSION = 1; | |
var POUCH_SIZE = 5 * 1024 * 1024; | |
// The object stores created for each database | |
// DOC_STORE stores the document meta data, its revision history and state | |
var DOC_STORE = quote('document-store'); | |
// BY_SEQ_STORE stores a particular version of a document, keyed by its | |
// sequence id | |
var BY_SEQ_STORE = quote('by-sequence'); | |
// Where we store attachments | |
var ATTACH_STORE = quote('attach-store'); | |
var META_STORE = quote('metadata-store'); | |
var unknownError = function(callback) { | |
return function(event) { | |
call(callback, { | |
status: 500, | |
error: event.type, | |
reason: event.target | |
}); | |
}; | |
}; | |
var webSqlPouch = function(opts, callback) { | |
var api = {}; | |
var update_seq = 0; | |
var name = opts.name; | |
var db = openDatabase(name, POUCH_VERSION, name, POUCH_SIZE); | |
if (!db) { | |
return call(callback, Pouch.Errors.UNKNOWN_ERROR); | |
} | |
function dbCreated() { | |
callback(null, api); | |
} | |
db.transaction(function (tx) { | |
var meta = 'CREATE TABLE IF NOT EXISTS ' + META_STORE + | |
' (update_seq)'; | |
var attach = 'CREATE TABLE IF NOT EXISTS ' + ATTACH_STORE + | |
' (digest, json, body BLOB)'; | |
var doc = 'CREATE TABLE IF NOT EXISTS ' + DOC_STORE + | |
' (id unique, seq, json, winningseq)'; | |
var seq = 'CREATE TABLE IF NOT EXISTS ' + BY_SEQ_STORE + | |
' (seq INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, rev UNIQUE, json)'; | |
tx.executeSql(attach); | |
tx.executeSql(doc); | |
tx.executeSql(seq); | |
tx.executeSql(meta); | |
var sql = 'SELECT update_seq FROM ' + META_STORE; | |
tx.executeSql(sql, [], function(tx, result) { | |
if (!result.rows.length) { | |
var initSeq = 'INSERT INTO ' + META_STORE + ' (update_seq) VALUES (?)'; | |
tx.executeSql(initSeq, [0]); | |
return; | |
} | |
update_seq = result.rows.item(0).update_seq; | |
}); | |
}, unknownError(callback), dbCreated); | |
api.type = function() { | |
return 'websql'; | |
}; | |
api.id = function() { | |
var id = localJSON.get(name + '_id', null); | |
if (id === null) { | |
id = Math.uuid(); | |
localJSON.set(name + '_id', id); | |
} | |
return id; | |
}; | |
api._info = function(callback) { | |
db.transaction(function(tx) { | |
var sql = 'SELECT COUNT(id) AS count FROM ' + DOC_STORE; | |
tx.executeSql(sql, [], function(tx, result) { | |
callback(null, { | |
db_name: name, | |
doc_count: result.rows.item(0).count, | |
update_seq: update_seq | |
}); | |
}); | |
}); | |
}; | |
api._bulkDocs = function idb_bulkDocs(req, opts, callback) { | |
var newEdits = opts.new_edits; | |
var userDocs = extend(true, [], req.docs); | |
// Parse the docs, give them a sequence number for the result | |
var docInfos = userDocs.map(function(doc, i) { | |
var newDoc = parseDoc(doc, newEdits); | |
newDoc._bulk_seq = i; | |
if (doc._deleted) { | |
if (!newDoc.metadata.deletions) { | |
newDoc.metadata.deletions = {}; | |
} | |
newDoc.metadata.deletions[doc._rev.split('-')[1]] = true; | |
} | |
return newDoc; | |
}); | |
var docInfoErrors = docInfos.filter(function(docInfo) { | |
return docInfo.error; | |
}); | |
if (docInfoErrors.length) { | |
return call(callback, docInfoErrors[0]); | |
} | |
var tx; | |
var results = []; | |
var docs = []; | |
var fetchedDocs = {}; | |
// Group multiple edits to the same document | |
docInfos.forEach(function(docInfo) { | |
if (docInfo.error) { | |
return results.push(docInfo); | |
} | |
if (!docs.length || !newEdits || docInfo.metadata.id !== docs[0].metadata.id) { | |
return docs.unshift(docInfo); | |
} | |
// We mark subsequent bulk docs with a duplicate id as conflicts | |
results.push(makeErr(Pouch.Errors.REV_CONFLICT, docInfo._bulk_seq)); | |
}); | |
function sortByBulkSeq(a, b) { | |
return a._bulk_seq - b._bulk_seq; | |
} | |
function complete(event) { | |
var aresults = []; | |
results.sort(sortByBulkSeq); | |
results.forEach(function(result) { | |
delete result._bulk_seq; | |
if (result.error) { | |
aresults.push(result); | |
return; | |
} | |
var metadata = result.metadata; | |
var rev = Pouch.merge.winningRev(metadata); | |
aresults.push({ | |
ok: true, | |
id: metadata.id, | |
rev: rev | |
}); | |
if (isLocalId(metadata.id)) { | |
return; | |
} | |
update_seq++; | |
var sql = 'UPDATE ' + META_STORE + ' SET update_seq=?'; | |
tx.executeSql(sql, [update_seq], function() { | |
webSqlPouch.Changes.notify(name); | |
localStorage[name] = (localStorage[name] === "a") ? "b" : "a"; | |
}); | |
}); | |
call(callback, null, aresults); | |
} | |
function preprocessAttachment(att, callback) { | |
if (att.stub) { | |
return callback(); | |
} | |
if (typeof att.data === 'string') { | |
att.data = atob(att.data); | |
att.digest = 'md5-' + Crypto.MD5(att.data); | |
return callback(); | |
} | |
var reader = new FileReader(); | |
reader.onloadend = function(e) { | |
att.data = this.result; | |
att.digest = 'md5-' + Crypto.MD5(this.result); | |
callback(); | |
}; | |
reader.readAsBinaryString(att.data); | |
} | |
function preprocessAttachments(callback) { | |
if (!docInfos.length) { | |
return callback(); | |
} | |
var docv = 0; | |
docInfos.forEach(function(docInfo) { | |
var attachments = docInfo.data && docInfo.data._attachments ? | |
Object.keys(docInfo.data._attachments) : []; | |
if (!attachments.length) { | |
return done(); | |
} | |
var recv = 0; | |
for (var key in docInfo.data._attachments) { | |
preprocessAttachment(docInfo.data._attachments[key], function() { | |
recv++; | |
if (recv == attachments.length) { | |
done(); | |
} | |
}); | |
} | |
}); | |
function done() { | |
docv++; | |
if (docInfos.length === docv) { | |
callback(); | |
} | |
} | |
} | |
function writeDoc(docInfo, callback, isUpdate) { | |
var err = null; | |
var recv = 0; | |
docInfo.data._id = docInfo.metadata.id; | |
docInfo.data._rev = docInfo.metadata.rev; | |
if (isDeleted(docInfo.metadata, docInfo.metadata.rev)) { | |
docInfo.data._deleted = true; | |
} | |
var attachments = docInfo.data._attachments ? | |
Object.keys(docInfo.data._attachments) : []; | |
for (var key in docInfo.data._attachments) { | |
if (!docInfo.data._attachments[key].stub) { | |
var data = docInfo.data._attachments[key].data; | |
delete docInfo.data._attachments[key].data; | |
var digest = docInfo.data._attachments[key].digest; | |
saveAttachment(docInfo, digest, data, function(err) { | |
recv++; | |
collectResults(err); | |
}); | |
} else { | |
recv++; | |
collectResults(); | |
} | |
} | |
if (!attachments.length) { | |
finish(); | |
} | |
function collectResults(attachmentErr) { | |
if (!err) { | |
if (attachmentErr) { | |
err = attachmentErr; | |
call(callback, err); | |
} else if (recv == attachments.length) { | |
finish(); | |
} | |
} | |
} | |
function dataWritten(tx, result) { | |
var seq = docInfo.metadata.seq = result.insertId; | |
delete docInfo.metadata.rev; | |
var mainRev = Pouch.merge.winningRev(docInfo.metadata); | |
var sql = isUpdate ? | |
'UPDATE ' + DOC_STORE + ' SET seq=?, json=?, winningseq=(SELECT seq FROM ' + BY_SEQ_STORE + ' WHERE rev=?) WHERE id=?' : | |
'INSERT INTO ' + DOC_STORE + ' (id, seq, winningseq, json) VALUES (?, ?, ?, ?);'; | |
var params = isUpdate ? | |
[seq, JSON.stringify(docInfo.metadata), mainRev, docInfo.metadata.id] : | |
[docInfo.metadata.id, seq, seq, JSON.stringify(docInfo.metadata)]; | |
tx.executeSql(sql, params, function(tx, result) { | |
results.push(docInfo); | |
call(callback, null); | |
}); | |
} | |
function finish() { | |
var data = docInfo.data; | |
var sql = 'INSERT INTO ' + BY_SEQ_STORE + ' (rev, json) VALUES (?, ?);'; | |
tx.executeSql(sql, [data._rev, JSON.stringify(data)], dataWritten); | |
} | |
} | |
function updateDoc(oldDoc, docInfo) { | |
docInfo.metadata.deletions = extend(docInfo.metadata.deletions, oldDoc.deletions); | |
var merged = Pouch.merge(oldDoc.rev_tree, docInfo.metadata.rev_tree[0], 1000); | |
var inConflict = (isDeleted(oldDoc) && isDeleted(docInfo.metadata)) || | |
(!isDeleted(oldDoc) && newEdits && merged.conflicts !== 'new_leaf'); | |
if (inConflict) { | |
results.push(makeErr(Pouch.Errors.REV_CONFLICT, docInfo._bulk_seq)); | |
return processDocs(); | |
} | |
docInfo.metadata.rev_tree = merged.tree; | |
writeDoc(docInfo, processDocs, true); | |
} | |
function insertDoc(docInfo) { | |
      // Can't insert new deleted documents
if ('was_delete' in opts && isDeleted(docInfo.metadata)) { | |
results.push(Pouch.Errors.MISSING_DOC); | |
return processDocs(); | |
} | |
writeDoc(docInfo, processDocs, false); | |
} | |
function processDocs() { | |
if (!docs.length) { | |
return complete(); | |
} | |
var currentDoc = docs.shift(); | |
var id = currentDoc.metadata.id; | |
if (id in fetchedDocs) { | |
updateDoc(fetchedDocs[id], currentDoc); | |
} else { | |
// if we have newEdits=false then we can update the same | |
// document twice in a single bulk docs call | |
fetchedDocs[id] = currentDoc.metadata; | |
insertDoc(currentDoc); | |
} | |
} | |
// Insert sequence number into the error so we can sort later | |
function makeErr(err, seq) { | |
err._bulk_seq = seq; | |
return err; | |
} | |
function saveAttachment(docInfo, digest, data, callback) { | |
var ref = [docInfo.metadata.id, docInfo.metadata.rev].join('@'); | |
var newAtt = {digest: digest}; | |
var sql = 'SELECT digest, json FROM ' + ATTACH_STORE + ' WHERE digest=?'; | |
tx.executeSql(sql, [digest], function(tx, result) { | |
if (!result.rows.length) { | |
newAtt.refs = {}; | |
newAtt.refs[ref] = true; | |
sql = 'INSERT INTO ' + ATTACH_STORE + '(digest, json, body) VALUES (?, ?, ?)'; | |
tx.executeSql(sql, [digest, JSON.stringify(newAtt), data], function() { | |
call(callback, null); | |
}); | |
} else { | |
newAtt.refs = JSON.parse(result.rows.item(0).json).refs; | |
sql = 'UPDATE ' + ATTACH_STORE + ' SET json=?, body=? WHERE digest=?'; | |
tx.executeSql(sql, [JSON.stringify(newAtt), data, digest], function() { | |
call(callback, null); | |
}); | |
} | |
}); | |
} | |
function metadataFetched(tx, results) { | |
for (var j=0; j<results.rows.length; j++) { | |
var row = results.rows.item(j); | |
fetchedDocs[row.id] = JSON.parse(row.json); | |
} | |
processDocs(); | |
} | |
preprocessAttachments(function() { | |
db.transaction(function(txn) { | |
tx = txn; | |
var ids = '(' + docs.map(function(d) { | |
return quote(d.metadata.id); | |
}).join(',') + ')'; | |
var sql = 'SELECT * FROM ' + DOC_STORE + ' WHERE id IN ' + ids; | |
tx.executeSql(sql, [], metadataFetched); | |
}, unknownError(callback)); | |
}); | |
}; | |
api._get = function(id, opts, callback) { | |
var result; | |
var leaves; | |
db.transaction(function(tx) { | |
var sql = 'SELECT * FROM ' + DOC_STORE + ' WHERE id=?'; | |
tx.executeSql(sql, [id.docId], function(tx, results) { | |
if (!results.rows.length) { | |
result = Pouch.Errors.MISSING_DOC; | |
return; | |
} | |
var metadata = JSON.parse(results.rows.item(0).json); | |
if (isDeleted(metadata, opts.rev) && !opts.rev) { | |
result = Pouch.Errors.MISSING_DOC; | |
return; | |
} | |
if (opts.open_revs) { | |
if (opts.open_revs === "all") { | |
leaves = collectLeaves(metadata.rev_tree).map(function(leaf){ | |
return leaf.rev; | |
}); | |
} else { | |
leaves = opts.open_revs; // should be some validation here | |
} | |
return; // open_revs can be used only with revs | |
} | |
var rev = Pouch.merge.winningRev(metadata); | |
var key = opts.rev ? opts.rev : rev; | |
var sql = 'SELECT * FROM ' + BY_SEQ_STORE + ' WHERE rev=?'; | |
tx.executeSql(sql, [key], function(tx, results) { | |
if (!results.rows.length) { | |
result = Pouch.Errors.MISSING_DOC; | |
return; | |
} | |
var doc = JSON.parse(results.rows.item(0).json); | |
if (opts.revs) { | |
var path = arrayFirst(rootToLeaf(metadata.rev_tree), function(arr) { | |
return arr.ids.indexOf(doc._rev.split('-')[1]) !== -1; | |
}); | |
path.ids.reverse(); | |
doc._revisions = { | |
start: (path.pos + path.ids.length) - 1, | |
ids: path.ids | |
}; | |
} | |
if (opts.revs_info) { | |
doc._revs_info = metadata.rev_tree.reduce(function(prev, current) { | |
return prev.concat(collectRevs(current)); | |
}, []); | |
} | |
if (opts.conflicts) { | |
var conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
if (conflicts.length) { | |
doc._conflicts = conflicts; | |
} | |
} | |
if (opts.attachments && doc._attachments) { | |
var attachments = Object.keys(doc._attachments); | |
var recv = 0; | |
attachments.forEach(function(key) { | |
api.getAttachment(doc._id + '/' + key, {encode: true, txn: tx}, function(err, data) { | |
doc._attachments[key].data = data; | |
if (++recv === attachments.length) { | |
result = doc; | |
} | |
}); | |
}); | |
} else { | |
if (doc._attachments){ | |
for (var key in doc._attachments) { | |
doc._attachments[key].stub = true; | |
} | |
} | |
result = doc; | |
} | |
}); | |
}); | |
}, unknownError(callback), function() { | |
if (leaves) { | |
result = []; | |
var count = leaves.length; | |
leaves.forEach(function(leaf){ | |
api.get(id.docId, {rev: leaf}, function(err, doc){ | |
if (!err) { | |
result.push({ok: doc}); | |
} else { | |
result.push({missing: leaf}); | |
} | |
count--; | |
if(!count) { | |
finish(); | |
} | |
}); | |
}); | |
} else { | |
finish(); | |
} | |
}); | |
function finish(){ | |
if ('error' in result) { | |
call(callback, result); | |
} else { | |
call(callback, null, result); | |
} | |
} | |
}; | |
api._allDocs = function(opts, callback) { | |
var results = []; | |
var resultsMap = {}; | |
var start = 'startkey' in opts ? opts.startkey : false; | |
var end = 'endkey' in opts ? opts.endkey : false; | |
var descending = 'descending' in opts ? opts.descending : false; | |
var sql = 'SELECT ' + DOC_STORE + '.id, ' + BY_SEQ_STORE + '.seq, ' + | |
BY_SEQ_STORE + '.json AS data, ' + DOC_STORE + '.json AS metadata FROM ' + | |
BY_SEQ_STORE + ' JOIN ' + DOC_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' + | |
DOC_STORE + '.winningseq'; | |
if ('keys' in opts) { | |
sql += ' WHERE ' + DOC_STORE + '.id IN (' + opts.keys.map(function(key){ | |
return quote(key); | |
}).join(',') + ')'; | |
} else { | |
if (start) { | |
sql += ' WHERE ' + DOC_STORE + '.id >= "' + start + '"'; | |
} | |
if (end) { | |
sql += (start ? ' AND ' : ' WHERE ') + DOC_STORE + '.id <= "' + end + '"'; | |
} | |
sql += ' ORDER BY ' + DOC_STORE + '.id ' + (descending ? 'DESC' : 'ASC'); | |
} | |
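    // Illustrative SQL produced above for {startkey: 'a', endkey: 'm'}
    // (wrapped here for readability):
    //
    //   SELECT 'document-store'.id, 'by-sequence'.seq,
    //          'by-sequence'.json AS data, 'document-store'.json AS metadata
    //   FROM 'by-sequence' JOIN 'document-store'
    //     ON 'by-sequence'.seq = 'document-store'.winningseq
    //   WHERE 'document-store'.id >= "a" AND 'document-store'.id <= "m"
    //   ORDER BY 'document-store'.id ASC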
db.transaction(function(tx) { | |
tx.executeSql(sql, [], function(tx, result) { | |
for (var i = 0, l = result.rows.length; i < l; i++ ) { | |
var doc = result.rows.item(i); | |
var metadata = JSON.parse(doc.metadata); | |
var data = JSON.parse(doc.data); | |
if (!(isLocalId(metadata.id))) { | |
var doc = { | |
id: metadata.id, | |
key: metadata.id, | |
value: {rev: Pouch.merge.winningRev(metadata)} | |
}; | |
if (opts.include_docs) { | |
doc.doc = data; | |
doc.doc._rev = Pouch.merge.winningRev(metadata); | |
if (opts.conflicts) { | |
doc.doc._conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
} | |
} | |
if ('keys' in opts) { | |
if (opts.keys.indexOf(metadata.id) > -1) { | |
if (isDeleted(metadata)) { | |
doc.value.deleted = true; | |
doc.doc = null; | |
} | |
resultsMap[doc.id] = doc; | |
} | |
} else { | |
if(!isDeleted(metadata)) { | |
results.push(doc); | |
} | |
} | |
} | |
} | |
}); | |
}, unknownError(callback), function() { | |
if ('keys' in opts) { | |
opts.keys.forEach(function(key) { | |
if (key in resultsMap) { | |
results.push(resultsMap[key]); | |
} else { | |
results.push({"key": key, "error": "not_found"}); | |
} | |
}); | |
if (opts.descending) { | |
results.reverse(); | |
} | |
} | |
call(callback, null, { | |
total_rows: results.length, | |
rows: results | |
}); | |
}); | |
  };
api._changes = function idb_changes(opts) { | |
if (Pouch.DEBUG) | |
console.log(name + ': Start Changes Feed: continuous=' + opts.continuous); | |
opts = extend(true, {}, opts); | |
if (!opts.since) opts.since = 0; | |
    if (opts.continuous) {
      var id = name + ':' + Math.uuid();
      opts.cancelled = false;
      webSqlPouch.Changes.addListener(name, id, api, opts);
webSqlPouch.Changes.notify(name); | |
return { | |
cancel: function() { | |
if (Pouch.DEBUG) console.log(name + ': Cancel Changes Feed'); | |
opts.cancelled = true; | |
webSqlPouch.Changes.removeListener(name, id); | |
} | |
}; | |
} | |
var descending = 'descending' in opts ? opts.descending : false; | |
descending = descending ? 'prev' : null; | |
// Ignore the `since` parameter when `descending` is true | |
opts.since = opts.since && !descending ? opts.since : 0; | |
var results = [], resultIndices = {}, dedupResults = []; | |
var txn; | |
if (opts.filter && typeof opts.filter === 'string') { | |
var filterName = opts.filter.split('/'); | |
api.get('_design/' + filterName[0], function(err, ddoc) { | |
var filter = eval('(function() { return ' + | |
ddoc.filters[filterName[1]] + ' })()'); | |
opts.filter = filter; | |
fetchChanges(); | |
}); | |
} else { | |
fetchChanges(); | |
} | |
function fetchChanges() { | |
var sql = 'SELECT ' + DOC_STORE + '.id, ' + BY_SEQ_STORE + '.seq, ' + | |
BY_SEQ_STORE + '.json AS data, ' + DOC_STORE + '.json AS metadata FROM ' + | |
BY_SEQ_STORE + ' JOIN ' + DOC_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' + | |
DOC_STORE + '.winningseq WHERE ' + DOC_STORE + '.seq > ' + opts.since + | |
' ORDER BY ' + DOC_STORE + '.seq ' + (descending ? 'DESC' : 'ASC'); | |
db.transaction(function(tx) { | |
tx.executeSql(sql, [], function(tx, result) { | |
for (var i = 0, l = result.rows.length; i < l; i++ ) { | |
var doc = result.rows.item(i); | |
var metadata = JSON.parse(doc.metadata); | |
if (!isLocalId(metadata.id)) { | |
var change = { | |
id: metadata.id, | |
seq: doc.seq, | |
changes: collectLeaves(metadata.rev_tree), | |
doc: JSON.parse(doc.data), | |
}; | |
change.doc._rev = Pouch.merge.winningRev(metadata); | |
if (isDeleted(metadata, change.doc._rev)) { | |
change.deleted = true; | |
} | |
if (opts.conflicts) { | |
change.doc._conflicts = collectConflicts(metadata.rev_tree, metadata.deletions); | |
} | |
results.push(change); | |
} | |
} | |
for (var i = 0, l = results.length; i < l; i++ ) { | |
var result = results[i]; | |
if (result) dedupResults.push(result); | |
} | |
dedupResults.map(function(c) { | |
if (opts.filter && !opts.filter.apply(this, [c.doc])) { | |
return; | |
} | |
if (!opts.include_docs) { | |
delete c.doc; | |
} | |
call(opts.onChange, c); | |
}); | |
call(opts.complete, null, {results: dedupResults}); | |
}); | |
}); | |
} | |
}; | |
api._getAttachment = function(id, opts, callback) { | |
var res; | |
// This can be called while we are in a current transaction, pass the context | |
// along and dont wait for the transaction to complete here. | |
if ('txn' in opts) { | |
fetchAttachment(opts.txn); | |
} else { | |
db.transaction(fetchAttachment, unknownError(callback), function() { | |
call(callback, null, res); | |
}); | |
} | |
function fetchAttachment(tx) { | |
var sql = 'SELECT ' + BY_SEQ_STORE + '.json AS data FROM ' + DOC_STORE + | |
' JOIN ' + BY_SEQ_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' + DOC_STORE + | |
'.seq WHERE ' + DOC_STORE + '.id = "' + id.docId + '"' ; | |
tx.executeSql(sql, [], function(tx, result) { | |
var doc = JSON.parse(result.rows.item(0).data); | |
var attachment = doc._attachments[id.attachmentId]; | |
var digest = attachment.digest; | |
var type = attachment.content_type; | |
var sql = 'SELECT body FROM ' + ATTACH_STORE + ' WHERE digest=?'; | |
tx.executeSql(sql, [digest], function(tx, result) { | |
var data = result.rows.item(0).body; | |
if (opts.encode) { | |
res = btoa(data); | |
} else { | |
res = new Blob([data], {type: type}); | |
} | |
if ('txn' in opts) { | |
call(callback, null, res); | |
} | |
}); | |
}); | |
} | |
  };
  return api;
};
webSqlPouch.valid = function() { | |
return !!window.openDatabase; | |
}; | |
webSqlPouch.destroy = function(name, callback) { | |
var db = openDatabase(name, POUCH_VERSION, name, POUCH_SIZE); | |
localJSON.set(name + '_id', null); | |
db.transaction(function (tx) { | |
tx.executeSql('DROP TABLE IF EXISTS ' + DOC_STORE, []); | |
tx.executeSql('DROP TABLE IF EXISTS ' + BY_SEQ_STORE, []); | |
tx.executeSql('DROP TABLE IF EXISTS ' + ATTACH_STORE, []); | |
tx.executeSql('DROP TABLE IF EXISTS ' + META_STORE, []); | |
}, unknownError(callback), function() { | |
call(callback, null); | |
}); | |
}; | |
webSqlPouch.Changes = Changes(); | |
Pouch.adapter('websql', webSqlPouch); | |
/*global Pouch: true */ | |
"use strict"; | |
// This is the first implementation of a basic plugin, we register the | |
// plugin object with pouch and it is mixin'd to each database created | |
// (regardless of adapter), adapters can override plugins by providing | |
// their own implementation. functions on the plugin object that start | |
// with _ are reserved function that are called by pouchdb for special | |
// notifications. | |
// If we wanted to store incremental views we can do it here by listening | |
// to the changes feed (keeping track of our last update_seq between page loads) | |
// and storing the result of the map function (possibly using the upcoming | |
// extracted adapter functions) | |
var MapReduce = function(db) { | |
function viewQuery(fun, options) { | |
if (!options.complete) { | |
return; | |
} | |
if (!fun.reduce) { | |
options.reduce = false; | |
} | |
function sum(values) { | |
return values.reduce(function(a, b) { return a + b; }, 0); | |
} | |
var results = []; | |
var current = null; | |
    var num_started = 0;
    var completed = false;
var emit = function(key, val) { | |
var viewRow = { | |
id: current.doc._id, | |
key: key, | |
value: val | |
}; | |
if (options.startkey && Pouch.collate(key, options.startkey) < 0) return; | |
if (options.endkey && Pouch.collate(key, options.endkey) > 0) return; | |
if (options.key && Pouch.collate(key, options.key) !== 0) return; | |
num_started++; | |
if (options.include_docs) { | |
//in this special case, join on _id (issue #106) | |
if (val && typeof val === 'object' && val._id){ | |
db.get(val._id, | |
function(_, joined_doc){ | |
if (joined_doc) { | |
viewRow.doc = joined_doc; | |
} | |
results.push(viewRow); | |
checkComplete(); | |
}); | |
return; | |
} else { | |
viewRow.doc = current.doc; | |
} | |
} | |
results.push(viewRow); | |
}; | |
// ugly way to make sure references to 'emit' in map/reduce bind to the | |
// above emit | |
eval('fun.map = ' + fun.map.toString() + ';'); | |
if (fun.reduce) { | |
eval('fun.reduce = ' + fun.reduce.toString() + ';'); | |
} | |
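    // For example, a map function passed in as
    //   function(doc) { emit(doc.title, 1); }
    // has no `emit` in scope of its own; re-evaluating its source here makes
    // that free reference resolve to the closure-local emit defined above.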
// exclude _conflicts key by default | |
// or to use options.conflicts if it's set when called by db.query | |
var conflicts = ('conflicts' in options ? options.conflicts : false); | |
//only proceed once all documents are mapped and joined | |
    var checkComplete = function() {
if (completed && results.length == num_started){ | |
results.sort(function(a, b) { | |
return Pouch.collate(a.key, b.key); | |
}); | |
if (options.descending) { | |
results.reverse(); | |
} | |
if (options.reduce === false) { | |
return options.complete(null, {rows: results}); | |
} | |
var groups = []; | |
results.forEach(function(e) { | |
var last = groups[groups.length-1] || null; | |
if (last && Pouch.collate(last.key[0][0], e.key) === 0) { | |
last.key.push([e.key, e.id]); | |
last.value.push(e.value); | |
return; | |
} | |
groups.push({key: [[e.key, e.id]], value: [e.value]}); | |
}); | |
groups.forEach(function(e) { | |
e.value = fun.reduce(e.key, e.value) || null; | |
e.key = e.key[0][0]; | |
}); | |
options.complete(null, {rows: groups}); | |
} | |
    };
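    // Illustrative shape of `groups` before the reduce pass above runs
    // (keys, ids and values made up):
    //
    //   [ {key: [['a', 'doc1'], ['a', 'doc2']], value: [1, 1]},
    //     {key: [['b', 'doc3']],                value: [4]} ]
    //
    //   // after reduce each entry collapses to e.g. {key: 'a', value: 2}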
db.changes({ | |
conflicts: conflicts, | |
include_docs: true, | |
onChange: function(doc) { | |
if (!('deleted' in doc)) { | |
current = {doc: doc.doc}; | |
fun.map.call(this, doc.doc); | |
} | |
}, | |
complete: function() { | |
      completed = true;
checkComplete(); | |
} | |
}); | |
} | |
function httpQuery(fun, opts, callback) { | |
    // List of parameters to add to the view request
var params = []; | |
var body = undefined; | |
var method = 'GET'; | |
// If opts.reduce exists and is defined, then add it to the list | |
// of parameters. | |
// If reduce=false then the results are that of only the map function | |
// not the final result of map and reduce. | |
if (typeof opts.reduce !== 'undefined') { | |
params.push('reduce=' + opts.reduce); | |
} | |
if (typeof opts.include_docs !== 'undefined') { | |
params.push('include_docs=' + opts.include_docs); | |
} | |
if (typeof opts.limit !== 'undefined') { | |
params.push('limit=' + opts.limit); | |
} | |
if (typeof opts.descending !== 'undefined') { | |
params.push('descending=' + opts.descending); | |
} | |
if (typeof opts.startkey !== 'undefined') { | |
params.push('startkey=' + encodeURIComponent(JSON.stringify(opts.startkey))); | |
} | |
if (typeof opts.endkey !== 'undefined') { | |
params.push('endkey=' + encodeURIComponent(JSON.stringify(opts.endkey))); | |
} | |
if (typeof opts.key !== 'undefined') { | |
params.push('key=' + encodeURIComponent(JSON.stringify(opts.key))); | |
} | |
// If keys are supplied, issue a POST request to circumvent GET query string limits | |
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options | |
if (typeof opts.keys !== 'undefined') { | |
method = 'POST'; | |
body = JSON.stringify({keys:opts.keys}); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
params = params === '' ? '' : '?' + params; | |
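    // e.g. opts of {reduce: false, startkey: 'a'} yield
    //   params === '?reduce=false&startkey=%22a%22'
    // while an empty opts object leaves params === ''.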
// We are referencing a query defined in the design doc | |
if (typeof fun === 'string') { | |
var parts = fun.split('/'); | |
db.request({ | |
method: method, | |
url: '_design/' + parts[0] + '/_view/' + parts[1] + params, | |
body: body | |
}, callback); | |
return; | |
} | |
// We are using a temporary view, terrible for performance but good for testing | |
var queryObject = JSON.parse(JSON.stringify(fun, function(key, val) { | |
if (typeof val === 'function') { | |
return val + ''; // implicitly `toString` it | |
} | |
return val; | |
})); | |
db.request({ | |
method:'POST', | |
url: '_temp_view' + params, | |
body: queryObject | |
}, callback); | |
} | |
function query(fun, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (callback) { | |
opts.complete = callback; | |
} | |
if (db.type() === 'http') { | |
if (typeof fun === 'function'){ | |
return httpQuery({map: fun}, opts, callback); | |
} | |
return httpQuery(fun, opts, callback); | |
} | |
if (typeof fun === 'object') { | |
return viewQuery(fun, opts); | |
} | |
if (typeof fun === 'function') { | |
return viewQuery({map: fun}, opts); | |
} | |
var parts = fun.split('/'); | |
db.get('_design/' + parts[0], function(err, doc) { | |
if (err) { | |
if (callback) callback(err); | |
return; | |
} | |
viewQuery({ | |
map: doc.views[parts[1]].map, | |
reduce: doc.views[parts[1]].reduce | |
}, opts); | |
}); | |
} | |
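  // Illustrative calls into query() (assumes `db` is a Pouch instance; the
  // design doc and view names are made up):
  //
  //   // temporary view built from a bare map function
  //   db.query(function(doc) { emit(doc.type, null); }, {reduce: false},
  //     function(err, res) { console.log(res.rows); });
  //
  //   // named view stored in _design/app
  //   db.query('app/by_type', {key: 'post', include_docs: true},
  //     function(err, res) { /* ... */ });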
return {'query': query}; | |
}; | |
// Deletion is a noop since we dont store the results of the view | |
MapReduce._delete = function() { }; | |
Pouch.plugin('mapreduce', MapReduce); | |
})(this); |