Created
August 12, 2015 07:10
-
-
Save paulrouget/230ab0467b4f65946da2 to your computer and use it in GitHub Desktop.
crash log
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
DEBUG:compositing::constellation: constellation got init load URL message | |
DEBUG:net::resource_task: resource_task: loading url: file:///Users/paul/github/servo/resources/rippy.jpg | |
DEBUG:compositing::constellation: constellation got window resize message | |
DEBUG:compositing::constellation: handle_resized_window_msg: 800×600 800×600 | |
INFO:style::parser: 51:3 Unsupported property declaration: 'width: fit-content;' | |
INFO:style::parser: 52:3 Unsupported property declaration: 'height: fit-content;' | |
INFO:style::parser: 60:1 Invalid rule: 'dialog::backdrop {' | |
INFO:style::parser: 69:3 Unsupported rule: 'dialog:modal {' | |
INFO:style::parser: 87:8 Unsupported property declaration: 'display: ruby;' | |
INFO:style::parser: 88:6 Unsupported property declaration: 'display: ruby-text;' | |
INFO:style::parser: 100:31 Unsupported property declaration: 'text-decoration: dotted underline;' | |
INFO:style::parser: 112:7 Unsupported property declaration: 'display-outside: break-opportunity;' | |
INFO:style::parser: 116:1 Invalid rule: '[dir]:dir(ltr), bdi:dir(ltr), input[type=tel]:dir(ltr) {' | |
INFO:style::parser: 117:1 Invalid rule: '[dir]:dir(rtl), bdi:dir(rtl) {' | |
DEBUG:net::resource_task: resource_task: loading url: http://localhost:6060/index.html | |
INFO:style::parser: 143:1 Invalid rule: ':matches(article, aside, nav, section) h1 {' | |
INFO:style::parser: 144:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) h1 {' | |
INFO:style::parser: 145:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) h1 {' | |
INFO:style::parser: 146:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) h1 {' | |
INFO:style::parser: 147:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) h1 {' | |
INFO:style::parser: 149:1 Invalid rule: ':matches(article, aside, nav, section) hgroup > h1 ~ h2 {' | |
INFO:style::parser: 150:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h2 {' | |
INFO:style::parser: 151:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h2 {' | |
INFO:style::parser: 152:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h2 {' | |
INFO:style::parser: 154:1 Invalid rule: ':matches(article, aside, nav, section) hgroup > h1 ~ h3 {' | |
INFO:style::parser: 155:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h3 {' | |
INFO:style::parser: 156:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h3 {' | |
INFO:net::http_loader: requesting http://localhost:6060/index.html | |
INFO:style::parser: 158:1 Invalid rule: ':matches(article, aside, nav, section) hgroup > h1 ~ h4 {' | |
INFO:style::parser: 159:1 Invalid rule: ':matches(article, aside, nav, section) :matches(article, aside, nav, section) hgroup > h1 ~ h4 {' | |
INFO:style::parser: 161:1 Invalid rule: ':matches(article, aside, nav, section) hgroup > h1 ~ h5 {' | |
INFO:style::parser: 169:1 Invalid rule: ':matches(dir, dl, menu, ol, ul) :matches(dir, dl, menu, ol, ul) {' | |
INFO:style::parser: 180:1 Invalid rule: ':matches(dir, menu, ol, ul) :matches(dir, menu, ul) {' | |
INFO:style::parser: 184:1 Invalid rule: ':matches(dir, menu, ol, ul) :matches(dir, menu, ol, ul) :matches(dir, menu, ul) {' | |
INFO:style::parser: 201:3 Unsupported property declaration: 'visibility: collapse;' | |
INFO:style::parser: 219:1 Invalid rule: 'table:matches( | |
[rules=none i], [rules=groups i], [rules=rows i], | |
[rules=cols i], [rules=all i], | |
[frame=void i], [frame=above i], [frame=below i], | |
[frame=hsides i], [frame=lhs i], [frame=rhs i], | |
[frame=vsides i], [frame=box i], [frame=border i] | |
), | |
table:matches( | |
[rules=none i], [rules=groups i], [rules=rows i], | |
[rules=cols i], [rules=all i] | |
) > tr > :matches(td, th), | |
table:matches( | |
[rules=none i], [rules=groups i], [rules=rows i], | |
[rules=cols i], [rules=all i] | |
) > :matches(thead, tbody, tfoot) > tr > :matches(td, th) {' | |
INFO:style::parser: 238:1 Invalid rule: ':matches(table, thead, tbody, tfoot, tr) > form {' | |
INFO:style::parser: 247:12 Unsupported property declaration: 'white-space: pre-wrap;' | |
INFO:style::parser: 262:3 Unsupported property declaration: 'border-color: ThreeDFace;' | |
INFO:style::parser: 264:3 Unsupported property declaration: 'min-width: min-content;' | |
INFO:style::parser: 273:9 Unsupported property declaration: 'object-fit: contain;' | |
INFO:style::parser: 276:12 Unsupported property declaration: 'white-space: pre-wrap;' | |
INFO:style::parser: 10:27 Unsupported property declaration: 'font-family: monospace !important;' | |
DEBUG:hyper::net: http scheme | |
INFO:style::parser: 8:13 Unsupported property declaration: 'white-space: pre-wrap;' | |
INFO:style::parser: 24:1 Invalid rule: 'ol[type=1], li[type=1] {' | |
INFO:style::parser: 27:26 Unsupported property declaration: 'list-style-type: lower-roman;' | |
INFO:style::parser: 28:26 Unsupported property declaration: 'list-style-type: upper-roman;' | |
INFO:style::parser: 38:1 Invalid rule: ':matches(thead, tbody, tfoot, tr, td, th)[align=absmiddle i] {' | |
INFO:style::parser: 43:1 Invalid rule: ':matches(p, h1, h2, h3, h4, h5, h6)[align=left i] {' | |
INFO:style::parser: 44:1 Invalid rule: ':matches(p, h1, h2, h3, h4, h5, h6)[align=right i] {' | |
INFO:style::parser: 45:1 Invalid rule: ':matches(p, h1, h2, h3, h4, h5, h6)[align=center i] {' | |
INFO:style::parser: 46:1 Invalid rule: ':matches(p, h1, h2, h3, h4, h5, h6)[align=justify i] {' | |
INFO:style::parser: 47:1 Invalid rule: ':matches(thead, tbody, tfoot, tr, td, th)[valign=top i] {' | |
INFO:style::parser: 48:1 Invalid rule: ':matches(thead, tbody, tfoot, tr, td, th)[valign=middle i] {' | |
INFO:style::parser: 49:1 Invalid rule: ':matches(thead, tbody, tfoot, tr, td, th)[valign=bottom i] {' | |
INFO:style::parser: 50:1 Invalid rule: ':matches(thead, tbody, tfoot, tr, td, th)[valign=baseline i] {' | |
INFO:style::parser: 54:1 Invalid rule: 'table:matches([rules=none i], [rules=groups i], [rules=rows i], [rules=cols i], [rules=all i]) {' | |
INFO:net::cookie_storage: === COOKIES SENT: | |
INFO:net::http_loader: GET | |
INFO:net::http_loader: - Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8 | |
INFO:net::http_loader: - User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0 | |
INFO:net::http_loader: - Host: localhost:6060 | |
INFO:net::http_loader: - Accept-Encoding: gzip, deflate | |
INFO:net::http_loader: None | |
DEBUG:hyper::http::h1: request line: Get "/index.html" Http11 | |
DEBUG:hyper::http::h1: headers=Headers { Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8, User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0, Host: localhost:6060, Accept-Encoding: gzip, deflate, } | |
DEBUG:hyper::client::response: version=Http11, status=Ok | |
DEBUG:hyper::client::response: headers=Headers { cache-control: max-age=0, server: ecstatic-0.8.0, etag: "3784572-573-Sat Aug 08 2015 09:30:06 GMT+0200 (CEST)", Connection: keep-alive, Date: Wed, 12 Aug 2015 07:07:32 GMT, last-modified: Sat, 08 Aug 2015 07:30:06 GMT, content-length: 573, content-type: text/html; charset=UTF-8, } | |
INFO:net::http_loader: got HTTP response 200 OK, headers: | |
INFO:net::http_loader: - cache-control: max-age=0 | |
INFO:net::http_loader: - server: ecstatic-0.8.0 | |
INFO:net::http_loader: - etag: "3784572-573-Sat Aug 08 2015 09:30:06 GMT+0200 (CEST)" | |
INFO:net::http_loader: - Connection: keep-alive | |
INFO:net::http_loader: - Date: Wed, 12 Aug 2015 07:07:32 GMT | |
INFO:net::http_loader: - last-modified: Sat, 08 Aug 2015 07:30:06 GMT | |
INFO:net::http_loader: - content-length: 573 | |
INFO:net::http_loader: - content-type: text/html; charset=UTF-8 | |
DEBUG:mime: starting params, len=24 | |
DEBUG:mime: param_from_str, start=10 | |
INFO:style::parser: 84:1 Invalid rule: 'table:matches([rules=none i], [rules=groups i], [rules=rows i]) > tr > :matches(td, th), | |
table:matches([rules=none i], [rules=groups i], [rules=rows i]) > :matches(thead, tbody, tfoot) > tr > :matches(td, th) {' | |
DEBUG:mime: starting params, len=24 | |
DEBUG:mime: param_from_str, start=10 | |
INFO:style::parser: 89:1 Invalid rule: 'table[rules=cols i] > tr > :matches(td, th), | |
table[rules=cols i] > :matches(thead, tbody, tfoot) > tr > :matches(td, th) {' | |
INFO:style::parser: 94:1 Invalid rule: 'table[rules=all i] > tr > :matches(td, th), | |
table[rules=all i] > :matches(thead, tbody, tfoot) > tr > :matches(td, th) {' | |
DEBUG:script::script_task: ScriptTask: loading http://localhost:6060/index.html on page PipelineId(0) | |
INFO:style::parser: 106:1 Invalid rule: 'table[rules=groups i] > :matches(thead, tbody, tfoot) {' | |
INFO:style::parser: 112:1 Invalid rule: 'table[rules=rows i] > tr, | |
table[rules=rows i] > :matches(thead, tbody, tfoot) > tr {' | |
INFO:style::parser: 128:1 Invalid rule: 'iframe[frameborder=0], iframe[frameborder=no i] {' | |
INFO:style::parser: 130:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=left i] {' | |
INFO:style::parser: 133:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=right i] {' | |
INFO:style::parser: 136:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=top i] {' | |
INFO:style::parser: 139:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=baseline i] {' | |
INFO:style::parser: 142:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=texttop i] {' | |
INFO:style::parser: 145:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object):matches([align=absmiddle i], [align=abscenter i]) {' | |
INFO:style::parser: 148:1 Invalid rule: ':matches(applet, embed, iframe, img, input[type=image i], object)[align=bottom i] {' | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character ! | |
DEBUG:html5ever::tokenizer: processing in state MarkupDeclarationOpen | |
DEBUG:html5ever::tokenizer: processing in state Doctype | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeDoctypeName | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: processing in state DoctypeName | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n"))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n\")) in insertion mode BeforeHtml | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n\")) in insertion mode BeforeHtml | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'html\' type=static), self_closing: false, attrs: [] }) in insertion mode BeforeHtml | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n\\n \")) in insertion mode BeforeHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n\\n \")) in insertion mode BeforeHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character ! | |
DEBUG:html5ever::tokenizer: processing in state MarkupDeclarationOpen | |
DEBUG:html5ever::tokenizer: processing in state CommentStart | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state Comment | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character T | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character S | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character C | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character F | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: got character j | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character M | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character z | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character P | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character L | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character , | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character v | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character 2 | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character 0 | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character I | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character p | |
DEBUG:html5ever::tokenizer: got character y | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character M | |
DEBUG:html5ever::tokenizer: got character P | |
DEBUG:html5ever::tokenizer: got character L | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character w | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character w | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character , | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character Y | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character p | |
DEBUG:html5ever::tokenizer: got character : | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character z | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character g | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character M | |
DEBUG:html5ever::tokenizer: got character P | |
DEBUG:html5ever::tokenizer: got character L | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character 2 | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character 0 | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character . | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character * | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEndDash | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEnd | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing CommentToken(Tendril<UTF8>(owned: \"\\n\\n This Source Code Form is subject to the terms of the Mozilla Public\\n License, v. 2.0. If a copy of the MPL was not distributed with this\\n file, You can obtain one at http://mozilla.org/MPL/2.0/. */\\n\\n \")) in insertion mode BeforeHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n\n\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n\\n\\n \")) in insertion mode BeforeHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n\\n\\n \")) in insertion mode BeforeHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'head\' type=static), self_closing: false, attrs: [] }) in insertion mode BeforeHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(SingleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "utf-8"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\'')) | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'meta\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'charset\' type=static) }, value: Tendril<UTF8>(inline: \"utf-8\") }] }) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'title\' type=static), self_closing: false, attrs: [] }) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state RawData(Rcdata) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "firefox"))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"firefox\")) in insertion mode Text | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state RawLessThanSign(Rcdata) | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: processing in state RawEndTagOpen(Rcdata) | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: processing in state RawEndTagName(Rcdata) | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: EndTag, name: Atom(\'title\' type=static), self_closing: false, attrs: [] }) in insertion mode Text | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character k | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(SingleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(shared: "stylesheet"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\'')) | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(SingleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "default"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\'')) | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(SingleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(shared: "css/theme.css"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\'')) | |
DEBUG:html5ever::tokenizer: got character ' | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'link\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'rel\' type=static) }, value: Tendril<UTF8>(owned: \"stylesheet\") }, Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'title\' type=static) }, value: Tendril<UTF8>(inline: \"default\") }, Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'href\' type=static) }, value: Tendril<UTF8>(owned: \"css/theme.css\") }] }) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character ! | |
DEBUG:html5ever::tokenizer: processing in state MarkupDeclarationOpen | |
DEBUG:html5ever::tokenizer: processing in state CommentStart | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state Comment | |
DEBUG:html5ever::tokenizer: got character M | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:net::resource_task: resource_task: loading url: http://localhost:6060/css/theme.css | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character g | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEndDash | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEnd | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing CommentToken(Tendril<UTF8>(owned: \" Module handling \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character ! | |
DEBUG:html5ever::tokenizer: processing in state MarkupDeclarationOpen | |
DEBUG:html5ever::tokenizer: processing in state CommentStart | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state Comment | |
DEBUG:html5ever::tokenizer: got character A | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character u | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character J | |
DEBUG:html5ever::tokenizer: got character S | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character l | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character f | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character m | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character j | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character v | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character p | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEndDash | |
DEBUG:html5ever::tokenizer: got character - | |
DEBUG:html5ever::tokenizer: processing in state CommentEnd | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing CommentToken(Tendril<UTF8>(owned: \" All further JS code is loaded from javascript \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: processing in state EndTagOpen | |
DEBUG:html5ever::tokenizer: got character h | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: EndTag, name: Atom(\'head\' type=static), self_closing: false, attrs: [] }) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode AfterHead | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode AfterHead | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character y | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character a | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character n | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character e | |
DEBUG:html5ever::tokenizer: got character x | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character " | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(DoubleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "-1"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\"')) | |
DEBUG:html5ever::tokenizer: got character " | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'body\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'tabindex\' type=static) }, value: Tendril<UTF8>(inline: \"-1\") }] }) in insertion mode AfterHead | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:gfx::paint_task: PaintTask: beginning painting loop | |
DEBUG:compositing::constellation: constellation got head parsed message | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InBody | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: processing in state EndTagOpen | |
DEBUG:html5ever::tokenizer: got character b | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character o | |
DEBUG:html5ever::tokenizer: got character d | |
DEBUG:html5ever::tokenizer: got character y | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: EndTag, name: Atom(\'body\' type=static), self_closing: false, attrs: [] }) in insertion mode InBody | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: "\n "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \"\\n \")) in insertion mode AfterBody | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode AfterBody | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(Whitespace, Tendril<UTF8>(inline: \"\\n \")) in insertion mode InBody | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state TagOpen | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: processing in state TagName | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character p | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeName | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: processing in state AttributeName | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character = | |
DEBUG:html5ever::tokenizer: processing in state BeforeAttributeValue | |
DEBUG:html5ever::tokenizer: got character " | |
DEBUG:html5ever::tokenizer: processing in state AttributeValue(DoubleQuoted) | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(shared: "./dist/browser/index.js"))) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('\"')) | |
DEBUG:html5ever::tokenizer: got character " | |
DEBUG:html5ever::tokenizer: processing in state AfterAttributeValueQuoted | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'script\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'src\' type=static) }, value: Tendril<UTF8>(owned: \"./dist/browser/index.js\") }] }) in insertion mode AfterBody | |
INFO:net::http_loader: requesting http://localhost:6060/css/theme.css | |
DEBUG:script::parse::html: Parse error: Unexpected token | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'script\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'src\' type=static) }, value: Tendril<UTF8>(owned: \"./dist/browser/index.js\") }] }) in insertion mode InBody | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: StartTag, name: Atom(\'script\' type=static), self_closing: false, attrs: [Attribute { name: QualName { ns: Namespace(Atom(\'\' type=static)), local: Atom(\'src\' type=static) }, value: Tendril<UTF8>(owned: \"./dist/browser/index.js\") }] }) in insertion mode InHead | |
DEBUG:html5ever::tokenizer: processing in state RawData(ScriptData) | |
DEBUG:html5ever::tokenizer: got characters Some(FromSet('<')) | |
DEBUG:html5ever::tokenizer: got character < | |
DEBUG:html5ever::tokenizer: processing in state RawLessThanSign(ScriptData) | |
DEBUG:html5ever::tokenizer: got character / | |
DEBUG:html5ever::tokenizer: processing in state RawEndTagOpen(ScriptData) | |
DEBUG:html5ever::tokenizer: got character s | |
DEBUG:html5ever::tokenizer: processing in state RawEndTagName(ScriptData) | |
DEBUG:html5ever::tokenizer: got character c | |
DEBUG:html5ever::tokenizer: got character r | |
DEBUG:html5ever::tokenizer: got character i | |
DEBUG:html5ever::tokenizer: got character p | |
DEBUG:html5ever::tokenizer: got character t | |
DEBUG:html5ever::tokenizer: got character > | |
DEBUG:html5ever::tree_builder: processing TagToken(Tag { kind: EndTag, name: Atom(\'script\' type=static), self_closing: false, attrs: [] }) in insertion mode Text | |
WARN:html5ever::tree_builder::rules: FIXME: </script> not fully implemented | |
DEBUG:script::dom::htmlscriptelement: no script type | |
DEBUG:script::dom::htmlscriptelement: no script type or language, inferring js | |
DEBUG:html5ever::tokenizer: processing in state Quiescent | |
DEBUG:net::resource_task: resource_task: loading url: http://localhost:6060/dist/browser/index.js | |
INFO:net::http_loader: requesting http://localhost:6060/dist/browser/index.js | |
DEBUG:hyper::net: http scheme | |
INFO:net::cookie_storage: === COOKIES SENT: | |
INFO:net::http_loader: GET | |
INFO:net::http_loader: - Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8 | |
INFO:net::http_loader: - Host: localhost:6060 | |
INFO:net::http_loader: - Accept-Encoding: gzip, deflate | |
INFO:net::http_loader: - User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0 | |
INFO:net::http_loader: None | |
DEBUG:hyper::http::h1: request line: Get "/css/theme.css" Http11 | |
DEBUG:hyper::http::h1: headers=Headers { Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8, Host: localhost:6060, Accept-Encoding: gzip, deflate, User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0, } | |
DEBUG:hyper::net: http scheme | |
DEBUG:hyper::client::response: version=Http11, status=Ok | |
DEBUG:hyper::client::response: headers=Headers { server: ecstatic-0.8.0, Connection: keep-alive, etag: "3784379-1072-Sat Aug 08 2015 09:29:55 GMT+0200 (CEST)", cache-control: max-age=0, content-length: 1072, content-type: text/css; charset=UTF-8, Date: Wed, 12 Aug 2015 07:07:32 GMT, last-modified: Sat, 08 Aug 2015 07:29:55 GMT, } | |
INFO:net::http_loader: got HTTP response 200 OK, headers: | |
INFO:net::http_loader: - server: ecstatic-0.8.0 | |
INFO:net::http_loader: - Connection: keep-alive | |
INFO:net::http_loader: - etag: "3784379-1072-Sat Aug 08 2015 09:29:55 GMT+0200 (CEST)" | |
INFO:net::http_loader: - cache-control: max-age=0 | |
INFO:net::http_loader: - content-length: 1072 | |
INFO:net::http_loader: - content-type: text/css; charset=UTF-8 | |
INFO:net::http_loader: - Date: Wed, 12 Aug 2015 07:07:32 GMT | |
INFO:net::http_loader: - last-modified: Sat, 08 Aug 2015 07:29:55 GMT | |
DEBUG:mime: starting params, len=23 | |
DEBUG:mime: param_from_str, start=9 | |
DEBUG:mime: starting params, len=23 | |
DEBUG:mime: param_from_str, start=9 | |
INFO:style::parser: 4:3 Unsupported @font-face descriptor declaration: 'font-weight: normal;' | |
INFO:style::parser: 5:3 Unsupported @font-face descriptor declaration: 'font-style: normal;' | |
INFO:net::cookie_storage: === COOKIES SENT: | |
INFO:net::http_loader: GET | |
INFO:net::http_loader: - Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8 | |
INFO:net::http_loader: - Accept-Encoding: gzip, deflate | |
INFO:net::http_loader: - User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0 | |
INFO:net::http_loader: - Host: localhost:6060 | |
INFO:net::http_loader: None | |
DEBUG:hyper::http::h1: request line: Get "/dist/browser/index.js" Http11 | |
DEBUG:hyper::http::h1: headers=Headers { Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8, Accept-Encoding: gzip, deflate, User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0, Host: localhost:6060, } | |
INFO:style::parser: 29:3 Unsupported property declaration: '-moz-appearance: none;' | |
INFO:style::parser: 44:1 Invalid rule: '*::-moz-focus-inner {' | |
INFO:style::parser: 54:1 Invalid rule: '@keyframes progressBarLoading ' | |
DEBUG:hyper::client::response: version=Http11, status=Ok | |
DEBUG:hyper::client::response: headers=Headers { content-type: application/javascript; charset=utf-8, etag: "3270644-1142677-Wed Aug 12 2015 09:05:23 GMT+0200 (CEST)", Date: Wed, 12 Aug 2015 07:07:32 GMT, server: ecstatic-0.8.0, content-length: 1142677, cache-control: max-age=0, Connection: keep-alive, last-modified: Wed, 12 Aug 2015 07:05:23 GMT, } | |
INFO:net::http_loader: got HTTP response 200 OK, headers: | |
INFO:net::http_loader: - content-type: application/javascript; charset=utf-8 | |
INFO:net::http_loader: - etag: "3270644-1142677-Wed Aug 12 2015 09:05:23 GMT+0200 (CEST)" | |
INFO:net::http_loader: - Date: Wed, 12 Aug 2015 07:07:32 GMT | |
INFO:net::http_loader: - server: ecstatic-0.8.0 | |
INFO:net::http_loader: - content-length: 1142677 | |
INFO:net::http_loader: - cache-control: max-age=0 | |
INFO:net::http_loader: - Connection: keep-alive | |
INFO:net::http_loader: - last-modified: Wed, 12 Aug 2015 07:05:23 GMT | |
DEBUG:mime: starting params, len=37 | |
DEBUG:mime: param_from_str, start=23 | |
DEBUG:mime: starting params, len=37 | |
DEBUG:mime: param_from_str, start=23 | |
DEBUG:net::resource_task: resource_task: loading url: http://localhost:6060/css/fontawesome-webfont.woff | |
INFO:net::http_loader: requesting http://localhost:6060/css/fontawesome-webfont.woff | |
DEBUG:hyper::net: http scheme | |
INFO:net::cookie_storage: === COOKIES SENT: | |
INFO:net::http_loader: GET | |
INFO:net::http_loader: - Host: localhost:6060 | |
INFO:net::http_loader: - Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8 | |
INFO:net::http_loader: - User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0 | |
INFO:net::http_loader: - Accept-Encoding: gzip, deflate | |
INFO:net::http_loader: None | |
DEBUG:hyper::http::h1: request line: Get "/css/fontawesome-webfont.woff" Http11 | |
DEBUG:hyper::http::h1: headers=Headers { Host: localhost:6060, Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8, User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0, Accept-Encoding: gzip, deflate, } | |
DEBUG:hyper::client::response: version=Http11, status=Ok | |
DEBUG:hyper::client::response: headers=Headers { last-modified: Thu, 30 Jul 2015 14:14:44 GMT, server: ecstatic-0.8.0, Date: Wed, 12 Aug 2015 07:07:32 GMT, content-length: 65452, content-type: application/font-woff; charset=utf-8, cache-control: max-age=0, etag: "1612893-65452-Thu Jul 30 2015 16:14:44 GMT+0200 (CEST)", Connection: keep-alive, } | |
INFO:net::http_loader: got HTTP response 200 OK, headers: | |
INFO:net::http_loader: - last-modified: Thu, 30 Jul 2015 14:14:44 GMT | |
INFO:net::http_loader: - server: ecstatic-0.8.0 | |
INFO:net::http_loader: - Date: Wed, 12 Aug 2015 07:07:32 GMT | |
INFO:net::http_loader: - content-length: 65452 | |
INFO:net::http_loader: - content-type: application/font-woff; charset=utf-8 | |
INFO:net::http_loader: - cache-control: max-age=0 | |
INFO:net::http_loader: - etag: "1612893-65452-Thu Jul 30 2015 16:14:44 GMT+0200 (CEST)" | |
INFO:net::http_loader: - Connection: keep-alive | |
DEBUG:mime: starting params, len=36 | |
DEBUG:mime: param_from_str, start=22 | |
DEBUG:mime: starting params, len=36 | |
DEBUG:mime: param_from_str, start=22 | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters Some(NotFromSet(Tendril<UTF8>(inline: " "))) | |
DEBUG:html5ever::tree_builder: processing CharacterTokens(NotSplit, Tendril<UTF8>(inline: \" \")) in insertion mode InBody | |
DEBUG:html5ever::tokenizer: got characters None | |
DEBUG:html5ever::tokenizer: processing in state Data | |
DEBUG:html5ever::tokenizer: got characters None | |
DEBUG:html5ever::tokenizer: processing EOF in state Data | |
DEBUG:html5ever::tree_builder: processing EOFToken in insertion mode InBody | |
WARN:html5ever::tree_builder::actions: stop_parsing not implemented, full speed ahead! | |
DEBUG:script::dom::servohtmlparser: finished parsing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
ERROR:js::rust: Error at http://localhost:6060/dist/browser/index.js:151: mutating the [[Prototype]] of an object will cause your code to run very slowly; instead create the object with the correct initial [[Prototype]] value using Object.create | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: proxy dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:net::resource_task: resource_task: loading url: http://localhost:6060/dist/service/history-worker.js | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
Compatible session was not found, loading default | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::utils: outerizing | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:compositing::compositor: compositor: compositing | |
DEBUG:script::dom::bindings::conversions: not a dom object | |
DEBUG:script::dom::bindings::conversions: found wrapper | |
DEBUG:script::dom::bindings::conversions: unwrapped successfully | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
thread 'LayoutTask PipelineId(0)' panicked at 'called `Option::unwrap()` on a `None` value', src/libcore/option.rs:363 | |
stack backtrace: | |
INFO:net::http_loader: requesting http://localhost:6060/dist/service/history-worker.js | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x110ba61d1 - panicking::panic::h5714745e929d6902jgC | |
DEBUG:hyper::net: http scheme | |
8: 0x10eb95e78 - option::Option<T>::unwrap::h15539155344857347110 | |
9: 0x10e8c6d65 - layout_task::LayoutTask::tick_animations::hd6d29c53ffac59dezWn | |
10: 0x10e8c6c38 - animation::tick_all_animations::h5c0db993bf4b75cbVra | |
INFO:net::cookie_storage: === COOKIES SENT: | |
INFO:net::http_loader: GET | |
INFO:net::http_loader: - Host: localhost:6060 | |
INFO:net::http_loader: - Accept-Encoding: gzip, deflate | |
INFO:net::http_loader: - Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8 | |
INFO:net::http_loader: - User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0 | |
INFO:net::http_loader: None | |
DEBUG:hyper::http::h1: request line: Get "/dist/service/history-worker.js" Http11 | |
DEBUG:hyper::http::h1: headers=Headers { Host: localhost:6060, Accept-Encoding: gzip, deflate, Accept: text/html, application/xhtml+xml, application/xml; q=0.9, */*; q=0.8, User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0, } | |
11: 0x10eaf3adc - layout_task::LayoutTask::tick_all_animations::h97e32e687a8ed2ee9Vn | |
DEBUG:hyper::client::response: version=Http11, status=Ok | |
DEBUG:hyper::client::response: headers=Headers { server: ecstatic-0.8.0, etag: "3270643-660389-Wed Aug 12 2015 09:01:25 GMT+0200 (CEST)", cache-control: max-age=0, Connection: keep-alive, Date: Wed, 12 Aug 2015 07:07:33 GMT, last-modified: Wed, 12 Aug 2015 07:01:25 GMT, content-length: 660389, content-type: application/javascript; charset=utf-8, } | |
INFO:net::http_loader: got HTTP response 200 OK, headers: | |
INFO:net::http_loader: - server: ecstatic-0.8.0 | |
INFO:net::http_loader: - etag: "3270643-660389-Wed Aug 12 2015 09:01:25 GMT+0200 (CEST)" | |
INFO:net::http_loader: - cache-control: max-age=0 | |
INFO:net::http_loader: - Connection: keep-alive | |
INFO:net::http_loader: - Date: Wed, 12 Aug 2015 07:07:33 GMT | |
INFO:net::http_loader: - last-modified: Wed, 12 Aug 2015 07:01:25 GMT | |
INFO:net::http_loader: - content-length: 660389 | |
INFO:net::http_loader: - content-type: application/javascript; charset=utf-8 | |
DEBUG:mime: starting params, len=37 | |
DEBUG:mime: param_from_str, start=23 | |
DEBUG:mime: starting params, len=37 | |
DEBUG:mime: param_from_str, start=23 | |
12: 0x10eae43e3 - layout_task::LayoutTask::handle_request_helper::h0a1afd0c77da8416b6m | |
13: 0x10eac9dd7 - layout_task::LayoutTask::handle_request::h178c5b1b15432a33d1m | |
14: 0x10ea941ee - layout_task::LayoutTask::start::h9d0586e60c045e24AZm | |
15: 0x10ea94094 - layout_task::LayoutTask.LayoutTaskFactory::create::closure.42939 | |
16: 0x10ea8f080 - mem::ProfilerChan::run_with_memory_reporting::h7200289128505654286 | |
17: 0x10ea4f36b - layout_task::LayoutTask.LayoutTaskFactory::create::closure.41435 | |
18: 0x10ea4e924 - task::spawn_named_with_send_on_failure::closure.41423 | |
19: 0x10ea4e844 - boxed::F.FnBox<A>::call_box::h1796628171960678197 | |
20: 0x10ea4e260 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h7183705416328846374 | |
21: 0x10ea4dcd2 - thread::Builder::spawn_inner::closure.41393 | |
22: 0x10ea4dc4e - rt::unwind::try::try_fn::h3404193965539902703 | |
23: 0x110b832ef - __rust_try_inner | |
24: 0x110b8332a - __rust_try | |
25: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
26: 0x10ea4db98 - rt::unwind::try::h5234693488087733466 | |
27: 0x10ea4d9dc - thread::Builder::spawn_inner::closure.41345 | |
28: 0x10ea4e57d - boxed::F.FnBox<A>::call_box::h10097952976813094063 | |
29: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
30: 0x7fff9c42fcb2 - _pthread_body | |
31: 0x7fff9c42fc2f - _pthread_start | |
thread 'PaintTask PipelineId(0)' panicked at 'called `Result::unwrap()` on an `Err` value: RecvError', src/libcore/result.rs:732 | |
stack backtrace: | |
DEBUG:util::task: LayoutTask PipelineId(0) failed, notifying constellation | |
DEBUG:compositing::constellation: handling failure message from pipeline PipelineId(0), None | |
DEBUG:compositing::constellation: creating replacement pipeline for about:failure | |
thread 'Constellation' panicked at 'unable to find pipeline - this is a bug', src/libcore/option.rs:331 | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f1edb85 - result::Result<T, E>::unwrap::h6722942393829786063 | |
8: 0x10f1dee3f - paint_task::PaintTask<C>::start::h12334902498064252648 | |
9: 0x10f1de9a1 - paint_task::PaintTask<C>::create::closure.19406 | |
10: 0x10f1dba14 - mem::ProfilerChan::run_with_memory_reporting::h15322234012243716839 | |
11: 0x10f1c9bc6 - paint_task::PaintTask<C>::create::closure.18934 | |
12: 0x10f1c90b4 - task::spawn_named_with_send_on_failure::closure.18922 | |
13: 0x10f1c8fd4 - boxed::F.FnBox<A>::call_box::h1954714912166796199 | |
DEBUG:script::dom::window: script: performing reflow for goal ForDisplay reason RefreshTick | |
thread 'ScriptTask PipelineId(0)' panicked at 'called `Result::unwrap()` on an `Err` value: "SendError(..)"', src/libcore/result.rs:732 | |
14: 0x10f186480 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h11045847495309022178 | |
15: 0x10f185ef2 - thread::Builder::spawn_inner::closure.17494 | |
16: 0x10f185e6e - rt::unwind::try::try_fn::h6449145250205508641 | |
17: 0x110b832ef - __rust_try_inner | |
18: 0x110b8332a - __rust_try | |
19: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
20: 0x10f185db8 - rt::unwind::try::h12102202677097795876 | |
21: 0x10f185bfc - thread::Builder::spawn_inner::closure.17446 | |
22: 0x10f18679d - boxed::F.FnBox<A>::call_box::h114972367273553992 | |
23: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
24: 0x7fff9c42fcb2 - _pthread_body | |
25: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
thread 'PaintWorker' panicked at 'called `Result::unwrap()` on an `Err` value: RecvError', src/libcore/result.rs:732 | |
thread 'PaintWorker' panicked at 'called `Result::unwrap()` on an `Err` value: RecvError', src/libcore/result.rs:732 | |
thread 'PaintWorker' panicked at 'called `Result::unwrap()` on an `Err` value: RecvError', src/libcore/result.rs:732 | |
DEBUG:util::task: PaintTask PipelineId(0) failed, notifying constellation | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10e720dcf - option::Option<T>::expect::h5558185363707098320 | |
8: 0x10e71f509 - constellation::Constellation<LTF, STF>::handle_failure_msg::h4368409786590170396 | |
9: 0x10e6fd147 - constellation::Constellation<LTF, STF>::handle_request::h15811129127919044340 | |
10: 0x10e6f7958 - constellation::Constellation<LTF, STF>::run::h3324120043767604433 | |
11: 0x10e6c6802 - constellation::Constellation<LTF, STF>::start::closure.7105 | |
12: 0x10e6c5744 - task::spawn_named::closure.7097 | |
13: 0x10e6c566c - boxed::F.FnBox<A>::call_box::h15434627635279510253 | |
14: 0x10e6c50e0 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h8478236843435946792 | |
15: 0x10e6c4b52 - thread::Builder::spawn_inner::closure.7066 | |
16: 0x10e6c4ace - rt::unwind::try::try_fn::h9314142595301289953 | |
17: 0x110b832ef - __rust_try_inner | |
18: 0x110b8332a - __rust_try | |
19: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
20: 0x10e6c4a18 - rt::unwind::try::h5638903880831745177 | |
21: 0x10e6c485c - thread::Builder::spawn_inner::closure.7015 | |
22: 0x10e6c53fd - boxed::F.FnBox<A>::call_box::h11970205697062275101 | |
23: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
24: 0x7fff9c42fcb2 - _pthread_body | |
25: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f8a05b7 - result::Result<T, E>::unwrap::h11972107952833270037 | |
8: 0x10fb1ee31 - dom::window::_&'a Window.WindowHelpers::force_reflow::h9093854e26e4e6e4LO2 | |
9: 0x10f8a4f0d - dom::window::_&'a Window.WindowHelpers::reflow::h87f9acbfe3a75acbAU2 | |
10: 0x10f8a4761 - dom::document::_&'a Document.DocumentHelpers<'a>::reflow_if_reflow_timer_expired::h38cf9ee55e3b389cgCA | |
11: 0x10fa2e146 - dom::servohtmlparser::_&'a ServoHTMLParser.PrivateServoHTMLParserHelpers::parse_sync::h5a5819c9f0204187AcY | |
12: 0x10f9d82a3 - dom::servohtmlparser::_&'a ServoHTMLParser.ServoHTMLParserHelpers::resume::h952363359fa18223cfY | |
13: 0x10f9d5d4b - dom::htmlscriptelement::ScriptContext.AsyncResponseListener::response_complete::h27905f22360a64eea9O | |
14: 0x10fc9491e - ResponseAction::process::h200efa5f64a31897vDg | |
15: 0x10f9e088a - network_listener::ListenerRunnable<T>.Runnable::handler::h13315912262574552106 | |
16: 0x10fb9719e - script_task::ScriptTask::handle_msg_from_script::hd27475581dc1b8d05e8 | |
17: 0x10fba06ce - script_task::ScriptTask::handle_msgs::he96f769816dd373cw27 | |
18: 0x10fb938c5 - script_task::ScriptTask::start::hb98cb94ad56093d6m27 | |
19: 0x10fb93871 - script_task::ScriptTask.ScriptTaskFactory::create::closure.136887 | |
20: 0x10fb9352d - mem::ProfilerChan::run_with_memory_reporting::h5653983412344488514 | |
21: 0x10fb875f9 - script_task::ScriptTask.ScriptTaskFactory::create::closure.136630 | |
22: 0x10fb86754 - task::spawn_named_with_send_on_failure::closure.136623 | |
23: 0x10fb86674 - boxed::F.FnBox<A>::call_box::h11884923646255992673 | |
24: 0x10f340d00 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13368981422667932439 | |
25: 0x10f340772 - thread::Builder::spawn_inner::closure.101542 | |
26: 0x10f3406ee - rt::unwind::try::try_fn::h6677908613655694456 | |
27: 0x110b832ef - __rust_try_inner | |
28: 0x110b8332a - __rust_try | |
29: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
30: 0x10f340638 - rt::unwind::try::h6991053548557018072 | |
31: 0x10f34047c - thread::Builder::spawn_inner::closure.101491 | |
32: 0x10f34108d - boxed::F.FnBox<A>::call_box::h3349485803116102935 | |
33: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
34: 0x7fff9c42fcb2 - _pthread_body | |
35: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
DEBUG:script::dom::bindings::trace: tracing reflector LIVE_REFERENCES | |
DEBUG:script::dom::bindings::trace: tracing reflector LIVE_REFERENCES | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG :2script::dom::bindings::trace: : tracing prototype | |
DEBUG0x:110b839f0script::dom::bindings::trace - : tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::tracepanicking::on_panic::h399650fb5f386215wWw | |
: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
4DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG: : script::dom::bindings::trace: tracing reflector | |
0x110b70e6c - rt::unwind::begin_unwind_fmtDEBUG::h2b843ddd6c3f7d25NDw | |
:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
5: 0x110b833bc - rust_begin_unwind | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
7: 0x10f2838f0 - result::Result<T, E>::unwrap::h6751806460445038813 | |
DEBUG:script::dom::bindings::codegen::Bindings::DocumentBinding: Document finalize: 0x11d05aa80 | |
DEBUG:script::dom::bindings::codegen::Bindings::EventBinding: Event finalize: 0x11d04f120 | |
DEBUG:script::dom::bindings::codegen::Bindings::EventBinding: Event finalize: 0x11544e000 | |
DEBUG:script::dom::bindings::codegen::Bindings::HTMLDivElementBinding: HTMLDivElement finalize: 0x11a453d00 | |
DEBUG:script::dom::bindings::codegen::Bindings::DocumentFragmentBinding: DocumentFragment finalize: 0x11a438180 | |
DEBUG:script::dom::bindings::codegen::Bindings::HTMLHtmlElementBinding: HTMLHtmlElement finalize: 0x11a453bc0 | |
DEBUG:script::dom::bindings::codegen::Bindings::ServoHTMLParserBinding: ServoHTMLParser finalize: 0x11d05ae00 | |
DEBUG:script::dom::bindings::codegen::Bindings::TextBinding: Text finalize: 0x11a43c0e0 | |
DEBUG:script::dom::bindings::codegen::Bindings::HTMLScriptElementBinding: HTMLScriptElement finalize: 0x11d071180 | |
DEBUG:script::dom::bindings::codegen::Bindings::EventBinding: Event finalize: 0x11d04f4e0 | |
DEBUG:script::dom::bindings::codegen::Bindings::EventBinding: Event finalize: 0x11d04f540 | |
8: 0x10f271386 - paint_task::WorkerThread::main::h2764d5af326c986eRsi | |
9: 0x10f270f0f - paint_task::WorkerThreadProxy::spawn::closure.24737 | |
DEBUG:script::dom::bindings::trace: tracing reflector LIVE_REFERENCES | |
DEBUG:script::dom::bindings::trace: tracing reflector LIVE_REFERENCES | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG10:: script::dom::bindings::trace : tracing prototype | |
0x10f270cbb - task::spawn_named::DEBUG:script::dom::bindings::trace: tracing prototype | |
closure.24729 | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
DEBUG:script::dom::bindings::trace: tracing prototype | |
11: 0x10f270bda - boxed::F.FnBox<A>::call_box::h5688413468807999399 | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
12: 0x10f270640 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13544975863314465512 | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
13: 0x10f2700b2 - thread::Builder::spawn_inner::closure.24702 | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
DEBUG:script::dom::bindings::trace: tracing object | |
DEBUG:script::dom::bindings::trace: tracing reflector | |
14: 0x10f27002e - rt::unwind::try::try_fn::h8462857954057302017 | |
15: 0x110b832ef - __rust_try_inner | |
DEBUG:util::task: ScriptTask PipelineId(0) failed, notifying constellation | |
thread 'ScriptTask PipelineId(0)Watcher' panicked at 'called `Result::unwrap()` on an `Err` value: "SendError(..)"', src/libcore/result.rs:732 | |
16: 0x110b8332a - __rust_try | |
17: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
18: 0x10f26ff78 - rt::unwind::try::h7062861114634023821 | |
19: 0x10f26fdbc - thread::Builder::spawn_inner::closure.24653 | |
20: 0x10f27095d - boxed::F.FnBox<A>::call_box::h882486209979280460 | |
21: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
22: 0x7fff9c42fcb2 - _pthread_body | |
23: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f2838f0 - result::Result<T, E>::unwrap::h6751806460445038813 | |
8: 0x10f271386 - paint_task::WorkerThread::main::h2764d5af326c986eRsi | |
9: 0x10f270f0f - paint_task::WorkerThreadProxy::spawn::closure.24737 | |
10: 0x10f270cbb - task::spawn_named::closure.24729 | |
11: 0x10f270bda - boxed::F.FnBox<A>::call_box::h5688413468807999399 | |
12: 0x10f270640 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13544975863314465512 | |
13: 0x10f2700b2 - thread::Builder::spawn_inner::closure.24702 | |
14: 0x10f27002e - rt::unwind::try::try_fn::h8462857954057302017 | |
15: 0x110b832ef - __rust_try_inner | |
16: 0x110b8332a - __rust_try | |
17: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
18: 0x10f26ff78 - rt::unwind::try::h7062861114634023821 | |
19: 0x10f26fdbc - thread::Builder::spawn_inner::closure.24653 | |
20: 0x10f27095d - boxed::F.FnBox<A>::call_box::h882486209979280460 | |
21: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
22: 0x7fff9c42fcb2 - _pthread_body | |
23: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f2838f0 - result::Result<T, E>::unwrap::h6751806460445038813 | |
8: 0x10f271386 - paint_task::WorkerThread::main::h2764d5af326c986eRsi | |
9: 0x10f270f0f - paint_task::WorkerThreadProxy::spawn::closure.24737 | |
10: 0x10f270cbb - task::spawn_named::closure.24729 | |
11: 0x10f270bda - boxed::F.FnBox<A>::call_box::h5688413468807999399 | |
12: 0x10f270640 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13544975863314465512 | |
13: 0x10f2700b2 - thread::Builder::spawn_inner::closure.24702 | |
14: 0x10f27002e - rt::unwind::try::try_fn::h8462857954057302017 | |
15: 0x110b832ef - __rust_try_inner | |
16: 0x110b8332a - __rust_try | |
17: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
18: 0x10f26ff78 - rt::unwind::try::h7062861114634023821 | |
19: 0x10f26fdbc - thread::Builder::spawn_inner::closure.24653 | |
20: 0x10f27095d - boxed::F.FnBox<A>::call_box::h882486209979280460 | |
21: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
22: 0x7fff9c42fcb2 - _pthread_body | |
23: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
thread 'FontCacheTask' panicked at 'called `Result::unwrap()` on an `Err` value: RecvError', src/libcore/result.rs:732 | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f7b15a7 - result::Result<T, E>::unwrap::h16998102045537701957 | |
8: 0x10fb8821d - task::spawn_named_with_send_on_failure::closure.136635 | |
9: 0x10fb87cc4 - boxed::F.FnBox<A>::call_box::h15111524700936030927 | |
10: 0x10f340d00 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13368981422667932439 | |
11: 0x10f340772 - thread::Builder::spawn_inner::closure.101542 | |
12: 0x10f3406ee - rt::unwind::try::try_fn::h6677908613655694456 | |
13: 0x110b832ef - __rust_try_inner | |
14: 0x110b8332a - __rust_try | |
15: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
16: 0x10f340638 - rt::unwind::try::h6991053548557018072 | |
17: 0x10f34047c - thread::Builder::spawn_inner::closure.101491 | |
18: 0x10f34108d - boxed::F.FnBox<A>::call_box::h3349485803116102935 | |
19: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
20: 0x7fff9c42fcb2 - _pthread_body | |
21: 0x7fff9c42fc2f - _pthread_start | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10f2bb930 - result::Result<T, E>::unwrap::h9065109596398178978 | |
8: 0x10f2ba67f - font_cache_task::FontCache::run::haaf9190563a7e9c8v8j | |
9: 0x10f2def79 - font_cache_task::FontCacheTask::new::closure.26987 | |
10: 0x10f2deb7b - task::spawn_named::closure.26980 | |
11: 0x10f2deac1 - boxed::F.FnBox<A>::call_box::h4105606164421811349 | |
12: 0x10f270640 - boxed::Box<FnBox<A, Output $u3d$$u20$R$GT$$u2b$$u20$Send$u20$$u2b$$u20$$u27$a$GT$.FnOnce$LT$A$GT$::call_once::h13544975863314465512 | |
13: 0x10f2700b2 - thread::Builder::spawn_inner::closure.24702 | |
14: 0x10f27002e - rt::unwind::try::try_fn::h8462857954057302017 | |
15: 0x110b832ef - __rust_try_inner | |
16: 0x110b8332a - __rust_try | |
17: 0x110b7e925 - rt::unwind::try::inner_try::h300fa716ddfd682bAAw | |
18: 0x10f26ff78 - rt::unwind::try::h7062861114634023821 | |
19: 0x10f26fdbc - thread::Builder::spawn_inner::closure.24653 | |
20: 0x10f27095d - boxed::F.FnBox<A>::call_box::h882486209979280460 | |
21: 0x110b8233d - sys::thread::Thread::new::thread_start::he9d2ef50096740e9bZv | |
22: 0x7fff9c42fcb2 - _pthread_body | |
23: 0x7fff9c42fc2f - _pthread_start | |
DEBUG:js::rust: Evaluating script from http://localhost:6060/dist/service/history-worker.js with content (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/array/from"), __esModule: true }; | |
},{"core-js/library/fn/array/from":12}],2:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/object/assign"), __esModule: true }; | |
},{"core-js/library/fn/object/assign":13}],3:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/object/create"), __esModule: true }; | |
},{"core-js/library/fn/object/create":14}],4:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/object/define-property"), __esModule: true }; | |
},{"core-js/library/fn/object/define-property":15}],5:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/promise"), __esModule: true }; | |
},{"core-js/library/fn/promise":16}],6:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/symbol"), __esModule: true }; | |
},{"core-js/library/fn/symbol":17}],7:[function(require,module,exports){ | |
module.exports = { "default": require("core-js/library/fn/symbol/iterator"), __esModule: true }; | |
},{"core-js/library/fn/symbol/iterator":18}],8:[function(require,module,exports){ | |
"use strict"; | |
exports["default"] = function (instance, Constructor) { | |
if (!(instance instanceof Constructor)) { | |
throw new TypeError("Cannot call a class as a function"); | |
} | |
}; | |
exports.__esModule = true; | |
},{}],9:[function(require,module,exports){ | |
"use strict"; | |
var _Object$defineProperty = require("babel-runtime/core-js/object/define-property")["default"]; | |
exports["default"] = (function () { | |
function defineProperties(target, props) { | |
for (var i = 0; i < props.length; i++) { | |
var descriptor = props[i]; | |
descriptor.enumerable = descriptor.enumerable || false; | |
descriptor.configurable = true; | |
if ("value" in descriptor) descriptor.writable = true; | |
_Object$defineProperty(target, descriptor.key, descriptor); | |
} | |
} | |
return function (Constructor, protoProps, staticProps) { | |
if (protoProps) defineProperties(Constructor.prototype, protoProps); | |
if (staticProps) defineProperties(Constructor, staticProps); | |
return Constructor; | |
}; | |
})(); | |
exports.__esModule = true; | |
},{"babel-runtime/core-js/object/define-property":4}],10:[function(require,module,exports){ | |
"use strict"; | |
var _Object$defineProperty = require("babel-runtime/core-js/object/define-property")["default"]; | |
exports["default"] = function (obj, key, value) { | |
if (key in obj) { | |
_Object$defineProperty(obj, key, { | |
value: value, | |
enumerable: true, | |
configurable: true, | |
writable: true | |
}); | |
} else { | |
obj[key] = value; | |
} | |
return obj; | |
}; | |
exports.__esModule = true; | |
},{"babel-runtime/core-js/object/define-property":4}],11:[function(require,module,exports){ | |
"use strict"; | |
var _Array$from = require("babel-runtime/core-js/array/from")["default"]; | |
exports["default"] = function (arr) { | |
if (Array.isArray(arr)) { | |
for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) arr2[i] = arr[i]; | |
return arr2; | |
} else { | |
return _Array$from(arr); | |
} | |
}; | |
exports.__esModule = true; | |
},{"babel-runtime/core-js/array/from":1}],12:[function(require,module,exports){ | |
require('../../modules/es6.string.iterator'); | |
require('../../modules/es6.array.from'); | |
module.exports = require('../../modules/$.core').Array.from; | |
},{"../../modules/$.core":24,"../../modules/es6.array.from":69,"../../modules/es6.string.iterator":74}],13:[function(require,module,exports){ | |
require('../../modules/es6.object.assign'); | |
module.exports = require('../../modules/$.core').Object.assign; | |
},{"../../modules/$.core":24,"../../modules/es6.object.assign":71}],14:[function(require,module,exports){ | |
var $ = require('../../modules/$'); | |
module.exports = function create(P, D){ | |
return $.create(P, D); | |
}; | |
},{"../../modules/$":47}],15:[function(require,module,exports){ | |
var $ = require('../../modules/$'); | |
module.exports = function defineProperty(it, key, desc){ | |
return $.setDesc(it, key, desc); | |
}; | |
},{"../../modules/$":47}],16:[function(require,module,exports){ | |
require('../modules/es6.object.to-string'); | |
require('../modules/es6.string.iterator'); | |
require('../modules/web.dom.iterable'); | |
require('../modules/es6.promise'); | |
module.exports = require('../modules/$.core').Promise; | |
},{"../modules/$.core":24,"../modules/es6.object.to-string":72,"../modules/es6.promise":73,"../modules/es6.string.iterator":74,"../modules/web.dom.iterable":76}],17:[function(require,module,exports){ | |
require('../../modules/es6.symbol'); | |
module.exports = require('../../modules/$.core').Symbol; | |
},{"../../modules/$.core":24,"../../modules/es6.symbol":75}],18:[function(require,module,exports){ | |
require('../../modules/es6.string.iterator'); | |
require('../../modules/web.dom.iterable'); | |
module.exports = require('../../modules/$.wks')('iterator'); | |
},{"../../modules/$.wks":67,"../../modules/es6.string.iterator":74,"../../modules/web.dom.iterable":76}],19:[function(require,module,exports){ | |
module.exports = function(it){ | |
if(typeof it != 'function')throw TypeError(it + ' is not a function!'); | |
return it; | |
}; | |
},{}],20:[function(require,module,exports){ | |
var isObject = require('./$.is-object'); | |
module.exports = function(it){ | |
if(!isObject(it))throw TypeError(it + ' is not an object!'); | |
return it; | |
}; | |
},{"./$.is-object":39}],21:[function(require,module,exports){ | |
var toObject = require('./$.to-object') | |
, ES5Object = require('./$.es5-object') | |
, enumKeys = require('./$.enum-keys'); | |
// 19.1.2.1 Object.assign(target, source, ...) | |
/* eslint-disable no-unused-vars */ | |
module.exports = Object.assign || function assign(target, source){ | |
/* eslint-enable no-unused-vars */ | |
var T = toObject(target, true) | |
, l = arguments.length | |
, i = 1; | |
while(l > i){ | |
var S = ES5Object(arguments[i++]) | |
, keys = enumKeys(S) | |
, length = keys.length | |
, j = 0 | |
, key; | |
while(length > j)T[key = keys[j++]] = S[key]; | |
} | |
return T; | |
}; | |
},{"./$.enum-keys":29,"./$.es5-object":30,"./$.to-object":64}],22:[function(require,module,exports){ | |
var cof = require('./$.cof') | |
, TAG = require('./$.wks')('toStringTag') | |
// ES3 wrong here | |
, ARG = cof(function(){ return arguments; }()) == 'Arguments'; | |
module.exports = function(it){ | |
var O, T, B; | |
return it === undefined ? 'Undefined' : it === null ? 'Null' | |
// @@toStringTag case | |
: typeof (T = (O = Object(it))[TAG]) == 'string' ? T | |
// builtinTag case | |
: ARG ? cof(O) | |
// ES3 arguments fallback | |
: (B = cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B; | |
}; | |
},{"./$.cof":23,"./$.wks":67}],23:[function(require,module,exports){ | |
var toString = {}.toString; | |
module.exports = function(it){ | |
return toString.call(it).slice(8, -1); | |
}; | |
},{}],24:[function(require,module,exports){ | |
var core = module.exports = {}; | |
if(typeof __e == 'number')__e = core; // eslint-disable-line no-undef | |
},{}],25:[function(require,module,exports){ | |
// Optional / simple context binding | |
var aFunction = require('./$.a-function'); | |
module.exports = function(fn, that, length){ | |
aFunction(fn); | |
if(~length && that === undefined)return fn; | |
switch(length){ | |
case 1: return function(a){ | |
return fn.call(that, a); | |
}; | |
case 2: return function(a, b){ | |
return fn.call(that, a, b); | |
}; | |
case 3: return function(a, b, c){ | |
return fn.call(that, a, b, c); | |
}; | |
} return function(/* ...args */){ | |
return fn.apply(that, arguments); | |
}; | |
}; | |
},{"./$.a-function":19}],26:[function(require,module,exports){ | |
var global = require('./$.global') | |
, core = require('./$.core') | |
, PROTOTYPE = 'prototype'; | |
function ctx(fn, that){ | |
return function(){ | |
return fn.apply(that, arguments); | |
}; | |
} | |
// type bitmap | |
$def.F = 1; // forced | |
$def.G = 2; // global | |
$def.S = 4; // static | |
$def.P = 8; // proto | |
$def.B = 16; // bind | |
$def.W = 32; // wrap | |
function $def(type, name, source){ | |
var key, own, out, exp | |
, isGlobal = type & $def.G | |
, isProto = type & $def.P | |
, target = isGlobal ? global : type & $def.S | |
? global[name] : (global[name] || {})[PROTOTYPE] | |
, exports = isGlobal ? core : core[name] || (core[name] = {}); | |
if(isGlobal)source = name; | |
for(key in source){ | |
// contains in native | |
own = !(type & $def.F) && target && key in target; | |
if(own && key in exports)continue; | |
// export native or passed | |
out = own ? target[key] : source[key]; | |
// prevent global pollution for namespaces | |
if(isGlobal && typeof target[key] != 'function')exp = source[key]; | |
// bind timers to global for call from export context | |
else if(type & $def.B && own)exp = ctx(out, global); | |
// wrap global constructors for prevent change them in library | |
else if(type & $def.W && target[key] == out)!function(C){ | |
exp = function(param){ | |
return this instanceof C ? new C(param) : C(param); | |
}; | |
exp[PROTOTYPE] = C[PROTOTYPE]; | |
}(out); | |
else exp = isProto && typeof out == 'function' ? ctx(Function.call, out) : out; | |
// export | |
exports[key] = exp; | |
if(isProto)(exports[PROTOTYPE] || (exports[PROTOTYPE] = {}))[key] = out; | |
} | |
} | |
module.exports = $def; | |
},{"./$.core":24,"./$.global":33}],27:[function(require,module,exports){ | |
module.exports = function(it){ | |
if(it == undefined)throw TypeError("Can't call method on " + it); | |
return it; | |
}; | |
},{}],28:[function(require,module,exports){ | |
var isObject = require('./$.is-object') | |
, document = require('./$.global').document | |
// in old IE typeof document.createElement is 'object' | |
, is = isObject(document) && isObject(document.createElement); | |
module.exports = function(it){ | |
return is ? document.createElement(it) : {}; | |
}; | |
},{"./$.global":33,"./$.is-object":39}],29:[function(require,module,exports){ | |
var $ = require('./$'); | |
module.exports = function(it){ | |
var keys = $.getKeys(it) | |
, isEnum = $.isEnum | |
, getSymbols = $.getSymbols; | |
if(getSymbols)for(var symbols = getSymbols(it), i = 0, key; symbols.length > i; ){ | |
if(isEnum.call(it, key = symbols[i++]))keys.push(key); | |
} | |
return keys; | |
}; | |
},{"./$":47}],30:[function(require,module,exports){ | |
// fallback for not array-like ES3 strings | |
var cof = require('./$.cof') | |
, $Object = Object; | |
module.exports = 0 in $Object('z') ? $Object : function(it){ | |
return cof(it) == 'String' ? it.split('') : $Object(it); | |
}; | |
},{"./$.cof":23}],31:[function(require,module,exports){ | |
var ctx = require('./$.ctx') | |
, call = require('./$.iter-call') | |
, isArrayIter = require('./$.is-array-iter') | |
, anObject = require('./$.an-object') | |
, toLength = require('./$.to-length') | |
, getIterFn = require('./core.get-iterator-method'); | |
module.exports = function(iterable, entries, fn, that){ | |
var iterFn = getIterFn(iterable) | |
, f = ctx(fn, that, entries ? 2 : 1) | |
, index = 0 | |
, length, step, iterator; | |
if(typeof iterFn != 'function')throw TypeError(iterable + ' is not iterable!'); | |
// fast case for arrays with default iterator | |
if(isArrayIter(iterFn))for(length = toLength(iterable.length); length > index; index++){ | |
entries ? f(anObject(step = iterable[index])[0], step[1]) : f(iterable[index]); | |
} else for(iterator = iterFn.call(iterable); !(step = iterator.next()).done; ){ | |
call(iterator, f, step.value, entries); | |
} | |
}; | |
},{"./$.an-object":20,"./$.ctx":25,"./$.is-array-iter":38,"./$.iter-call":41,"./$.to-length":63,"./core.get-iterator-method":68}],32:[function(require,module,exports){ | |
// fallback for IE11 buggy Object.getOwnPropertyNames with iframe and window | |
var toString = {}.toString | |
, toObject = require('./$.to-object') | |
, getNames = require('./$').getNames; | |
var windowNames = typeof window == 'object' && Object.getOwnPropertyNames | |
? Object.getOwnPropertyNames(window) : []; | |
function getWindowNames(it){ | |
try { | |
return getNames(it); | |
} catch(e){ | |
return windowNames.slice(); | |
} | |
} | |
module.exports.get = function getOwnPropertyNames(it){ | |
if(windowNames && toString.call(it) == '[object Window]')return getWindowNames(it); | |
return getNames(toObject(it)); | |
}; | |
},{"./$":47,"./$.to-object":64}],33:[function(require,module,exports){ | |
var global = typeof self != 'undefined' && self.Math == Math ? self : Function('return this')(); | |
module.exports = global; | |
if(typeof __g == 'number')__g = global; // eslint-disable-line no-undef | |
},{}],34:[function(require,module,exports){ | |
var hasOwnProperty = {}.hasOwnProperty; | |
module.exports = function(it, key){ | |
return hasOwnProperty.call(it, key); | |
}; | |
},{}],35:[function(require,module,exports){ | |
var $ = require('./$') | |
, createDesc = require('./$.property-desc'); | |
module.exports = require('./$.support-desc') ? function(object, key, value){ | |
return $.setDesc(object, key, createDesc(1, value)); | |
} : function(object, key, value){ | |
object[key] = value; | |
return object; | |
}; | |
},{"./$":47,"./$.property-desc":51,"./$.support-desc":59}],36:[function(require,module,exports){ | |
module.exports = require('./$.global').document && document.documentElement; | |
},{"./$.global":33}],37:[function(require,module,exports){ | |
// Fast apply | |
// http://jsperf.lnkit.com/fast-apply/5 | |
module.exports = function(fn, args, that){ | |
var un = that === undefined; | |
switch(args.length){ | |
case 0: return un ? fn() | |
: fn.call(that); | |
case 1: return un ? fn(args[0]) | |
: fn.call(that, args[0]); | |
case 2: return un ? fn(args[0], args[1]) | |
: fn.call(that, args[0], args[1]); | |
case 3: return un ? fn(args[0], args[1], args[2]) | |
: fn.call(that, args[0], args[1], args[2]); | |
case 4: return un ? fn(args[0], args[1], args[2], args[3]) | |
: fn.call(that, args[0], args[1], args[2], args[3]); | |
case 5: return un ? fn(args[0], args[1], args[2], args[3], args[4]) | |
: fn.call(that, args[0], args[1], args[2], args[3], args[4]); | |
} return fn.apply(that, args); | |
}; | |
},{}],38:[function(require,module,exports){ | |
var Iterators = require('./$.iterators') | |
, ITERATOR = require('./$.wks')('iterator'); | |
module.exports = function(it){ | |
return ('Array' in Iterators ? Iterators.Array : Array.prototype[ITERATOR]) === it; | |
}; | |
},{"./$.iterators":46,"./$.wks":67}],39:[function(require,module,exports){ | |
// http://jsperf.com/core-js-isobject | |
module.exports = function(it){ | |
return it !== null && (typeof it == 'object' || typeof it == 'function'); | |
}; | |
},{}],40:[function(require,module,exports){ | |
// Safari has buggy iterators w/o `next` | |
module.exports = 'keys' in [] && !('next' in [].keys()); | |
},{}],41:[function(require,module,exports){ | |
var anObject = require('./$.an-object'); | |
function close(iterator){ | |
var ret = iterator['return']; | |
if(ret !== undefined)anObject(ret.call(iterator)); | |
} | |
module.exports = function(iterator, fn, value, entries){ | |
try { | |
return entries ? fn(anObject(value)[0], value[1]) : fn(value); | |
} catch(e){ | |
close(iterator); | |
throw e; | |
} | |
}; | |
},{"./$.an-object":20}],42:[function(require,module,exports){ | |
'use strict'; | |
var $ = require('./$') | |
, IteratorPrototype = {}; | |
// 25.1.2.1.1 %IteratorPrototype%[@@iterator]() | |
require('./$.hide')(IteratorPrototype, require('./$.wks')('iterator'), function(){ return this; }); | |
module.exports = function(Constructor, NAME, next){ | |
Constructor.prototype = $.create(IteratorPrototype, {next: require('./$.property-desc')(1,next)}); | |
require('./$.tag')(Constructor, NAME + ' Iterator'); | |
}; | |
},{"./$":47,"./$.hide":35,"./$.property-desc":51,"./$.tag":60,"./$.wks":67}],43:[function(require,module,exports){ | |
'use strict'; | |
var LIBRARY = require('./$.library') | |
, $def = require('./$.def') | |
, $redef = require('./$.redef') | |
, hide = require('./$.hide') | |
, has = require('./$.has') | |
, SYMBOL_ITERATOR = require('./$.wks')('iterator') | |
, Iterators = require('./$.iterators') | |
, FF_ITERATOR = '@@iterator' | |
, KEYS = 'keys' | |
, VALUES = 'values'; | |
function returnThis(){ return this; } | |
module.exports = function(Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCE){ | |
require('./$.iter-create')(Constructor, NAME, next); | |
function createMethod(kind){ | |
switch(kind){ | |
case KEYS: return function keys(){ return new Constructor(this, kind); }; | |
case VALUES: return function values(){ return new Constructor(this, kind); }; | |
} return function entries(){ return new Constructor(this, kind); }; | |
} | |
var TAG = NAME + ' Iterator' | |
, proto = Base.prototype | |
, _native = proto[SYMBOL_ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT] | |
, _default = _native || createMethod(DEFAULT) | |
, methods, key; | |
// Fix native | |
if(_native){ | |
var IteratorPrototype = require('./$').getProto(_default.call(new Base)); | |
// Set @@toStringTag to native iterators | |
require('./$.tag')(IteratorPrototype, TAG, true); | |
// FF fix | |
if(!LIBRARY && has(proto, FF_ITERATOR))hide(IteratorPrototype, SYMBOL_ITERATOR, returnThis); | |
} | |
// Define iterator | |
if(!LIBRARY || FORCE)hide(proto, SYMBOL_ITERATOR, _default); | |
// Plug for library | |
Iterators[NAME] = _default; | |
Iterators[TAG] = returnThis; | |
if(DEFAULT){ | |
methods = { | |
keys: IS_SET ? _default : createMethod(KEYS), | |
values: DEFAULT == VALUES ? _default : createMethod(VALUES), | |
entries: DEFAULT != VALUES ? _default : createMethod('entries') | |
}; | |
if(FORCE)for(key in methods){ | |
if(!(key in proto))$redef(proto, key, methods[key]); | |
} else $def($def.P + $def.F * require('./$.iter-buggy'), NAME, methods); | |
} | |
}; | |
},{"./$":47,"./$.def":26,"./$.has":34,"./$.hide":35,"./$.iter-buggy":40,"./$.iter-create":42,"./$.iterators":46,"./$.library":49,"./$.redef":52,"./$.tag":60,"./$.wks":67}],44:[function(require,module,exports){ | |
var SYMBOL_ITERATOR = require('./$.wks')('iterator') | |
, SAFE_CLOSING = false; | |
try { | |
var riter = [7][SYMBOL_ITERATOR](); | |
riter['return'] = function(){ SAFE_CLOSING = true; }; | |
Array.from(riter, function(){ throw 2; }); | |
} catch(e){ /* empty */ } | |
module.exports = function(exec){ | |
if(!SAFE_CLOSING)return false; | |
var safe = false; | |
try { | |
var arr = [7] | |
, iter = arr[SYMBOL_ITERATOR](); | |
iter.next = function(){ safe = true; }; | |
arr[SYMBOL_ITERATOR] = function(){ return iter; }; | |
exec(arr); | |
} catch(e){ /* empty */ } | |
return safe; | |
}; | |
},{"./$.wks":67}],45:[function(require,module,exports){ | |
module.exports = function(done, value){ | |
return {value: value, done: !!done}; | |
}; | |
},{}],46:[function(require,module,exports){ | |
module.exports = {}; | |
},{}],47:[function(require,module,exports){ | |
var $Object = Object; | |
module.exports = { | |
create: $Object.create, | |
getProto: $Object.getPrototypeOf, | |
isEnum: {}.propertyIsEnumerable, | |
getDesc: $Object.getOwnPropertyDescriptor, | |
setDesc: $Object.defineProperty, | |
setDescs: $Object.defineProperties, | |
getKeys: $Object.keys, | |
getNames: $Object.getOwnPropertyNames, | |
getSymbols: $Object.getOwnPropertySymbols, | |
each: [].forEach | |
}; | |
},{}],48:[function(require,module,exports){ | |
var $ = require('./$') | |
, toObject = require('./$.to-object'); | |
module.exports = function(object, el){ | |
var O = toObject(object) | |
, keys = $.getKeys(O) | |
, length = keys.length | |
, index = 0 | |
, key; | |
while(length > index)if(O[key = keys[index++]] === el)return key; | |
}; | |
},{"./$":47,"./$.to-object":64}],49:[function(require,module,exports){ | |
module.exports = true; | |
},{}],50:[function(require,module,exports){ | |
var $redef = require('./$.redef'); | |
module.exports = function(target, src){ | |
for(var key in src)$redef(target, key, src[key]); | |
return target; | |
}; | |
},{"./$.redef":52}],51:[function(require,module,exports){ | |
module.exports = function(bitmap, value){ | |
return { | |
enumerable : !(bitmap & 1), | |
configurable: !(bitmap & 2), | |
writable : !(bitmap & 4), | |
value : value | |
}; | |
}; | |
},{}],52:[function(require,module,exports){ | |
module.exports = require('./$.hide'); | |
},{"./$.hide":35}],53:[function(require,module,exports){ | |
module.exports = Object.is || function is(x, y){ | |
return x === y ? x !== 0 || 1 / x === 1 / y : x != x && y != y; | |
}; | |
},{}],54:[function(require,module,exports){ | |
// Works with __proto__ only. Old v8 can't work with null proto objects. | |
/* eslint-disable no-proto */ | |
var getDesc = require('./$').getDesc | |
, isObject = require('./$.is-object') | |
, anObject = require('./$.an-object'); | |
function check(O, proto){ | |
anObject(O); | |
if(!isObject(proto) && proto !== null)throw TypeError(proto + ": can't set as prototype!"); | |
} | |
module.exports = { | |
set: Object.setPrototypeOf || ('__proto__' in {} // eslint-disable-line | |
? function(buggy, set){ | |
try { | |
set = require('./$.ctx')(Function.call, getDesc(Object.prototype, '__proto__').set, 2); | |
set({}, []); | |
} catch(e){ buggy = true; } | |
return function setPrototypeOf(O, proto){ | |
check(O, proto); | |
if(buggy)O.__proto__ = proto; | |
else set(O, proto); | |
return O; | |
}; | |
}() | |
: undefined), | |
check: check | |
}; | |
},{"./$":47,"./$.an-object":20,"./$.ctx":25,"./$.is-object":39}],55:[function(require,module,exports){ | |
var global = require('./$.global') | |
, SHARED = '__core-js_shared__' | |
, store = global[SHARED] || (global[SHARED] = {}); | |
module.exports = function(key){ | |
return store[key] || (store[key] = {}); | |
}; | |
},{"./$.global":33}],56:[function(require,module,exports){ | |
var $ = require('./$') | |
, SPECIES = require('./$.wks')('species'); | |
module.exports = function(C){ | |
if(require('./$.support-desc') && !(SPECIES in C))$.setDesc(C, SPECIES, { | |
configurable: true, | |
get: function(){ return this; } | |
}); | |
}; | |
},{"./$":47,"./$.support-desc":59,"./$.wks":67}],57:[function(require,module,exports){ | |
module.exports = function(it, Constructor, name){ | |
if(!(it instanceof Constructor))throw TypeError(name + ": use the 'new' operator!"); | |
return it; | |
}; | |
},{}],58:[function(require,module,exports){ | |
// true -> String#at | |
// false -> String#codePointAt | |
var toInteger = require('./$.to-integer') | |
, defined = require('./$.defined'); | |
module.exports = function(TO_STRING){ | |
return function(that, pos){ | |
var s = String(defined(that)) | |
, i = toInteger(pos) | |
, l = s.length | |
, a, b; | |
if(i < 0 || i >= l)return TO_STRING ? '' : undefined; | |
a = s.charCodeAt(i); | |
return a < 0xd800 || a > 0xdbff || i + 1 === l | |
|| (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff | |
? TO_STRING ? s.charAt(i) : a | |
: TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000; | |
}; | |
}; | |
},{"./$.defined":27,"./$.to-integer":62}],59:[function(require,module,exports){ | |
// Thank's IE8 for his funny defineProperty | |
module.exports = !!function(){ | |
try { | |
return Object.defineProperty({}, 'a', {get: function(){ return 2; }}).a == 2; | |
} catch(e){ /* empty */ } | |
}(); | |
},{}],60:[function(require,module,exports){ | |
var has = require('./$.has') | |
, hide = require('./$.hide') | |
, TAG = require('./$.wks')('toStringTag'); | |
module.exports = function(it, tag, stat){ | |
if(it && !has(it = stat ? it : it.prototype, TAG))hide(it, TAG, tag); | |
}; | |
},{"./$.has":34,"./$.hide":35,"./$.wks":67}],61:[function(require,module,exports){ | |
'use strict'; | |
var ctx = require('./$.ctx') | |
, invoke = require('./$.invoke') | |
, html = require('./$.html') | |
, cel = require('./$.dom-create') | |
, global = require('./$.global') | |
, process = global.process | |
, setTask = global.setImmediate | |
, clearTask = global.clearImmediate | |
, MessageChannel = global.MessageChannel | |
, counter = 0 | |
, queue = {} | |
, ONREADYSTATECHANGE = 'onreadystatechange' | |
, defer, channel, port; | |
function run(){ | |
var id = +this; | |
if(queue.hasOwnProperty(id)){ | |
var fn = queue[id]; | |
delete queue[id]; | |
fn(); | |
} | |
} | |
function listner(event){ | |
run.call(event.data); | |
} | |
// Node.js 0.9+ & IE10+ has setImmediate, otherwise: | |
if(!setTask || !clearTask){ | |
setTask = function setImmediate(fn){ | |
var args = [], i = 1; | |
while(arguments.length > i)args.push(arguments[i++]); | |
queue[++counter] = function(){ | |
invoke(typeof fn == 'function' ? fn : Function(fn), args); | |
}; | |
defer(counter); | |
return counter; | |
}; | |
clearTask = function clearImmediate(id){ | |
delete queue[id]; | |
}; | |
// Node.js 0.8- | |
if(require('./$.cof')(process) == 'process'){ | |
defer = function(id){ | |
process.nextTick(ctx(run, id, 1)); | |
}; | |
// Modern browsers, skip implementation for WebWorkers | |
// IE8 has postMessage, but it's sync & typeof its postMessage is 'object' | |
} else if(global.addEventListener && typeof postMessage == 'function' && !global.importScripts){ | |
defer = function(id){ | |
global.postMessage(id, '*'); | |
}; | |
global.addEventListener('message', listner, false); | |
// WebWorkers | |
} else if(MessageChannel){ | |
channel = new MessageChannel; | |
port = channel.port2; | |
channel.port1.onmessage = listner; | |
defer = ctx(port.postMessage, port, 1); | |
// IE8- | |
} else if(ONREADYSTATECHANGE in cel('script')){ | |
defer = function(id){ | |
html.appendChild(cel('script'))[ONREADYSTATECHANGE] = function(){ | |
html.removeChild(this); | |
run.call(id); | |
}; | |
}; | |
// Rest old browsers | |
} else { | |
defer = function(id){ | |
setTimeout(ctx(run, id, 1), 0); | |
}; | |
} | |
} | |
module.exports = { | |
set: setTask, | |
clear: clearTask | |
}; | |
},{"./$.cof":23,"./$.ctx":25,"./$.dom-create":28,"./$.global":33,"./$.html":36,"./$.invoke":37}],62:[function(require,module,exports){ | |
// 7.1.4 ToInteger | |
var ceil = Math.ceil | |
, floor = Math.floor; | |
module.exports = function(it){ | |
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it); | |
}; | |
},{}],63:[function(require,module,exports){ | |
// 7.1.15 ToLength | |
var toInteger = require('./$.to-integer') | |
, min = Math.min; | |
module.exports = function(it){ | |
return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991 | |
}; | |
},{"./$.to-integer":62}],64:[function(require,module,exports){ | |
var ES5Object = require('./$.es5-object') | |
, defined = require('./$.defined'); | |
module.exports = function(it, realString){ | |
return (realString ? Object : ES5Object)(defined(it)); | |
}; | |
},{"./$.defined":27,"./$.es5-object":30}],65:[function(require,module,exports){ | |
var id = 0 | |
, px = Math.random(); | |
module.exports = function(key){ | |
return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36)); | |
}; | |
},{}],66:[function(require,module,exports){ | |
module.exports = function(){ /* empty */ }; | |
},{}],67:[function(require,module,exports){ | |
var store = require('./$.shared')('wks') | |
, Symbol = require('./$.global').Symbol; | |
module.exports = function(name){ | |
return store[name] || (store[name] = | |
Symbol && Symbol[name] || (Symbol || require('./$.uid'))('Symbol.' + name)); | |
}; | |
},{"./$.global":33,"./$.shared":55,"./$.uid":65}],68:[function(require,module,exports){ | |
var global = require('./$.global') | |
, classof = require('./$.classof') | |
, ITERATOR = require('./$.wks')('iterator') | |
, Iterators = require('./$.iterators'); | |
module.exports = require('./$.core').getIteratorMethod = function(it){ | |
var Symbol = global.Symbol; | |
if(it != undefined){ | |
return it[Symbol && Symbol.iterator || '@@iterator'] | |
|| it[ITERATOR] | |
|| Iterators[classof(it)]; | |
} | |
}; | |
},{"./$.classof":22,"./$.core":24,"./$.global":33,"./$.iterators":46,"./$.wks":67}],69:[function(require,module,exports){ | |
var ctx = require('./$.ctx') | |
, $def = require('./$.def') | |
, toObject = require('./$.to-object') | |
, call = require('./$.iter-call') | |
, isArrayIter = require('./$.is-array-iter') | |
, toLength = require('./$.to-length') | |
, getIterFn = require('./core.get-iterator-method'); | |
$def($def.S + $def.F * !require('./$.iter-detect')(function(iter){ Array.from(iter); }), 'Array', { | |
// 22.1.2.1 Array.from(arrayLike, mapfn = undefined, thisArg = undefined) | |
from: function from(arrayLike/*, mapfn = undefined, thisArg = undefined*/){ | |
var O = toObject(arrayLike, true) | |
, C = typeof this == 'function' ? this : Array | |
, mapfn = arguments[1] | |
, mapping = mapfn !== undefined | |
, index = 0 | |
, iterFn = getIterFn(O) | |
, length, result, step, iterator; | |
if(mapping)mapfn = ctx(mapfn, arguments[2], 2); | |
// if object isn't iterable or it's array with default iterator - use simple case | |
if(iterFn != undefined && !(C == Array && isArrayIter(iterFn))){ | |
for(iterator = iterFn.call(O), result = new C; !(step = iterator.next()).done; index++){ | |
result[index] = mapping ? call(iterator, mapfn, [step.value, index], true) : step.value; | |
} | |
} else { | |
for(result = new C(length = toLength(O.length)); length > index; index++){ | |
result[index] = mapping ? mapfn(O[index], index) : O[index]; | |
} | |
} | |
result.length = index; | |
return result; | |
} | |
}); | |
},{"./$.ctx":25,"./$.def":26,"./$.is-array-iter":38,"./$.iter-call":41,"./$.iter-detect":44,"./$.to-length":63,"./$.to-object":64,"./core.get-iterator-method":68}],70:[function(require,module,exports){ | |
var setUnscope = require('./$.unscope') | |
, step = require('./$.iter-step') | |
, Iterators = require('./$.iterators') | |
, toObject = require('./$.to-object'); | |
// 22.1.3.4 Array.prototype.entries() | |
// 22.1.3.13 Array.prototype.keys() | |
// 22.1.3.29 Array.prototype.values() | |
// 22.1.3.30 Array.prototype[@@iterator]() | |
require('./$.iter-define')(Array, 'Array', function(iterated, kind){ | |
this._t = toObject(iterated); // target | |
this._i = 0; // next index | |
this._k = kind; // kind | |
// 22.1.5.2.1 %ArrayIteratorPrototype%.next() | |
}, function(){ | |
var O = this._t | |
, kind = this._k | |
, index = this._i++; | |
if(!O || index >= O.length){ | |
this._t = undefined; | |
return step(1); | |
} | |
if(kind == 'keys' )return step(0, index); | |
if(kind == 'values')return step(0, O[index]); | |
return step(0, [index, O[index]]); | |
}, 'values'); | |
// argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7) | |
Iterators.Arguments = Iterators.Array; | |
setUnscope('keys'); | |
setUnscope('values'); | |
setUnscope('entries'); | |
},{"./$.iter-define":43,"./$.iter-step":45,"./$.iterators":46,"./$.to-object":64,"./$.unscope":66}],71:[function(require,module,exports){ | |
// 19.1.3.1 Object.assign(target, source) | |
var $def = require('./$.def'); | |
$def($def.S, 'Object', {assign: require('./$.assign')}); | |
},{"./$.assign":21,"./$.def":26}],72:[function(require,module,exports){ | |
},{}],73:[function(require,module,exports){ | |
'use strict'; | |
var $ = require('./$') | |
, LIBRARY = require('./$.library') | |
, global = require('./$.global') | |
, ctx = require('./$.ctx') | |
, classof = require('./$.classof') | |
, $def = require('./$.def') | |
, isObject = require('./$.is-object') | |
, anObject = require('./$.an-object') | |
, aFunction = require('./$.a-function') | |
, strictNew = require('./$.strict-new') | |
, forOf = require('./$.for-of') | |
, setProto = require('./$.set-proto').set | |
, same = require('./$.same') | |
, species = require('./$.species') | |
, SPECIES = require('./$.wks')('species') | |
, RECORD = require('./$.uid')('record') | |
, PROMISE = 'Promise' | |
, process = global.process | |
, isNode = classof(process) == 'process' | |
, asap = process && process.nextTick || require('./$.task').set | |
, P = global[PROMISE] | |
, Wrapper; | |
function testResolve(sub){ | |
var test = new P(function(){}); | |
if(sub)test.constructor = Object; | |
return P.resolve(test) === test; | |
} | |
var useNative = function(){ | |
var works = false; | |
function P2(x){ | |
var self = new P(x); | |
setProto(self, P2.prototype); | |
return self; | |
} | |
try { | |
works = P && P.resolve && testResolve(); | |
setProto(P2, P); | |
P2.prototype = $.create(P.prototype, {constructor: {value: P2}}); | |
// actual Firefox has broken subclass support, test that | |
if(!(P2.resolve(5).then(function(){}) instanceof P2)){ | |
works = false; | |
} | |
// actual V8 bug, https://code.google.com/p/v8/issues/detail?id=4162 | |
if(works && require('./$.support-desc')){ | |
var thenableThenGotten = false; | |
P.resolve($.setDesc({}, 'then', { | |
get: function(){ thenableThenGotten = true; } | |
})); | |
works = thenableThenGotten; | |
} | |
} catch(e){ works = false; } | |
return works; | |
}(); | |
// helpers | |
function isPromise(it){ | |
return isObject(it) && (useNative ? classof(it) == 'Promise' : RECORD in it); | |
} | |
function sameConstructor(a, b){ | |
// library wrapper special case | |
if(LIBRARY && a === P && b === Wrapper)return true; | |
return same(a, b); | |
} | |
function getConstructor(C){ | |
var S = anObject(C)[SPECIES]; | |
return S != undefined ? S : C; | |
} | |
function isThenable(it){ | |
var then; | |
return isObject(it) && typeof (then = it.then) == 'function' ? then : false; | |
} | |
function notify(record, isReject){ | |
if(record.n)return; | |
record.n = true; | |
var chain = record.c; | |
// strange IE + webpack dev server bug - use .call(global) | |
asap.call(global, function(){ | |
var value = record.v | |
, ok = record.s == 1 | |
, i = 0; | |
function run(react){ | |
var cb = ok ? react.ok : react.fail | |
, ret, then; | |
try { | |
if(cb){ | |
if(!ok)record.h = true; | |
ret = cb === true ? value : cb(value); | |
if(ret === react.P){ | |
react.rej(TypeError('Promise-chain cycle')); | |
} else if(then = isThenable(ret)){ | |
then.call(ret, react.res, react.rej); | |
} else react.res(ret); | |
} else react.rej(value); | |
} catch(err){ | |
react.rej(err); | |
} | |
} | |
while(chain.length > i)run(chain[i++]); // variable length - can't use forEach | |
chain.length = 0; | |
record.n = false; | |
if(isReject)setTimeout(function(){ | |
// strange IE + webpack dev server bug - use .call(global) | |
asap.call(global, function(){ | |
if(isUnhandled(record.p)){ | |
if(isNode){ | |
process.emit('unhandledRejection', value, record.p); | |
} else if(global.console && console.error){ | |
console.error('Unhandled promise rejection', value); | |
} | |
} | |
record.a = undefined; | |
}); | |
}, 1); | |
}); | |
} | |
function isUnhandled(promise){ | |
var record = promise[RECORD] | |
, chain = record.a || record.c | |
, i = 0 | |
, react; | |
if(record.h)return false; | |
while(chain.length > i){ | |
react = chain[i++]; | |
if(react.fail || !isUnhandled(react.P))return false; | |
} return true; | |
} | |
function $reject(value){ | |
var record = this; | |
if(record.d)return; | |
record.d = true; | |
record = record.r || record; // unwrap | |
record.v = value; | |
record.s = 2; | |
record.a = record.c.slice(); | |
notify(record, true); | |
} | |
function $resolve(value){ | |
var record = this | |
, then; | |
if(record.d)return; | |
record.d = true; | |
record = record.r || record; // unwrap | |
try { | |
if(then = isThenable(value)){ | |
// strange IE + webpack dev server bug - use .call(global) | |
asap.call(global, function(){ | |
var wrapper = {r: record, d: false}; // wrap | |
try { | |
then.call(value, ctx($resolve, wrapper, 1), ctx($reject, wrapper, 1)); | |
} catch(e){ | |
$reject.call(wrapper, e); | |
} | |
}); | |
} else { | |
record.v = value; | |
record.s = 1; | |
notify(record, false); | |
} | |
} catch(e){ | |
$reject.call({r: record, d: false}, e); // wrap | |
} | |
} | |
// constructor polyfill | |
if(!useNative){ | |
// 25.4.3.1 Promise(executor) | |
P = function Promise(executor){ | |
aFunction(executor); | |
var record = { | |
p: strictNew(this, P, PROMISE), // <- promise | |
c: [], // <- awaiting reactions | |
a: undefined, // <- checked in isUnhandled reactions | |
s: 0, // <- state | |
d: false, // <- done | |
v: undefined, // <- value | |
h: false, // <- handled rejection | |
n: false // <- notify | |
}; | |
this[RECORD] = record; | |
try { | |
executor(ctx($resolve, record, 1), ctx($reject, record, 1)); | |
} catch(err){ | |
$reject.call(record, err); | |
} | |
}; | |
require('./$.mix')(P.prototype, { | |
// 25.4.5.3 Promise.prototype.then(onFulfilled, onRejected) | |
then: function then(onFulfilled, onRejected){ | |
var S = anObject(anObject(this).constructor)[SPECIES]; | |
var react = { | |
ok: typeof onFulfilled == 'function' ? onFulfilled : true, | |
fail: typeof onRejected == 'function' ? onRejected : false | |
}; | |
var promise = react.P = new (S != undefined ? S : P)(function(res, rej){ | |
react.res = aFunction(res); | |
react.rej = aFunction(rej); | |
}); | |
var record = this[RECORD]; | |
record.c.push(react); | |
if(record.a)record.a.push(react); | |
if(record.s)notify(record, false); | |
return promise; | |
}, | |
// 25.4.5.1 Promise.prototype.catch(onRejected) | |
'catch': function(onRejected){ | |
return this.then(undefined, onRejected); | |
} | |
}); | |
} | |
// export | |
$def($def.G + $def.W + $def.F * !useNative, {Promise: P}); | |
require('./$.tag')(P, PROMISE); | |
species(P); | |
species(Wrapper = require('./$.core')[PROMISE]); | |
// statics | |
$def($def.S + $def.F * !useNative, PROMISE, { | |
// 25.4.4.5 Promise.reject(r) | |
reject: function reject(r){ | |
return new this(function(res, rej){ rej(r); }); | |
} | |
}); | |
$def($def.S + $def.F * (!useNative || testResolve(true)), PROMISE, { | |
// 25.4.4.6 Promise.resolve(x) | |
resolve: function resolve(x){ | |
return isPromise(x) && sameConstructor(x.constructor, this) | |
? x : new this(function(res){ res(x); }); | |
} | |
}); | |
$def($def.S + $def.F * !(useNative && require('./$.iter-detect')(function(iter){ | |
P.all(iter)['catch'](function(){}); | |
})), PROMISE, { | |
// 25.4.4.1 Promise.all(iterable) | |
all: function all(iterable){ | |
var C = getConstructor(this) | |
, values = []; | |
return new C(function(res, rej){ | |
forOf(iterable, false, values.push, values); | |
var remaining = values.length | |
, results = Array(remaining); | |
if(remaining)$.each.call(values, function(promise, index){ | |
C.resolve(promise).then(function(value){ | |
results[index] = value; | |
--remaining || res(results); | |
}, rej); | |
}); | |
else res(results); | |
}); | |
}, | |
// 25.4.4.4 Promise.race(iterable) | |
race: function race(iterable){ | |
var C = getConstructor(this); | |
return new C(function(res, rej){ | |
forOf(iterable, false, function(promise){ | |
C.resolve(promise).then(res, rej); | |
}); | |
}); | |
} | |
}); | |
},{"./$":47,"./$.a-function":19,"./$.an-object":20,"./$.classof":22,"./$.core":24,"./$.ctx":25,"./$.def":26,"./$.for-of":31,"./$.global":33,"./$.is-object":39,"./$.iter-detect":44,"./$.library":49,"./$.mix":50,"./$.same":53,"./$.set-proto":54,"./$.species":56,"./$.strict-new":57,"./$.support-desc":59,"./$.tag":60,"./$.task":61,"./$.uid":65,"./$.wks":67}],74:[function(require,module,exports){ | |
var $at = require('./$.string-at')(true); | |
// 21.1.3.27 String.prototype[@@iterator]() | |
require('./$.iter-define')(String, 'String', function(iterated){ | |
this._t = String(iterated); // target | |
this._i = 0; // next index | |
// 21.1.5.2.1 %StringIteratorPrototype%.next() | |
}, function(){ | |
var O = this._t | |
, index = this._i | |
, point; | |
if(index >= O.length)return {value: undefined, done: true}; | |
point = $at(O, index); | |
this._i += point.length; | |
return {value: point, done: false}; | |
}); | |
},{"./$.iter-define":43,"./$.string-at":58}],75:[function(require,module,exports){ | |
'use strict'; | |
// ECMAScript 6 symbols shim | |
var $ = require('./$') | |
, global = require('./$.global') | |
, has = require('./$.has') | |
, SUPPORT_DESC = require('./$.support-desc') | |
, $def = require('./$.def') | |
, $redef = require('./$.redef') | |
, shared = require('./$.shared') | |
, setTag = require('./$.tag') | |
, uid = require('./$.uid') | |
, wks = require('./$.wks') | |
, keyOf = require('./$.keyof') | |
, $names = require('./$.get-names') | |
, enumKeys = require('./$.enum-keys') | |
, anObject = require('./$.an-object') | |
, toObject = require('./$.to-object') | |
, createDesc = require('./$.property-desc') | |
, getDesc = $.getDesc | |
, setDesc = $.setDesc | |
, $create = $.create | |
, getNames = $names.get | |
, $Symbol = global.Symbol | |
, setter = false | |
, HIDDEN = wks('_hidden') | |
, isEnum = $.isEnum | |
, SymbolRegistry = shared('symbol-registry') | |
, AllSymbols = shared('symbols') | |
, useNative = typeof $Symbol == 'function' | |
, ObjectProto = Object.prototype; | |
var setSymbolDesc = SUPPORT_DESC ? function(){ // fallback for old Android | |
try { | |
return $create(setDesc({}, HIDDEN, { | |
get: function(){ | |
return setDesc(this, HIDDEN, {value: false})[HIDDEN]; | |
} | |
}))[HIDDEN] || setDesc; | |
} catch(e){ | |
return function(it, key, D){ | |
var protoDesc = getDesc(ObjectProto, key); | |
if(protoDesc)delete ObjectProto[key]; | |
setDesc(it, key, D); | |
if(protoDesc && it !== ObjectProto)setDesc(ObjectProto, key, protoDesc); | |
}; | |
} | |
}() : setDesc; | |
function wrap(tag){ | |
var sym = AllSymbols[tag] = $create($Symbol.prototype); | |
sym._k = tag; | |
SUPPORT_DESC && setter && setSymbolDesc(ObjectProto, tag, { | |
configurable: true, | |
set: function(value){ | |
if(has(this, HIDDEN) && has(this[HIDDEN], tag))this[HIDDEN][tag] = false; | |
setSymbolDesc(this, tag, createDesc(1, value)); | |
} | |
}); | |
return sym; | |
} | |
function defineProperty(it, key, D){ | |
if(D && has(AllSymbols, key)){ | |
if(!D.enumerable){ | |
if(!has(it, HIDDEN))setDesc(it, HIDDEN, createDesc(1, {})); | |
it[HIDDEN][key] = true; | |
} else { | |
if(has(it, HIDDEN) && it[HIDDEN][key])it[HIDDEN][key] = false; | |
D = $create(D, {enumerable: createDesc(0, false)}); | |
} return setSymbolDesc(it, key, D); | |
} return setDesc(it, key, D); | |
} | |
function defineProperties(it, P){ | |
anObject(it); | |
var keys = enumKeys(P = toObject(P)) | |
, i = 0 | |
, l = keys.length | |
, key; | |
while(l > i)defineProperty(it, key = keys[i++], P[key]); | |
return it; | |
} | |
function create(it, P){ | |
return P === undefined ? $create(it) : defineProperties($create(it), P); | |
} | |
function propertyIsEnumerable(key){ | |
var E = isEnum.call(this, key); | |
return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] | |
? E : true; | |
} | |
function getOwnPropertyDescriptor(it, key){ | |
var D = getDesc(it = toObject(it), key); | |
if(D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key]))D.enumerable = true; | |
return D; | |
} | |
function getOwnPropertyNames(it){ | |
var names = getNames(toObject(it)) | |
, result = [] | |
, i = 0 | |
, key; | |
while(names.length > i)if(!has(AllSymbols, key = names[i++]) && key != HIDDEN)result.push(key); | |
return result; | |
} | |
function getOwnPropertySymbols(it){ | |
var names = getNames(toObject(it)) | |
, result = [] | |
, i = 0 | |
, key; | |
while(names.length > i)if(has(AllSymbols, key = names[i++]))result.push(AllSymbols[key]); | |
return result; | |
} | |
// 19.4.1.1 Symbol([description]) | |
if(!useNative){ | |
$Symbol = function Symbol(){ | |
if(this instanceof $Symbol)throw TypeError('Symbol is not a constructor'); | |
return wrap(uid(arguments[0])); | |
}; | |
$redef($Symbol.prototype, 'toString', function(){ | |
return this._k; | |
}); | |
$.create = create; | |
$.isEnum = propertyIsEnumerable; | |
$.getDesc = getOwnPropertyDescriptor; | |
$.setDesc = defineProperty; | |
$.setDescs = defineProperties; | |
$.getNames = $names.get = getOwnPropertyNames; | |
$.getSymbols = getOwnPropertySymbols; | |
if(SUPPORT_DESC && !require('./$.library')){ | |
$redef(ObjectProto, 'propertyIsEnumerable', propertyIsEnumerable, true); | |
} | |
} | |
var symbolStatics = { | |
// 19.4.2.1 Symbol.for(key) | |
'for': function(key){ | |
return has(SymbolRegistry, key += '') | |
? SymbolRegistry[key] | |
: SymbolRegistry[key] = $Symbol(key); | |
}, | |
// 19.4.2.5 Symbol.keyFor(sym) | |
keyFor: function keyFor(key){ | |
return keyOf(SymbolRegistry, key); | |
}, | |
useSetter: function(){ setter = true; }, | |
useSimple: function(){ setter = false; } | |
}; | |
// 19.4.2.2 Symbol.hasInstance | |
// 19.4.2.3 Symbol.isConcatSpreadable | |
// 19.4.2.4 Symbol.iterator | |
// 19.4.2.6 Symbol.match | |
// 19.4.2.8 Symbol.replace | |
// 19.4.2.9 Symbol.search | |
// 19.4.2.10 Symbol.species | |
// 19.4.2.11 Symbol.split | |
// 19.4.2.12 Symbol.toPrimitive | |
// 19.4.2.13 Symbol.toStringTag | |
// 19.4.2.14 Symbol.unscopables | |
$.each.call(( | |
'hasInstance,isConcatSpreadable,iterator,match,replace,search,' + | |
'species,split,toPrimitive,toStringTag,unscopables' | |
).split(','), function(it){ | |
var sym = wks(it); | |
symbolStatics[it] = useNative ? sym : wrap(sym); | |
} | |
); | |
setter = true; | |
$def($def.G + $def.W, {Symbol: $Symbol}); | |
$def($def.S, 'Symbol', symbolStatics); | |
$def($def.S + $def.F * !useNative, 'Object', { | |
// 19.1.2.2 Object.create(O [, Properties]) | |
create: create, | |
// 19.1.2.4 Object.defineProperty(O, P, Attributes) | |
defineProperty: defineProperty, | |
// 19.1.2.3 Object.defineProperties(O, Properties) | |
defineProperties: defineProperties, | |
// 19.1.2.6 Object.getOwnPropertyDescriptor(O, P) | |
getOwnPropertyDescriptor: getOwnPropertyDescriptor, | |
// 19.1.2.7 Object.getOwnPropertyNames(O) | |
getOwnPropertyNames: getOwnPropertyNames, | |
// 19.1.2.8 Object.getOwnPropertySymbols(O) | |
getOwnPropertySymbols: getOwnPropertySymbols | |
}); | |
// 19.4.3.5 Symbol.prototype[@@toStringTag] | |
setTag($Symbol, 'Symbol'); | |
// 20.2.1.9 Math[@@toStringTag] | |
setTag(Math, 'Math', true); | |
// 24.3.3 JSON[@@toStringTag] | |
setTag(global.JSON, 'JSON', true); | |
},{"./$":47,"./$.an-object":20,"./$.def":26,"./$.enum-keys":29,"./$.get-names":32,"./$.global":33,"./$.has":34,"./$.keyof":48,"./$.library":49,"./$.property-desc":51,"./$.redef":52,"./$.shared":55,"./$.support-desc":59,"./$.tag":60,"./$.to-object":64,"./$.uid":65,"./$.wks":67}],76:[function(require,module,exports){ | |
require('./es6.array.iterator'); | |
var Iterators = require('./$.iterators'); | |
Iterators.NodeList = Iterators.HTMLCollection = Iterators.Array; | |
},{"./$.iterators":46,"./es6.array.iterator":70}],77:[function(require,module,exports){ | |
(function (global){ | |
// This method of obtaining a reference to the global object needs to be | |
// kept identical to the way it is obtained in runtime.js | |
var g = | |
typeof global === "object" ? global : | |
typeof window === "object" ? window : | |
typeof self === "object" ? self : this; | |
// Use `getOwnPropertyNames` because not all browsers support calling | |
// `hasOwnProperty` on the global `self` object in a worker. See #183. | |
var hadRuntime = g.regeneratorRuntime && | |
Object.getOwnPropertyNames(g).indexOf("regeneratorRuntime") >= 0; | |
// Save the old regeneratorRuntime in case it needs to be restored later. | |
var oldRuntime = hadRuntime && g.regeneratorRuntime; | |
// Force reevalutation of runtime.js. | |
g.regeneratorRuntime = undefined; | |
module.exports = require("./runtime"); | |
if (hadRuntime) { | |
// Restore the original runtime. | |
g.regeneratorRuntime = oldRuntime; | |
} else { | |
// Remove the global property added by runtime.js. | |
delete g.regeneratorRuntime; | |
} | |
module.exports = { "default": module.exports, __esModule: true }; | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./runtime":78}],78:[function(require,module,exports){ | |
(function (process,global){ | |
/** | |
* Copyright (c) 2014, Facebook, Inc. | |
* All rights reserved. | |
* | |
* This source code is licensed under the BSD-style license found in the | |
* https://raw.github.com/facebook/regenerator/master/LICENSE file. An | |
* additional grant of patent rights can be found in the PATENTS file in | |
* the same directory. | |
*/ | |
"use strict"; | |
var _Symbol = require("babel-runtime/core-js/symbol")["default"]; | |
var _Symbol$iterator = require("babel-runtime/core-js/symbol/iterator")["default"]; | |
var _Object$create = require("babel-runtime/core-js/object/create")["default"]; | |
var _Promise = require("babel-runtime/core-js/promise")["default"]; | |
!(function (global) { | |
"use strict"; | |
var hasOwn = Object.prototype.hasOwnProperty; | |
var undefined; // More compressible than void 0. | |
var iteratorSymbol = typeof _Symbol === "function" && _Symbol$iterator || "@@iterator"; | |
var inModule = typeof module === "object"; | |
var runtime = global.regeneratorRuntime; | |
if (runtime) { | |
if (inModule) { | |
// If regeneratorRuntime is defined globally and we're in a module, | |
// make the exports object identical to regeneratorRuntime. | |
module.exports = runtime; | |
} | |
// Don't bother evaluating the rest of this file if the runtime was | |
// already defined globally. | |
return; | |
} | |
// Define the runtime globally (as expected by generated code) as either | |
// module.exports (if we're in a module) or a new, empty object. | |
runtime = global.regeneratorRuntime = inModule ? module.exports : {}; | |
function wrap(innerFn, outerFn, self, tryLocsList) { | |
// If outerFn provided, then outerFn.prototype instanceof Generator. | |
var generator = _Object$create((outerFn || Generator).prototype); | |
generator._invoke = makeInvokeMethod(innerFn, self || null, new Context(tryLocsList || [])); | |
return generator; | |
} | |
runtime.wrap = wrap; | |
// Try/catch helper to minimize deoptimizations. Returns a completion | |
// record like context.tryEntries[i].completion. This interface could | |
// have been (and was previously) designed to take a closure to be | |
// invoked without arguments, but in all the cases we care about we | |
// already have an existing method we want to call, so there's no need | |
// to create a new function object. We can even get away with assuming | |
// the method takes exactly one argument, since that happens to be true | |
// in every case, so we don't have to touch the arguments object. The | |
// only additional allocation required is the completion record, which | |
// has a stable shape and so hopefully should be cheap to allocate. | |
function tryCatch(fn, obj, arg) { | |
try { | |
return { type: "normal", arg: fn.call(obj, arg) }; | |
} catch (err) { | |
return { type: "throw", arg: err }; | |
} | |
} | |
var GenStateSuspendedStart = "suspendedStart"; | |
var GenStateSuspendedYield = "suspendedYield"; | |
var GenStateExecuting = "executing"; | |
var GenStateCompleted = "completed"; | |
// Returning this object from the innerFn has the same effect as | |
// breaking out of the dispatch switch statement. | |
var ContinueSentinel = {}; | |
// Dummy constructor functions that we use as the .constructor and | |
// .constructor.prototype properties for functions that return Generator | |
// objects. For full spec compliance, you may wish to configure your | |
// minifier not to mangle the names of these two functions. | |
function Generator() {} | |
function GeneratorFunction() {} | |
function GeneratorFunctionPrototype() {} | |
var Gp = GeneratorFunctionPrototype.prototype = Generator.prototype; | |
GeneratorFunction.prototype = Gp.constructor = GeneratorFunctionPrototype; | |
GeneratorFunctionPrototype.constructor = GeneratorFunction; | |
GeneratorFunction.displayName = "GeneratorFunction"; | |
// Helper for defining the .next, .throw, and .return methods of the | |
// Iterator interface in terms of a single ._invoke method. | |
function defineIteratorMethods(prototype) { | |
["next", "throw", "return"].forEach(function (method) { | |
prototype[method] = function (arg) { | |
return this._invoke(method, arg); | |
}; | |
}); | |
} | |
runtime.isGeneratorFunction = function (genFun) { | |
var ctor = typeof genFun === "function" && genFun.constructor; | |
return ctor ? ctor === GeneratorFunction || | |
// For the native GeneratorFunction constructor, the best we can | |
// do is to check its .name property. | |
(ctor.displayName || ctor.name) === "GeneratorFunction" : false; | |
}; | |
runtime.mark = function (genFun) { | |
genFun.__proto__ = GeneratorFunctionPrototype; | |
genFun.prototype = _Object$create(Gp); | |
return genFun; | |
}; | |
// Within the body of any async function, `await x` is transformed to | |
// `yield regeneratorRuntime.awrap(x)`, so that the runtime can test | |
// `value instanceof AwaitArgument` to determine if the yielded value is | |
// meant to be awaited. Some may consider the name of this method too | |
// cutesy, but they are curmudgeons. | |
runtime.awrap = function (arg) { | |
return new AwaitArgument(arg); | |
}; | |
function AwaitArgument(arg) { | |
this.arg = arg; | |
} | |
function AsyncIterator(generator) { | |
// This invoke function is written in a style that assumes some | |
// calling function (or Promise) will handle exceptions. | |
function invoke(method, arg) { | |
var result = generator[method](arg); | |
var value = result.value; | |
return value instanceof AwaitArgument ? _Promise.resolve(value.arg).then(invokeNext, invokeThrow) : _Promise.resolve(value).then(function (unwrapped) { | |
// When a yielded Promise is resolved, its final value becomes | |
// the .value of the Promise<{value,done}> result for the | |
// current iteration. If the Promise is rejected, however, the | |
// result for this iteration will be rejected with the same | |
// reason. Note that rejections of yielded Promises are not | |
// thrown back into the generator function, as is the case | |
// when an awaited Promise is rejected. This difference in | |
// behavior between yield and await is important, because it | |
// allows the consumer to decide what to do with the yielded | |
// rejection (swallow it and continue, manually .throw it back | |
// into the generator, abandon iteration, whatever). With | |
// await, by contrast, there is no opportunity to examine the | |
// rejection reason outside the generator function, so the | |
// only option is to throw it from the await expression, and | |
// let the generator function handle the exception. | |
result.value = unwrapped; | |
return result; | |
}); | |
} | |
if (typeof process === "object" && process.domain) { | |
invoke = process.domain.bind(invoke); | |
} | |
var invokeNext = invoke.bind(generator, "next"); | |
var invokeThrow = invoke.bind(generator, "throw"); | |
var invokeReturn = invoke.bind(generator, "return"); | |
var previousPromise; | |
function enqueue(method, arg) { | |
var enqueueResult = | |
// If enqueue has been called before, then we want to wait until | |
// all previous Promises have been resolved before calling invoke, | |
// so that results are always delivered in the correct order. If | |
// enqueue has not been called before, then it is important to | |
// call invoke immediately, without waiting on a callback to fire, | |
// so that the async generator function has the opportunity to do | |
// any necessary setup in a predictable way. This predictability | |
// is why the Promise constructor synchronously invokes its | |
// executor callback, and why async functions synchronously | |
// execute code before the first await. Since we implement simple | |
// async functions in terms of async generators, it is especially | |
// important to get this right, even though it requires care. | |
previousPromise ? previousPromise.then(function () { | |
return invoke(method, arg); | |
}) : new _Promise(function (resolve) { | |
resolve(invoke(method, arg)); | |
}); | |
// Avoid propagating enqueueResult failures to Promises returned by | |
// later invocations of the iterator. | |
previousPromise = enqueueResult["catch"](function (ignored) {}); | |
return enqueueResult; | |
} | |
// Define the unified helper method that is used to implement .next, | |
// .throw, and .return (see defineIteratorMethods). | |
this._invoke = enqueue; | |
} | |
defineIteratorMethods(AsyncIterator.prototype); | |
// Note that simple async functions are implemented on top of | |
// AsyncIterator objects; they just return a Promise for the value of | |
// the final result produced by the iterator. | |
runtime.async = function (innerFn, outerFn, self, tryLocsList) { | |
var iter = new AsyncIterator(wrap(innerFn, outerFn, self, tryLocsList)); | |
return runtime.isGeneratorFunction(outerFn) ? iter // If outerFn is a generator, return the full iterator. | |
: iter.next().then(function (result) { | |
return result.done ? result.value : iter.next(); | |
}); | |
}; | |
function makeInvokeMethod(innerFn, self, context) { | |
var state = GenStateSuspendedStart; | |
return function invoke(method, arg) { | |
if (state === GenStateExecuting) { | |
throw new Error("Generator is already running"); | |
} | |
if (state === GenStateCompleted) { | |
if (method === "throw") { | |
throw arg; | |
} | |
// Be forgiving, per 25.3.3.3.3 of the spec: | |
// https://people.mozilla.org/~jorendorff/es6-draft.html#sec-generatorresume | |
return doneResult(); | |
} | |
while (true) { | |
var delegate = context.delegate; | |
if (delegate) { | |
if (method === "return" || method === "throw" && delegate.iterator[method] === undefined) { | |
// A return or throw (when the delegate iterator has no throw | |
// method) always terminates the yield* loop. | |
context.delegate = null; | |
// If the delegate iterator has a return method, give it a | |
// chance to clean up. | |
var returnMethod = delegate.iterator["return"]; | |
if (returnMethod) { | |
var record = tryCatch(returnMethod, delegate.iterator, arg); | |
if (record.type === "throw") { | |
// If the return method threw an exception, let that | |
// exception prevail over the original return or throw. | |
method = "throw"; | |
arg = record.arg; | |
continue; | |
} | |
} | |
if (method === "return") { | |
// Continue with the outer return, now that the delegate | |
// iterator has been terminated. | |
continue; | |
} | |
} | |
var record = tryCatch(delegate.iterator[method], delegate.iterator, arg); | |
if (record.type === "throw") { | |
context.delegate = null; | |
// Like returning generator.throw(uncaught), but without the | |
// overhead of an extra function call. | |
method = "throw"; | |
arg = record.arg; | |
continue; | |
} | |
// Delegate generator ran and handled its own exceptions so | |
// regardless of what the method was, we continue as if it is | |
// "next" with an undefined arg. | |
method = "next"; | |
arg = undefined; | |
var info = record.arg; | |
if (info.done) { | |
context[delegate.resultName] = info.value; | |
context.next = delegate.nextLoc; | |
} else { | |
state = GenStateSuspendedYield; | |
return info; | |
} | |
context.delegate = null; | |
} | |
if (method === "next") { | |
if (state === GenStateSuspendedYield) { | |
context.sent = arg; | |
} else { | |
context.sent = undefined; | |
} | |
} else if (method === "throw") { | |
if (state === GenStateSuspendedStart) { | |
state = GenStateCompleted; | |
throw arg; | |
} | |
if (context.dispatchException(arg)) { | |
// If the dispatched exception was caught by a catch block, | |
// then let that catch block handle the exception normally. | |
method = "next"; | |
arg = undefined; | |
} | |
} else if (method === "return") { | |
context.abrupt("return", arg); | |
} | |
state = GenStateExecuting; | |
var record = tryCatch(innerFn, self, context); | |
if (record.type === "normal") { | |
// If an exception is thrown from innerFn, we leave state === | |
// GenStateExecuting and loop back for another invocation. | |
state = context.done ? GenStateCompleted : GenStateSuspendedYield; | |
var info = { | |
value: record.arg, | |
done: context.done | |
}; | |
if (record.arg === ContinueSentinel) { | |
if (context.delegate && method === "next") { | |
// Deliberately forget the last sent value so that we don't | |
// accidentally pass it on to the delegate. | |
arg = undefined; | |
} | |
} else { | |
return info; | |
} | |
} else if (record.type === "throw") { | |
state = GenStateCompleted; | |
// Dispatch the exception by looping back around to the | |
// context.dispatchException(arg) call above. | |
method = "throw"; | |
arg = record.arg; | |
} | |
} | |
}; | |
} | |
// Define Generator.prototype.{next,throw,return} in terms of the | |
// unified ._invoke helper method. | |
defineIteratorMethods(Gp); | |
Gp[iteratorSymbol] = function () { | |
return this; | |
}; | |
Gp.toString = function () { | |
return "[object Generator]"; | |
}; | |
function pushTryEntry(locs) { | |
var entry = { tryLoc: locs[0] }; | |
if (1 in locs) { | |
entry.catchLoc = locs[1]; | |
} | |
if (2 in locs) { | |
entry.finallyLoc = locs[2]; | |
entry.afterLoc = locs[3]; | |
} | |
this.tryEntries.push(entry); | |
} | |
function resetTryEntry(entry) { | |
var record = entry.completion || {}; | |
record.type = "normal"; | |
delete record.arg; | |
entry.completion = record; | |
} | |
function Context(tryLocsList) { | |
// The root entry object (effectively a try statement without a catch | |
// or a finally block) gives us a place to store values thrown from | |
// locations where there is no enclosing try statement. | |
this.tryEntries = [{ tryLoc: "root" }]; | |
tryLocsList.forEach(pushTryEntry, this); | |
this.reset(true); | |
} | |
runtime.keys = function (object) { | |
var keys = []; | |
for (var key in object) { | |
keys.push(key); | |
} | |
keys.reverse(); | |
// Rather than returning an object with a next method, we keep | |
// things simple and return the next function itself. | |
return function next() { | |
while (keys.length) { | |
var key = keys.pop(); | |
if (key in object) { | |
next.value = key; | |
next.done = false; | |
return next; | |
} | |
} | |
// To avoid creating an additional object, we just hang the .value | |
// and .done properties off the next function object itself. This | |
// also ensures that the minifier will not anonymize the function. | |
next.done = true; | |
return next; | |
}; | |
}; | |
function values(iterable) { | |
if (iterable) { | |
var iteratorMethod = iterable[iteratorSymbol]; | |
if (iteratorMethod) { | |
return iteratorMethod.call(iterable); | |
} | |
if (typeof iterable.next === "function") { | |
return iterable; | |
} | |
if (!isNaN(iterable.length)) { | |
var i = -1, | |
next = function next() { | |
while (++i < iterable.length) { | |
if (hasOwn.call(iterable, i)) { | |
next.value = iterable[i]; | |
next.done = false; | |
return next; | |
} | |
} | |
next.value = undefined; | |
next.done = true; | |
return next; | |
}; | |
return next.next = next; | |
} | |
} | |
// Return an iterator with no values. | |
return { next: doneResult }; | |
} | |
runtime.values = values; | |
function doneResult() { | |
return { value: undefined, done: true }; | |
} | |
Context.prototype = { | |
constructor: Context, | |
reset: function reset(skipTempReset) { | |
this.prev = 0; | |
this.next = 0; | |
this.sent = undefined; | |
this.done = false; | |
this.delegate = null; | |
this.tryEntries.forEach(resetTryEntry); | |
if (!skipTempReset) { | |
for (var name in this) { | |
// Not sure about the optimal order of these conditions: | |
if (name.charAt(0) === "t" && hasOwn.call(this, name) && !isNaN(+name.slice(1))) { | |
this[name] = undefined; | |
} | |
} | |
} | |
}, | |
stop: function stop() { | |
this.done = true; | |
var rootEntry = this.tryEntries[0]; | |
var rootRecord = rootEntry.completion; | |
if (rootRecord.type === "throw") { | |
throw rootRecord.arg; | |
} | |
return this.rval; | |
}, | |
dispatchException: function dispatchException(exception) { | |
if (this.done) { | |
throw exception; | |
} | |
var context = this; | |
function handle(loc, caught) { | |
record.type = "throw"; | |
record.arg = exception; | |
context.next = loc; | |
return !!caught; | |
} | |
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | |
var entry = this.tryEntries[i]; | |
var record = entry.completion; | |
if (entry.tryLoc === "root") { | |
// Exception thrown outside of any try block that could handle | |
// it, so set the completion value of the entire function to | |
// throw the exception. | |
return handle("end"); | |
} | |
if (entry.tryLoc <= this.prev) { | |
var hasCatch = hasOwn.call(entry, "catchLoc"); | |
var hasFinally = hasOwn.call(entry, "finallyLoc"); | |
if (hasCatch && hasFinally) { | |
if (this.prev < entry.catchLoc) { | |
return handle(entry.catchLoc, true); | |
} else if (this.prev < entry.finallyLoc) { | |
return handle(entry.finallyLoc); | |
} | |
} else if (hasCatch) { | |
if (this.prev < entry.catchLoc) { | |
return handle(entry.catchLoc, true); | |
} | |
} else if (hasFinally) { | |
if (this.prev < entry.finallyLoc) { | |
return handle(entry.finallyLoc); | |
} | |
} else { | |
throw new Error("try statement without catch or finally"); | |
} | |
} | |
} | |
}, | |
abrupt: function abrupt(type, arg) { | |
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | |
var entry = this.tryEntries[i]; | |
if (entry.tryLoc <= this.prev && hasOwn.call(entry, "finallyLoc") && this.prev < entry.finallyLoc) { | |
var finallyEntry = entry; | |
break; | |
} | |
} | |
if (finallyEntry && (type === "break" || type === "continue") && finallyEntry.tryLoc <= arg && arg <= finallyEntry.finallyLoc) { | |
// Ignore the finally entry if control is not jumping to a | |
// location outside the try/catch block. | |
finallyEntry = null; | |
} | |
var record = finallyEntry ? finallyEntry.completion : {}; | |
record.type = type; | |
record.arg = arg; | |
if (finallyEntry) { | |
this.next = finallyEntry.finallyLoc; | |
} else { | |
this.complete(record); | |
} | |
return ContinueSentinel; | |
}, | |
complete: function complete(record, afterLoc) { | |
if (record.type === "throw") { | |
throw record.arg; | |
} | |
if (record.type === "break" || record.type === "continue") { | |
this.next = record.arg; | |
} else if (record.type === "return") { | |
this.rval = record.arg; | |
this.next = "end"; | |
} else if (record.type === "normal" && afterLoc) { | |
this.next = afterLoc; | |
} | |
}, | |
finish: function finish(finallyLoc) { | |
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | |
var entry = this.tryEntries[i]; | |
if (entry.finallyLoc === finallyLoc) { | |
this.complete(entry.completion, entry.afterLoc); | |
resetTryEntry(entry); | |
return ContinueSentinel; | |
} | |
} | |
}, | |
"catch": function _catch(tryLoc) { | |
for (var i = this.tryEntries.length - 1; i >= 0; --i) { | |
var entry = this.tryEntries[i]; | |
if (entry.tryLoc === tryLoc) { | |
var record = entry.completion; | |
if (record.type === "throw") { | |
var thrown = record.arg; | |
resetTryEntry(entry); | |
} | |
return thrown; | |
} | |
} | |
// The context.catch method must only be called with a location | |
// argument that corresponds to a known catch block. | |
throw new Error("illegal catch attempt"); | |
}, | |
delegateYield: function delegateYield(iterable, resultName, nextLoc) { | |
this.delegate = { | |
iterator: values(iterable), | |
resultName: resultName, | |
nextLoc: nextLoc | |
}; | |
return ContinueSentinel; | |
} | |
}; | |
})( | |
// Among the various tricks for obtaining a reference to the global | |
// object, this seems to be the most reliable technique that does not | |
// use indirect eval (which violates Content Security Policy). | |
typeof global === "object" ? global : typeof window === "object" ? window : typeof self === "object" ? self : undefined); | |
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"_process":81,"babel-runtime/core-js/object/create":3,"babel-runtime/core-js/promise":5,"babel-runtime/core-js/symbol":6,"babel-runtime/core-js/symbol/iterator":7}],79:[function(require,module,exports){ | |
arguments[4][72][0].apply(exports,arguments) | |
},{"dup":72}],80:[function(require,module,exports){ | |
// Copyright Joyent, Inc. and other Node contributors. | |
// | |
// Permission is hereby granted, free of charge, to any person obtaining a | |
// copy of this software and associated documentation files (the | |
// "Software"), to deal in the Software without restriction, including | |
// without limitation the rights to use, copy, modify, merge, publish, | |
// distribute, sublicense, and/or sell copies of the Software, and to permit | |
// persons to whom the Software is furnished to do so, subject to the | |
// following conditions: | |
// | |
// The above copyright notice and this permission notice shall be included | |
// in all copies or substantial portions of the Software. | |
// | |
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS | |
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF | |
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN | |
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | |
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | |
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE | |
// USE OR OTHER DEALINGS IN THE SOFTWARE. | |
function EventEmitter() { | |
this._events = this._events || {}; | |
this._maxListeners = this._maxListeners || undefined; | |
} | |
module.exports = EventEmitter; | |
// Backwards-compat with node 0.10.x | |
EventEmitter.EventEmitter = EventEmitter; | |
EventEmitter.prototype._events = undefined; | |
EventEmitter.prototype._maxListeners = undefined; | |
// By default EventEmitters will print a warning if more than 10 listeners are | |
// added to it. This is a useful default which helps finding memory leaks. | |
EventEmitter.defaultMaxListeners = 10; | |
// Obviously not all Emitters should be limited to 10. This function allows | |
// that to be increased. Set to zero for unlimited. | |
EventEmitter.prototype.setMaxListeners = function(n) { | |
if (!isNumber(n) || n < 0 || isNaN(n)) | |
throw TypeError('n must be a positive number'); | |
this._maxListeners = n; | |
return this; | |
}; | |
EventEmitter.prototype.emit = function(type) { | |
var er, handler, len, args, i, listeners; | |
if (!this._events) | |
this._events = {}; | |
// If there is no 'error' event listener then throw. | |
if (type === 'error') { | |
if (!this._events.error || | |
(isObject(this._events.error) && !this._events.error.length)) { | |
er = arguments[1]; | |
if (er instanceof Error) { | |
throw er; // Unhandled 'error' event | |
} | |
throw TypeError('Uncaught, unspecified "error" event.'); | |
} | |
} | |
handler = this._events[type]; | |
if (isUndefined(handler)) | |
return false; | |
if (isFunction(handler)) { | |
switch (arguments.length) { | |
// fast cases | |
case 1: | |
handler.call(this); | |
break; | |
case 2: | |
handler.call(this, arguments[1]); | |
break; | |
case 3: | |
handler.call(this, arguments[1], arguments[2]); | |
break; | |
// slower | |
default: | |
len = arguments.length; | |
args = new Array(len - 1); | |
for (i = 1; i < len; i++) | |
args[i - 1] = arguments[i]; | |
handler.apply(this, args); | |
} | |
} else if (isObject(handler)) { | |
len = arguments.length; | |
args = new Array(len - 1); | |
for (i = 1; i < len; i++) | |
args[i - 1] = arguments[i]; | |
listeners = handler.slice(); | |
len = listeners.length; | |
for (i = 0; i < len; i++) | |
listeners[i].apply(this, args); | |
} | |
return true; | |
}; | |
EventEmitter.prototype.addListener = function(type, listener) { | |
var m; | |
if (!isFunction(listener)) | |
throw TypeError('listener must be a function'); | |
if (!this._events) | |
this._events = {}; | |
// To avoid recursion in the case that type === "newListener"! Before | |
// adding it to the listeners, first emit "newListener". | |
if (this._events.newListener) | |
this.emit('newListener', type, | |
isFunction(listener.listener) ? | |
listener.listener : listener); | |
if (!this._events[type]) | |
// Optimize the case of one listener. Don't need the extra array object. | |
this._events[type] = listener; | |
else if (isObject(this._events[type])) | |
// If we've already got an array, just append. | |
this._events[type].push(listener); | |
else | |
// Adding the second element, need to change to array. | |
this._events[type] = [this._events[type], listener]; | |
// Check for listener leak | |
if (isObject(this._events[type]) && !this._events[type].warned) { | |
var m; | |
if (!isUndefined(this._maxListeners)) { | |
m = this._maxListeners; | |
} else { | |
m = EventEmitter.defaultMaxListeners; | |
} | |
if (m && m > 0 && this._events[type].length > m) { | |
this._events[type].warned = true; | |
console.error('(node) warning: possible EventEmitter memory ' + | |
'leak detected. %d listeners added. ' + | |
'Use emitter.setMaxListeners() to increase limit.', | |
this._events[type].length); | |
if (typeof console.trace === 'function') { | |
// not supported in IE 10 | |
console.trace(); | |
} | |
} | |
} | |
return this; | |
}; | |
EventEmitter.prototype.on = EventEmitter.prototype.addListener; | |
EventEmitter.prototype.once = function(type, listener) { | |
if (!isFunction(listener)) | |
throw TypeError('listener must be a function'); | |
var fired = false; | |
function g() { | |
this.removeListener(type, g); | |
if (!fired) { | |
fired = true; | |
listener.apply(this, arguments); | |
} | |
} | |
g.listener = listener; | |
this.on(type, g); | |
return this; | |
}; | |
// emits a 'removeListener' event iff the listener was removed | |
EventEmitter.prototype.removeListener = function(type, listener) { | |
var list, position, length, i; | |
if (!isFunction(listener)) | |
throw TypeError('listener must be a function'); | |
if (!this._events || !this._events[type]) | |
return this; | |
list = this._events[type]; | |
length = list.length; | |
position = -1; | |
if (list === listener || | |
(isFunction(list.listener) && list.listener === listener)) { | |
delete this._events[type]; | |
if (this._events.removeListener) | |
this.emit('removeListener', type, listener); | |
} else if (isObject(list)) { | |
for (i = length; i-- > 0;) { | |
if (list[i] === listener || | |
(list[i].listener && list[i].listener === listener)) { | |
position = i; | |
break; | |
} | |
} | |
if (position < 0) | |
return this; | |
if (list.length === 1) { | |
list.length = 0; | |
delete this._events[type]; | |
} else { | |
list.splice(position, 1); | |
} | |
if (this._events.removeListener) | |
this.emit('removeListener', type, listener); | |
} | |
return this; | |
}; | |
EventEmitter.prototype.removeAllListeners = function(type) { | |
var key, listeners; | |
if (!this._events) | |
return this; | |
// not listening for removeListener, no need to emit | |
if (!this._events.removeListener) { | |
if (arguments.length === 0) | |
this._events = {}; | |
else if (this._events[type]) | |
delete this._events[type]; | |
return this; | |
} | |
// emit removeListener for all listeners on all events | |
if (arguments.length === 0) { | |
for (key in this._events) { | |
if (key === 'removeListener') continue; | |
this.removeAllListeners(key); | |
} | |
this.removeAllListeners('removeListener'); | |
this._events = {}; | |
return this; | |
} | |
listeners = this._events[type]; | |
if (isFunction(listeners)) { | |
this.removeListener(type, listeners); | |
} else { | |
// LIFO order | |
while (listeners.length) | |
this.removeListener(type, listeners[listeners.length - 1]); | |
} | |
delete this._events[type]; | |
return this; | |
}; | |
EventEmitter.prototype.listeners = function(type) { | |
var ret; | |
if (!this._events || !this._events[type]) | |
ret = []; | |
else if (isFunction(this._events[type])) | |
ret = [this._events[type]]; | |
else | |
ret = this._events[type].slice(); | |
return ret; | |
}; | |
EventEmitter.listenerCount = function(emitter, type) { | |
var ret; | |
if (!emitter._events || !emitter._events[type]) | |
ret = 0; | |
else if (isFunction(emitter._events[type])) | |
ret = 1; | |
else | |
ret = emitter._events[type].length; | |
return ret; | |
}; | |
function isFunction(arg) { | |
return typeof arg === 'function'; | |
} | |
function isNumber(arg) { | |
return typeof arg === 'number'; | |
} | |
function isObject(arg) { | |
return typeof arg === 'object' && arg !== null; | |
} | |
function isUndefined(arg) { | |
return arg === void 0; | |
} | |
},{}],81:[function(require,module,exports){ | |
// shim for using process in browser | |
var process = module.exports = {}; | |
var queue = []; | |
var draining = false; | |
var currentQueue; | |
var queueIndex = -1; | |
function cleanUpNextTick() { | |
draining = false; | |
if (currentQueue.length) { | |
queue = currentQueue.concat(queue); | |
} else { | |
queueIndex = -1; | |
} | |
if (queue.length) { | |
drainQueue(); | |
} | |
} | |
function drainQueue() { | |
if (draining) { | |
return; | |
} | |
var timeout = setTimeout(cleanUpNextTick); | |
draining = true; | |
var len = queue.length; | |
while(len) { | |
currentQueue = queue; | |
queue = []; | |
while (++queueIndex < len) { | |
currentQueue[queueIndex].run(); | |
} | |
queueIndex = -1; | |
len = queue.length; | |
} | |
currentQueue = null; | |
draining = false; | |
clearTimeout(timeout); | |
} | |
process.nextTick = function (fun) { | |
var args = new Array(arguments.length - 1); | |
if (arguments.length > 1) { | |
for (var i = 1; i < arguments.length; i++) { | |
args[i - 1] = arguments[i]; | |
} | |
} | |
queue.push(new Item(fun, args)); | |
if (queue.length === 1 && !draining) { | |
setTimeout(drainQueue, 0); | |
} | |
}; | |
// v8 likes predictible objects | |
function Item(fun, array) { | |
this.fun = fun; | |
this.array = array; | |
} | |
Item.prototype.run = function () { | |
this.fun.apply(null, this.array); | |
}; | |
process.title = 'browser'; | |
process.browser = true; | |
process.env = {}; | |
process.argv = []; | |
process.version = ''; // empty string to avoid regexp issues | |
process.versions = {}; | |
function noop() {} | |
process.on = noop; | |
process.addListener = noop; | |
process.once = noop; | |
process.off = noop; | |
process.removeListener = noop; | |
process.removeAllListeners = noop; | |
process.emit = noop; | |
process.binding = function (name) { | |
throw new Error('process.binding is not supported'); | |
}; | |
// TODO(shtylman) | |
process.cwd = function () { return '/' }; | |
process.chdir = function (dir) { | |
throw new Error('process.chdir is not supported'); | |
}; | |
process.umask = function() { return 0; }; | |
},{}],82:[function(require,module,exports){ | |
/** | |
* Copyright (c) 2014-2015, Facebook, Inc. | |
* All rights reserved. | |
* | |
* This source code is licensed under the BSD-style license found in the | |
* LICENSE file in the root directory of this source tree. An additional grant | |
* of patent rights can be found in the PATENTS file in the same directory. | |
*/ | |
(function (global, factory) { | |
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : | |
typeof define === 'function' && define.amd ? define(factory) : | |
global.Immutable = factory() | |
}(this, function () { 'use strict';var SLICE$0 = Array.prototype.slice; | |
function createClass(ctor, superClass) { | |
if (superClass) { | |
ctor.prototype = Object.create(superClass.prototype); | |
} | |
ctor.prototype.constructor = ctor; | |
} | |
// Used for setting prototype methods that IE8 chokes on. | |
var DELETE = 'delete'; | |
// Constants describing the size of trie nodes. | |
var SHIFT = 5; // Resulted in best performance after ______? | |
var SIZE = 1 << SHIFT; | |
var MASK = SIZE - 1; | |
// A consistent shared value representing "not set" which equals nothing other | |
// than itself, and nothing that could be provided externally. | |
var NOT_SET = {}; | |
// Boolean references, Rough equivalent of `bool &`. | |
var CHANGE_LENGTH = { value: false }; | |
var DID_ALTER = { value: false }; | |
function MakeRef(ref) { | |
ref.value = false; | |
return ref; | |
} | |
function SetRef(ref) { | |
ref && (ref.value = true); | |
} | |
// A function which returns a value representing an "owner" for transient writes | |
// to tries. The return value will only ever equal itself, and will not equal | |
// the return of any subsequent call of this function. | |
function OwnerID() {} | |
// http://jsperf.com/copy-array-inline | |
function arrCopy(arr, offset) { | |
offset = offset || 0; | |
var len = Math.max(0, arr.length - offset); | |
var newArr = new Array(len); | |
for (var ii = 0; ii < len; ii++) { | |
newArr[ii] = arr[ii + offset]; | |
} | |
return newArr; | |
} | |
function ensureSize(iter) { | |
if (iter.size === undefined) { | |
iter.size = iter.__iterate(returnTrue); | |
} | |
return iter.size; | |
} | |
function wrapIndex(iter, index) { | |
return index >= 0 ? (+index) : ensureSize(iter) + (+index); | |
} | |
function returnTrue() { | |
return true; | |
} | |
function wholeSlice(begin, end, size) { | |
return (begin === 0 || (size !== undefined && begin <= -size)) && | |
(end === undefined || (size !== undefined && end >= size)); | |
} | |
function resolveBegin(begin, size) { | |
return resolveIndex(begin, size, 0); | |
} | |
function resolveEnd(end, size) { | |
return resolveIndex(end, size, size); | |
} | |
function resolveIndex(index, size, defaultIndex) { | |
return index === undefined ? | |
defaultIndex : | |
index < 0 ? | |
Math.max(0, size + index) : | |
size === undefined ? | |
index : | |
Math.min(size, index); | |
} | |
function Iterable(value) { | |
return isIterable(value) ? value : Seq(value); | |
} | |
createClass(KeyedIterable, Iterable); | |
function KeyedIterable(value) { | |
return isKeyed(value) ? value : KeyedSeq(value); | |
} | |
createClass(IndexedIterable, Iterable); | |
function IndexedIterable(value) { | |
return isIndexed(value) ? value : IndexedSeq(value); | |
} | |
createClass(SetIterable, Iterable); | |
function SetIterable(value) { | |
return isIterable(value) && !isAssociative(value) ? value : SetSeq(value); | |
} | |
function isIterable(maybeIterable) { | |
return !!(maybeIterable && maybeIterable[IS_ITERABLE_SENTINEL]); | |
} | |
function isKeyed(maybeKeyed) { | |
return !!(maybeKeyed && maybeKeyed[IS_KEYED_SENTINEL]); | |
} | |
function isIndexed(maybeIndexed) { | |
return !!(maybeIndexed && maybeIndexed[IS_INDEXED_SENTINEL]); | |
} | |
function isAssociative(maybeAssociative) { | |
return isKeyed(maybeAssociative) || isIndexed(maybeAssociative); | |
} | |
function isOrdered(maybeOrdered) { | |
return !!(maybeOrdered && maybeOrdered[IS_ORDERED_SENTINEL]); | |
} | |
Iterable.isIterable = isIterable; | |
Iterable.isKeyed = isKeyed; | |
Iterable.isIndexed = isIndexed; | |
Iterable.isAssociative = isAssociative; | |
Iterable.isOrdered = isOrdered; | |
Iterable.Keyed = KeyedIterable; | |
Iterable.Indexed = IndexedIterable; | |
Iterable.Set = SetIterable; | |
var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@'; | |
var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@'; | |
var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@'; | |
var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@'; | |
/* global Symbol */ | |
var ITERATE_KEYS = 0; | |
var ITERATE_VALUES = 1; | |
var ITERATE_ENTRIES = 2; | |
var REAL_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator; | |
var FAUX_ITERATOR_SYMBOL = '@@iterator'; | |
var ITERATOR_SYMBOL = REAL_ITERATOR_SYMBOL || FAUX_ITERATOR_SYMBOL; | |
function src_Iterator__Iterator(next) { | |
this.next = next; | |
} | |
src_Iterator__Iterator.prototype.toString = function() { | |
return '[Iterator]'; | |
}; | |
src_Iterator__Iterator.KEYS = ITERATE_KEYS; | |
src_Iterator__Iterator.VALUES = ITERATE_VALUES; | |
src_Iterator__Iterator.ENTRIES = ITERATE_ENTRIES; | |
src_Iterator__Iterator.prototype.inspect = | |
src_Iterator__Iterator.prototype.toSource = function () { return this.toString(); } | |
src_Iterator__Iterator.prototype[ITERATOR_SYMBOL] = function () { | |
return this; | |
}; | |
function iteratorValue(type, k, v, iteratorResult) { | |
var value = type === 0 ? k : type === 1 ? v : [k, v]; | |
iteratorResult ? (iteratorResult.value = value) : (iteratorResult = { | |
value: value, done: false | |
}); | |
return iteratorResult; | |
} | |
function iteratorDone() { | |
return { value: undefined, done: true }; | |
} | |
function hasIterator(maybeIterable) { | |
return !!getIteratorFn(maybeIterable); | |
} | |
function isIterator(maybeIterator) { | |
return maybeIterator && typeof maybeIterator.next === 'function'; | |
} | |
function getIterator(iterable) { | |
var iteratorFn = getIteratorFn(iterable); | |
return iteratorFn && iteratorFn.call(iterable); | |
} | |
function getIteratorFn(iterable) { | |
var iteratorFn = iterable && ( | |
(REAL_ITERATOR_SYMBOL && iterable[REAL_ITERATOR_SYMBOL]) || | |
iterable[FAUX_ITERATOR_SYMBOL] | |
); | |
if (typeof iteratorFn === 'function') { | |
return iteratorFn; | |
} | |
} | |
function isArrayLike(value) { | |
return value && typeof value.length === 'number'; | |
} | |
createClass(Seq, Iterable); | |
function Seq(value) { | |
return value === null || value === undefined ? emptySequence() : | |
isIterable(value) ? value.toSeq() : seqFromValue(value); | |
} | |
Seq.of = function(/*...values*/) { | |
return Seq(arguments); | |
}; | |
Seq.prototype.toSeq = function() { | |
return this; | |
}; | |
Seq.prototype.toString = function() { | |
return this.__toString('Seq {', '}'); | |
}; | |
Seq.prototype.cacheResult = function() { | |
if (!this._cache && this.__iterateUncached) { | |
this._cache = this.entrySeq().toArray(); | |
this.size = this._cache.length; | |
} | |
return this; | |
}; | |
// abstract __iterateUncached(fn, reverse) | |
Seq.prototype.__iterate = function(fn, reverse) { | |
return seqIterate(this, fn, reverse, true); | |
}; | |
// abstract __iteratorUncached(type, reverse) | |
Seq.prototype.__iterator = function(type, reverse) { | |
return seqIterator(this, type, reverse, true); | |
}; | |
createClass(KeyedSeq, Seq); | |
function KeyedSeq(value) { | |
return value === null || value === undefined ? | |
emptySequence().toKeyedSeq() : | |
isIterable(value) ? | |
(isKeyed(value) ? value.toSeq() : value.fromEntrySeq()) : | |
keyedSeqFromValue(value); | |
} | |
KeyedSeq.prototype.toKeyedSeq = function() { | |
return this; | |
}; | |
createClass(IndexedSeq, Seq); | |
function IndexedSeq(value) { | |
return value === null || value === undefined ? emptySequence() : | |
!isIterable(value) ? indexedSeqFromValue(value) : | |
isKeyed(value) ? value.entrySeq() : value.toIndexedSeq(); | |
} | |
IndexedSeq.of = function(/*...values*/) { | |
return IndexedSeq(arguments); | |
}; | |
IndexedSeq.prototype.toIndexedSeq = function() { | |
return this; | |
}; | |
IndexedSeq.prototype.toString = function() { | |
return this.__toString('Seq [', ']'); | |
}; | |
IndexedSeq.prototype.__iterate = function(fn, reverse) { | |
return seqIterate(this, fn, reverse, false); | |
}; | |
IndexedSeq.prototype.__iterator = function(type, reverse) { | |
return seqIterator(this, type, reverse, false); | |
}; | |
createClass(SetSeq, Seq); | |
function SetSeq(value) { | |
return ( | |
value === null || value === undefined ? emptySequence() : | |
!isIterable(value) ? indexedSeqFromValue(value) : | |
isKeyed(value) ? value.entrySeq() : value | |
).toSetSeq(); | |
} | |
SetSeq.of = function(/*...values*/) { | |
return SetSeq(arguments); | |
}; | |
SetSeq.prototype.toSetSeq = function() { | |
return this; | |
}; | |
Seq.isSeq = isSeq; | |
Seq.Keyed = KeyedSeq; | |
Seq.Set = SetSeq; | |
Seq.Indexed = IndexedSeq; | |
var IS_SEQ_SENTINEL = '@@__IMMUTABLE_SEQ__@@'; | |
Seq.prototype[IS_SEQ_SENTINEL] = true; | |
// #pragma Root Sequences | |
createClass(ArraySeq, IndexedSeq); | |
function ArraySeq(array) { | |
this._array = array; | |
this.size = array.length; | |
} | |
ArraySeq.prototype.get = function(index, notSetValue) { | |
return this.has(index) ? this._array[wrapIndex(this, index)] : notSetValue; | |
}; | |
ArraySeq.prototype.__iterate = function(fn, reverse) { | |
var array = this._array; | |
var maxIndex = array.length - 1; | |
for (var ii = 0; ii <= maxIndex; ii++) { | |
if (fn(array[reverse ? maxIndex - ii : ii], ii, this) === false) { | |
return ii + 1; | |
} | |
} | |
return ii; | |
}; | |
ArraySeq.prototype.__iterator = function(type, reverse) { | |
var array = this._array; | |
var maxIndex = array.length - 1; | |
var ii = 0; | |
return new src_Iterator__Iterator(function() | |
{return ii > maxIndex ? | |
iteratorDone() : | |
iteratorValue(type, ii, array[reverse ? maxIndex - ii++ : ii++])} | |
); | |
}; | |
createClass(ObjectSeq, KeyedSeq); | |
function ObjectSeq(object) { | |
var keys = Object.keys(object); | |
this._object = object; | |
this._keys = keys; | |
this.size = keys.length; | |
} | |
ObjectSeq.prototype.get = function(key, notSetValue) { | |
if (notSetValue !== undefined && !this.has(key)) { | |
return notSetValue; | |
} | |
return this._object[key]; | |
}; | |
ObjectSeq.prototype.has = function(key) { | |
return this._object.hasOwnProperty(key); | |
}; | |
ObjectSeq.prototype.__iterate = function(fn, reverse) { | |
var object = this._object; | |
var keys = this._keys; | |
var maxIndex = keys.length - 1; | |
for (var ii = 0; ii <= maxIndex; ii++) { | |
var key = keys[reverse ? maxIndex - ii : ii]; | |
if (fn(object[key], key, this) === false) { | |
return ii + 1; | |
} | |
} | |
return ii; | |
}; | |
ObjectSeq.prototype.__iterator = function(type, reverse) { | |
var object = this._object; | |
var keys = this._keys; | |
var maxIndex = keys.length - 1; | |
var ii = 0; | |
return new src_Iterator__Iterator(function() { | |
var key = keys[reverse ? maxIndex - ii : ii]; | |
return ii++ > maxIndex ? | |
iteratorDone() : | |
iteratorValue(type, key, object[key]); | |
}); | |
}; | |
ObjectSeq.prototype[IS_ORDERED_SENTINEL] = true; | |
createClass(IterableSeq, IndexedSeq); | |
function IterableSeq(iterable) { | |
this._iterable = iterable; | |
this.size = iterable.length || iterable.size; | |
} | |
IterableSeq.prototype.__iterateUncached = function(fn, reverse) { | |
if (reverse) { | |
return this.cacheResult().__iterate(fn, reverse); | |
} | |
var iterable = this._iterable; | |
var iterator = getIterator(iterable); | |
var iterations = 0; | |
if (isIterator(iterator)) { | |
var step; | |
while (!(step = iterator.next()).done) { | |
if (fn(step.value, iterations++, this) === false) { | |
break; | |
} | |
} | |
} | |
return iterations; | |
}; | |
IterableSeq.prototype.__iteratorUncached = function(type, reverse) { | |
if (reverse) { | |
return this.cacheResult().__iterator(type, reverse); | |
} | |
var iterable = this._iterable; | |
var iterator = getIterator(iterable); | |
if (!isIterator(iterator)) { | |
return new src_Iterator__Iterator(iteratorDone); | |
} | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
return step.done ? step : iteratorValue(type, iterations++, step.value); | |
}); | |
}; | |
createClass(IteratorSeq, IndexedSeq); | |
function IteratorSeq(iterator) { | |
this._iterator = iterator; | |
this._iteratorCache = []; | |
} | |
IteratorSeq.prototype.__iterateUncached = function(fn, reverse) { | |
if (reverse) { | |
return this.cacheResult().__iterate(fn, reverse); | |
} | |
var iterator = this._iterator; | |
var cache = this._iteratorCache; | |
var iterations = 0; | |
while (iterations < cache.length) { | |
if (fn(cache[iterations], iterations++, this) === false) { | |
return iterations; | |
} | |
} | |
var step; | |
while (!(step = iterator.next()).done) { | |
var val = step.value; | |
cache[iterations] = val; | |
if (fn(val, iterations++, this) === false) { | |
break; | |
} | |
} | |
return iterations; | |
}; | |
IteratorSeq.prototype.__iteratorUncached = function(type, reverse) { | |
if (reverse) { | |
return this.cacheResult().__iterator(type, reverse); | |
} | |
var iterator = this._iterator; | |
var cache = this._iteratorCache; | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
if (iterations >= cache.length) { | |
var step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
cache[iterations] = step.value; | |
} | |
return iteratorValue(type, iterations, cache[iterations++]); | |
}); | |
}; | |
// # pragma Helper functions | |
function isSeq(maybeSeq) { | |
return !!(maybeSeq && maybeSeq[IS_SEQ_SENTINEL]); | |
} | |
var EMPTY_SEQ; | |
function emptySequence() { | |
return EMPTY_SEQ || (EMPTY_SEQ = new ArraySeq([])); | |
} | |
function keyedSeqFromValue(value) { | |
var seq = | |
Array.isArray(value) ? new ArraySeq(value).fromEntrySeq() : | |
isIterator(value) ? new IteratorSeq(value).fromEntrySeq() : | |
hasIterator(value) ? new IterableSeq(value).fromEntrySeq() : | |
typeof value === 'object' ? new ObjectSeq(value) : | |
undefined; | |
if (!seq) { | |
throw new TypeError( | |
'Expected Array or iterable object of [k, v] entries, '+ | |
'or keyed object: ' + value | |
); | |
} | |
return seq; | |
} | |
function indexedSeqFromValue(value) { | |
var seq = maybeIndexedSeqFromValue(value); | |
if (!seq) { | |
throw new TypeError( | |
'Expected Array or iterable object of values: ' + value | |
); | |
} | |
return seq; | |
} | |
function seqFromValue(value) { | |
var seq = maybeIndexedSeqFromValue(value) || | |
(typeof value === 'object' && new ObjectSeq(value)); | |
if (!seq) { | |
throw new TypeError( | |
'Expected Array or iterable object of values, or keyed object: ' + value | |
); | |
} | |
return seq; | |
} | |
function maybeIndexedSeqFromValue(value) { | |
return ( | |
isArrayLike(value) ? new ArraySeq(value) : | |
isIterator(value) ? new IteratorSeq(value) : | |
hasIterator(value) ? new IterableSeq(value) : | |
undefined | |
); | |
} | |
function seqIterate(seq, fn, reverse, useKeys) { | |
var cache = seq._cache; | |
if (cache) { | |
var maxIndex = cache.length - 1; | |
for (var ii = 0; ii <= maxIndex; ii++) { | |
var entry = cache[reverse ? maxIndex - ii : ii]; | |
if (fn(entry[1], useKeys ? entry[0] : ii, seq) === false) { | |
return ii + 1; | |
} | |
} | |
return ii; | |
} | |
return seq.__iterateUncached(fn, reverse); | |
} | |
function seqIterator(seq, type, reverse, useKeys) { | |
var cache = seq._cache; | |
if (cache) { | |
var maxIndex = cache.length - 1; | |
var ii = 0; | |
return new src_Iterator__Iterator(function() { | |
var entry = cache[reverse ? maxIndex - ii : ii]; | |
return ii++ > maxIndex ? | |
iteratorDone() : | |
iteratorValue(type, useKeys ? entry[0] : ii - 1, entry[1]); | |
}); | |
} | |
return seq.__iteratorUncached(type, reverse); | |
} | |
createClass(Collection, Iterable); | |
function Collection() { | |
throw TypeError('Abstract'); | |
} | |
createClass(KeyedCollection, Collection);function KeyedCollection() {} | |
createClass(IndexedCollection, Collection);function IndexedCollection() {} | |
createClass(SetCollection, Collection);function SetCollection() {} | |
Collection.Keyed = KeyedCollection; | |
Collection.Indexed = IndexedCollection; | |
Collection.Set = SetCollection; | |
/** | |
* An extension of the "same-value" algorithm as [described for use by ES6 Map | |
* and Set](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map#Key_equality) | |
* | |
* NaN is considered the same as NaN, however -0 and 0 are considered the same | |
* value, which is different from the algorithm described by | |
* [`Object.is`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/is). | |
* | |
* This is extended further to allow Objects to describe the values they | |
* represent, by way of `valueOf` or `equals` (and `hashCode`). | |
* | |
* Note: because of this extension, the key equality of Immutable.Map and the | |
* value equality of Immutable.Set will differ from ES6 Map and Set. | |
* | |
* ### Defining custom values | |
* | |
* The easiest way to describe the value an object represents is by implementing | |
* `valueOf`. For example, `Date` represents a value by returning a unix | |
* timestamp for `valueOf`: | |
* | |
* var date1 = new Date(1234567890000); // Fri Feb 13 2009 ... | |
* var date2 = new Date(1234567890000); | |
* date1.valueOf(); // 1234567890000 | |
* assert( date1 !== date2 ); | |
* assert( Immutable.is( date1, date2 ) ); | |
* | |
* Note: overriding `valueOf` may have other implications if you use this object | |
* where JavaScript expects a primitive, such as implicit string coercion. | |
* | |
* For more complex types, especially collections, implementing `valueOf` may | |
* not be performant. An alternative is to implement `equals` and `hashCode`. | |
* | |
* `equals` takes another object, presumably of similar type, and returns true | |
* if the it is equal. Equality is symmetrical, so the same result should be | |
* returned if this and the argument are flipped. | |
* | |
* assert( a.equals(b) === b.equals(a) ); | |
* | |
* `hashCode` returns a 32bit integer number representing the object which will | |
* be used to determine how to store the value object in a Map or Set. You must | |
* provide both or neither methods, one must not exist without the other. | |
* | |
* Also, an important relationship between these methods must be upheld: if two | |
* values are equal, they *must* return the same hashCode. If the values are not | |
* equal, they might have the same hashCode; this is called a hash collision, | |
* and while undesirable for performance reasons, it is acceptable. | |
* | |
* if (a.equals(b)) { | |
* assert( a.hashCode() === b.hashCode() ); | |
* } | |
* | |
* All Immutable collections implement `equals` and `hashCode`. | |
* | |
*/ | |
function is(valueA, valueB) { | |
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) { | |
return true; | |
} | |
if (!valueA || !valueB) { | |
return false; | |
} | |
if (typeof valueA.valueOf === 'function' && | |
typeof valueB.valueOf === 'function') { | |
valueA = valueA.valueOf(); | |
valueB = valueB.valueOf(); | |
if (valueA === valueB || (valueA !== valueA && valueB !== valueB)) { | |
return true; | |
} | |
if (!valueA || !valueB) { | |
return false; | |
} | |
} | |
if (typeof valueA.equals === 'function' && | |
typeof valueB.equals === 'function' && | |
valueA.equals(valueB)) { | |
return true; | |
} | |
return false; | |
} | |
function fromJS(json, converter) { | |
return converter ? | |
fromJSWith(converter, json, '', {'': json}) : | |
fromJSDefault(json); | |
} | |
function fromJSWith(converter, json, key, parentJSON) { | |
if (Array.isArray(json)) { | |
return converter.call(parentJSON, key, IndexedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)})); | |
} | |
if (isPlainObj(json)) { | |
return converter.call(parentJSON, key, KeyedSeq(json).map(function(v, k) {return fromJSWith(converter, v, k, json)})); | |
} | |
return json; | |
} | |
function fromJSDefault(json) { | |
if (Array.isArray(json)) { | |
return IndexedSeq(json).map(fromJSDefault).toList(); | |
} | |
if (isPlainObj(json)) { | |
return KeyedSeq(json).map(fromJSDefault).toMap(); | |
} | |
return json; | |
} | |
function isPlainObj(value) { | |
return value && (value.constructor === Object || value.constructor === undefined); | |
} | |
var src_Math__imul = | |
typeof Math.imul === 'function' && Math.imul(0xffffffff, 2) === -2 ? | |
Math.imul : | |
function imul(a, b) { | |
a = a | 0; // int | |
b = b | 0; // int | |
var c = a & 0xffff; | |
var d = b & 0xffff; | |
// Shift by 0 fixes the sign on the high part. | |
return (c * d) + ((((a >>> 16) * d + c * (b >>> 16)) << 16) >>> 0) | 0; // int | |
}; | |
// v8 has an optimization for storing 31-bit signed numbers. | |
// Values which have either 00 or 11 as the high order bits qualify. | |
// This function drops the highest order bit in a signed number, maintaining | |
// the sign bit. | |
function smi(i32) { | |
return ((i32 >>> 1) & 0x40000000) | (i32 & 0xBFFFFFFF); | |
} | |
function hash(o) { | |
if (o === false || o === null || o === undefined) { | |
return 0; | |
} | |
if (typeof o.valueOf === 'function') { | |
o = o.valueOf(); | |
if (o === false || o === null || o === undefined) { | |
return 0; | |
} | |
} | |
if (o === true) { | |
return 1; | |
} | |
var type = typeof o; | |
if (type === 'number') { | |
var h = o | 0; | |
if (h !== o) { | |
h ^= o * 0xFFFFFFFF; | |
} | |
while (o > 0xFFFFFFFF) { | |
o /= 0xFFFFFFFF; | |
h ^= o; | |
} | |
return smi(h); | |
} | |
if (type === 'string') { | |
return o.length > STRING_HASH_CACHE_MIN_STRLEN ? cachedHashString(o) : hashString(o); | |
} | |
if (typeof o.hashCode === 'function') { | |
return o.hashCode(); | |
} | |
return hashJSObj(o); | |
} | |
function cachedHashString(string) { | |
var hash = stringHashCache[string]; | |
if (hash === undefined) { | |
hash = hashString(string); | |
if (STRING_HASH_CACHE_SIZE === STRING_HASH_CACHE_MAX_SIZE) { | |
STRING_HASH_CACHE_SIZE = 0; | |
stringHashCache = {}; | |
} | |
STRING_HASH_CACHE_SIZE++; | |
stringHashCache[string] = hash; | |
} | |
return hash; | |
} | |
// http://jsperf.com/hashing-strings | |
function hashString(string) { | |
// This is the hash from JVM | |
// The hash code for a string is computed as | |
// s[0] * 31 ^ (n - 1) + s[1] * 31 ^ (n - 2) + ... + s[n - 1], | |
// where s[i] is the ith character of the string and n is the length of | |
// the string. We "mod" the result to make it between 0 (inclusive) and 2^31 | |
// (exclusive) by dropping high bits. | |
var hash = 0; | |
for (var ii = 0; ii < string.length; ii++) { | |
hash = 31 * hash + string.charCodeAt(ii) | 0; | |
} | |
return smi(hash); | |
} | |
function hashJSObj(obj) { | |
var hash; | |
if (usingWeakMap) { | |
hash = weakMap.get(obj); | |
if (hash !== undefined) { | |
return hash; | |
} | |
} | |
hash = obj[UID_HASH_KEY]; | |
if (hash !== undefined) { | |
return hash; | |
} | |
if (!canDefineProperty) { | |
hash = obj.propertyIsEnumerable && obj.propertyIsEnumerable[UID_HASH_KEY]; | |
if (hash !== undefined) { | |
return hash; | |
} | |
hash = getIENodeHash(obj); | |
if (hash !== undefined) { | |
return hash; | |
} | |
} | |
hash = ++objHashUID; | |
if (objHashUID & 0x40000000) { | |
objHashUID = 0; | |
} | |
if (usingWeakMap) { | |
weakMap.set(obj, hash); | |
} else if (isExtensible !== undefined && isExtensible(obj) === false) { | |
throw new Error('Non-extensible objects are not allowed as keys.'); | |
} else if (canDefineProperty) { | |
Object.defineProperty(obj, UID_HASH_KEY, { | |
'enumerable': false, | |
'configurable': false, | |
'writable': false, | |
'value': hash | |
}); | |
} else if (obj.propertyIsEnumerable !== undefined && | |
obj.propertyIsEnumerable === obj.constructor.prototype.propertyIsEnumerable) { | |
// Since we can't define a non-enumerable property on the object | |
// we'll hijack one of the less-used non-enumerable properties to | |
// save our hash on it. Since this is a function it will not show up in | |
// `JSON.stringify` which is what we want. | |
obj.propertyIsEnumerable = function() { | |
return this.constructor.prototype.propertyIsEnumerable.apply(this, arguments); | |
}; | |
obj.propertyIsEnumerable[UID_HASH_KEY] = hash; | |
} else if (obj.nodeType !== undefined) { | |
// At this point we couldn't get the IE `uniqueID` to use as a hash | |
// and we couldn't use a non-enumerable property to exploit the | |
// dontEnum bug so we simply add the `UID_HASH_KEY` on the node | |
// itself. | |
obj[UID_HASH_KEY] = hash; | |
} else { | |
throw new Error('Unable to set a non-enumerable property on object.'); | |
} | |
return hash; | |
} | |
// Get references to ES5 object methods. | |
var isExtensible = Object.isExtensible; | |
// True if Object.defineProperty works as expected. IE8 fails this test. | |
var canDefineProperty = (function() { | |
try { | |
Object.defineProperty({}, '@', {}); | |
return true; | |
} catch (e) { | |
return false; | |
} | |
}()); | |
// IE has a `uniqueID` property on DOM nodes. We can construct the hash from it | |
// and avoid memory leaks from the IE cloneNode bug. | |
function getIENodeHash(node) { | |
if (node && node.nodeType > 0) { | |
switch (node.nodeType) { | |
case 1: // Element | |
return node.uniqueID; | |
case 9: // Document | |
return node.documentElement && node.documentElement.uniqueID; | |
} | |
} | |
} | |
// If possible, use a WeakMap. | |
var usingWeakMap = typeof WeakMap === 'function'; | |
var weakMap; | |
if (usingWeakMap) { | |
weakMap = new WeakMap(); | |
} | |
var objHashUID = 0; | |
var UID_HASH_KEY = '__immutablehash__'; | |
if (typeof Symbol === 'function') { | |
UID_HASH_KEY = Symbol(UID_HASH_KEY); | |
} | |
var STRING_HASH_CACHE_MIN_STRLEN = 16; | |
var STRING_HASH_CACHE_MAX_SIZE = 255; | |
var STRING_HASH_CACHE_SIZE = 0; | |
var stringHashCache = {}; | |
function invariant(condition, error) { | |
if (!condition) throw new Error(error); | |
} | |
function assertNotInfinite(size) { | |
invariant( | |
size !== Infinity, | |
'Cannot perform this action with an infinite size.' | |
); | |
} | |
createClass(ToKeyedSequence, KeyedSeq); | |
function ToKeyedSequence(indexed, useKeys) { | |
this._iter = indexed; | |
this._useKeys = useKeys; | |
this.size = indexed.size; | |
} | |
ToKeyedSequence.prototype.get = function(key, notSetValue) { | |
return this._iter.get(key, notSetValue); | |
}; | |
ToKeyedSequence.prototype.has = function(key) { | |
return this._iter.has(key); | |
}; | |
ToKeyedSequence.prototype.valueSeq = function() { | |
return this._iter.valueSeq(); | |
}; | |
ToKeyedSequence.prototype.reverse = function() {var this$0 = this; | |
var reversedSequence = reverseFactory(this, true); | |
if (!this._useKeys) { | |
reversedSequence.valueSeq = function() {return this$0._iter.toSeq().reverse()}; | |
} | |
return reversedSequence; | |
}; | |
ToKeyedSequence.prototype.map = function(mapper, context) {var this$0 = this; | |
var mappedSequence = mapFactory(this, mapper, context); | |
if (!this._useKeys) { | |
mappedSequence.valueSeq = function() {return this$0._iter.toSeq().map(mapper, context)}; | |
} | |
return mappedSequence; | |
}; | |
ToKeyedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
var ii; | |
return this._iter.__iterate( | |
this._useKeys ? | |
function(v, k) {return fn(v, k, this$0)} : | |
((ii = reverse ? resolveSize(this) : 0), | |
function(v ) {return fn(v, reverse ? --ii : ii++, this$0)}), | |
reverse | |
); | |
}; | |
ToKeyedSequence.prototype.__iterator = function(type, reverse) { | |
if (this._useKeys) { | |
return this._iter.__iterator(type, reverse); | |
} | |
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse); | |
var ii = reverse ? resolveSize(this) : 0; | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
return step.done ? step : | |
iteratorValue(type, reverse ? --ii : ii++, step.value, step); | |
}); | |
}; | |
ToKeyedSequence.prototype[IS_ORDERED_SENTINEL] = true; | |
createClass(ToIndexedSequence, IndexedSeq); | |
function ToIndexedSequence(iter) { | |
this._iter = iter; | |
this.size = iter.size; | |
} | |
ToIndexedSequence.prototype.contains = function(value) { | |
return this._iter.contains(value); | |
}; | |
ToIndexedSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
var iterations = 0; | |
return this._iter.__iterate(function(v ) {return fn(v, iterations++, this$0)}, reverse); | |
}; | |
ToIndexedSequence.prototype.__iterator = function(type, reverse) { | |
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse); | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
return step.done ? step : | |
iteratorValue(type, iterations++, step.value, step) | |
}); | |
}; | |
createClass(ToSetSequence, SetSeq); | |
function ToSetSequence(iter) { | |
this._iter = iter; | |
this.size = iter.size; | |
} | |
ToSetSequence.prototype.has = function(key) { | |
return this._iter.contains(key); | |
}; | |
ToSetSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
return this._iter.__iterate(function(v ) {return fn(v, v, this$0)}, reverse); | |
}; | |
ToSetSequence.prototype.__iterator = function(type, reverse) { | |
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse); | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
return step.done ? step : | |
iteratorValue(type, step.value, step.value, step); | |
}); | |
}; | |
createClass(FromEntriesSequence, KeyedSeq); | |
function FromEntriesSequence(entries) { | |
this._iter = entries; | |
this.size = entries.size; | |
} | |
FromEntriesSequence.prototype.entrySeq = function() { | |
return this._iter.toSeq(); | |
}; | |
FromEntriesSequence.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
return this._iter.__iterate(function(entry ) { | |
// Check if entry exists first so array access doesn't throw for holes | |
// in the parent iteration. | |
if (entry) { | |
validateEntry(entry); | |
var indexedIterable = isIterable(entry); | |
return fn( | |
indexedIterable ? entry.get(1) : entry[1], | |
indexedIterable ? entry.get(0) : entry[0], | |
this$0 | |
); | |
} | |
}, reverse); | |
}; | |
FromEntriesSequence.prototype.__iterator = function(type, reverse) { | |
var iterator = this._iter.__iterator(ITERATE_VALUES, reverse); | |
return new src_Iterator__Iterator(function() { | |
while (true) { | |
var step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
var entry = step.value; | |
// Check if entry exists first so array access doesn't throw for holes | |
// in the parent iteration. | |
if (entry) { | |
validateEntry(entry); | |
var indexedIterable = isIterable(entry); | |
return iteratorValue( | |
type, | |
indexedIterable ? entry.get(0) : entry[0], | |
indexedIterable ? entry.get(1) : entry[1], | |
step | |
); | |
} | |
} | |
}); | |
}; | |
ToIndexedSequence.prototype.cacheResult = | |
ToKeyedSequence.prototype.cacheResult = | |
ToSetSequence.prototype.cacheResult = | |
FromEntriesSequence.prototype.cacheResult = | |
cacheResultThrough; | |
function flipFactory(iterable) { | |
var flipSequence = makeSequence(iterable); | |
flipSequence._iter = iterable; | |
flipSequence.size = iterable.size; | |
flipSequence.flip = function() {return iterable}; | |
flipSequence.reverse = function () { | |
var reversedSequence = iterable.reverse.apply(this); // super.reverse() | |
reversedSequence.flip = function() {return iterable.reverse()}; | |
return reversedSequence; | |
}; | |
flipSequence.has = function(key ) {return iterable.contains(key)}; | |
flipSequence.contains = function(key ) {return iterable.has(key)}; | |
flipSequence.cacheResult = cacheResultThrough; | |
flipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this; | |
return iterable.__iterate(function(v, k) {return fn(k, v, this$0) !== false}, reverse); | |
} | |
flipSequence.__iteratorUncached = function(type, reverse) { | |
if (type === ITERATE_ENTRIES) { | |
var iterator = iterable.__iterator(type, reverse); | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
if (!step.done) { | |
var k = step.value[0]; | |
step.value[0] = step.value[1]; | |
step.value[1] = k; | |
} | |
return step; | |
}); | |
} | |
return iterable.__iterator( | |
type === ITERATE_VALUES ? ITERATE_KEYS : ITERATE_VALUES, | |
reverse | |
); | |
} | |
return flipSequence; | |
} | |
function mapFactory(iterable, mapper, context) { | |
var mappedSequence = makeSequence(iterable); | |
mappedSequence.size = iterable.size; | |
mappedSequence.has = function(key ) {return iterable.has(key)}; | |
mappedSequence.get = function(key, notSetValue) { | |
var v = iterable.get(key, NOT_SET); | |
return v === NOT_SET ? | |
notSetValue : | |
mapper.call(context, v, key, iterable); | |
}; | |
mappedSequence.__iterateUncached = function (fn, reverse) {var this$0 = this; | |
return iterable.__iterate( | |
function(v, k, c) {return fn(mapper.call(context, v, k, c), k, this$0) !== false}, | |
reverse | |
); | |
} | |
mappedSequence.__iteratorUncached = function (type, reverse) { | |
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse); | |
return new src_Iterator__Iterator(function() { | |
var step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
var entry = step.value; | |
var key = entry[0]; | |
return iteratorValue( | |
type, | |
key, | |
mapper.call(context, entry[1], key, iterable), | |
step | |
); | |
}); | |
} | |
return mappedSequence; | |
} | |
function reverseFactory(iterable, useKeys) { | |
var reversedSequence = makeSequence(iterable); | |
reversedSequence._iter = iterable; | |
reversedSequence.size = iterable.size; | |
reversedSequence.reverse = function() {return iterable}; | |
if (iterable.flip) { | |
reversedSequence.flip = function () { | |
var flipSequence = flipFactory(iterable); | |
flipSequence.reverse = function() {return iterable.flip()}; | |
return flipSequence; | |
}; | |
} | |
reversedSequence.get = function(key, notSetValue) | |
{return iterable.get(useKeys ? key : -1 - key, notSetValue)}; | |
reversedSequence.has = function(key ) | |
{return iterable.has(useKeys ? key : -1 - key)}; | |
reversedSequence.contains = function(value ) {return iterable.contains(value)}; | |
reversedSequence.cacheResult = cacheResultThrough; | |
reversedSequence.__iterate = function (fn, reverse) {var this$0 = this; | |
return iterable.__iterate(function(v, k) {return fn(v, k, this$0)}, !reverse); | |
}; | |
reversedSequence.__iterator = | |
function(type, reverse) {return iterable.__iterator(type, !reverse)}; | |
return reversedSequence; | |
} | |
function filterFactory(iterable, predicate, context, useKeys) { | |
var filterSequence = makeSequence(iterable); | |
if (useKeys) { | |
filterSequence.has = function(key ) { | |
var v = iterable.get(key, NOT_SET); | |
return v !== NOT_SET && !!predicate.call(context, v, key, iterable); | |
}; | |
filterSequence.get = function(key, notSetValue) { | |
var v = iterable.get(key, NOT_SET); | |
return v !== NOT_SET && predicate.call(context, v, key, iterable) ? | |
v : notSetValue; | |
}; | |
} | |
filterSequence.__iterateUncached = function (fn, reverse) {var this$0 = this; | |
var iterations = 0; | |
iterable.__iterate(function(v, k, c) { | |
if (predicate.call(context, v, k, c)) { | |
iterations++; | |
return fn(v, useKeys ? k : iterations - 1, this$0); | |
} | |
}, reverse); | |
return iterations; | |
}; | |
filterSequence.__iteratorUncached = function (type, reverse) { | |
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse); | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
while (true) { | |
var step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
var entry = step.value; | |
var key = entry[0]; | |
var value = entry[1]; | |
if (predicate.call(context, value, key, iterable)) { | |
return iteratorValue(type, useKeys ? key : iterations++, value, step); | |
} | |
} | |
}); | |
} | |
return filterSequence; | |
} | |
function countByFactory(iterable, grouper, context) { | |
var groups = src_Map__Map().asMutable(); | |
iterable.__iterate(function(v, k) { | |
groups.update( | |
grouper.call(context, v, k, iterable), | |
0, | |
function(a ) {return a + 1} | |
); | |
}); | |
return groups.asImmutable(); | |
} | |
function groupByFactory(iterable, grouper, context) { | |
var isKeyedIter = isKeyed(iterable); | |
var groups = (isOrdered(iterable) ? OrderedMap() : src_Map__Map()).asMutable(); | |
iterable.__iterate(function(v, k) { | |
groups.update( | |
grouper.call(context, v, k, iterable), | |
function(a ) {return (a = a || [], a.push(isKeyedIter ? [k, v] : v), a)} | |
); | |
}); | |
var coerce = iterableClass(iterable); | |
return groups.map(function(arr ) {return reify(iterable, coerce(arr))}); | |
} | |
function sliceFactory(iterable, begin, end, useKeys) { | |
var originalSize = iterable.size; | |
if (wholeSlice(begin, end, originalSize)) { | |
return iterable; | |
} | |
var resolvedBegin = resolveBegin(begin, originalSize); | |
var resolvedEnd = resolveEnd(end, originalSize); | |
// begin or end will be NaN if they were provided as negative numbers and | |
// this iterable's size is unknown. In that case, cache first so there is | |
// a known size. | |
if (resolvedBegin !== resolvedBegin || resolvedEnd !== resolvedEnd) { | |
return sliceFactory(iterable.toSeq().cacheResult(), begin, end, useKeys); | |
} | |
var sliceSize = resolvedEnd - resolvedBegin; | |
if (sliceSize < 0) { | |
sliceSize = 0; | |
} | |
var sliceSeq = makeSequence(iterable); | |
sliceSeq.size = sliceSize === 0 ? sliceSize : iterable.size && sliceSize || undefined; | |
if (!useKeys && isSeq(iterable) && sliceSize >= 0) { | |
sliceSeq.get = function (index, notSetValue) { | |
index = wrapIndex(this, index); | |
return index >= 0 && index < sliceSize ? | |
iterable.get(index + resolvedBegin, notSetValue) : | |
notSetValue; | |
} | |
} | |
sliceSeq.__iterateUncached = function(fn, reverse) {var this$0 = this; | |
if (sliceSize === 0) { | |
return 0; | |
} | |
if (reverse) { | |
return this.cacheResult().__iterate(fn, reverse); | |
} | |
var skipped = 0; | |
var isSkipping = true; | |
var iterations = 0; | |
iterable.__iterate(function(v, k) { | |
if (!(isSkipping && (isSkipping = skipped++ < resolvedBegin))) { | |
iterations++; | |
return fn(v, useKeys ? k : iterations - 1, this$0) !== false && | |
iterations !== sliceSize; | |
} | |
}); | |
return iterations; | |
}; | |
sliceSeq.__iteratorUncached = function(type, reverse) { | |
if (sliceSize && reverse) { | |
return this.cacheResult().__iterator(type, reverse); | |
} | |
// Don't bother instantiating parent iterator if taking 0. | |
var iterator = sliceSize && iterable.__iterator(type, reverse); | |
var skipped = 0; | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
while (skipped++ < resolvedBegin) { | |
iterator.next(); | |
} | |
if (++iterations > sliceSize) { | |
return iteratorDone(); | |
} | |
var step = iterator.next(); | |
if (useKeys || type === ITERATE_VALUES) { | |
return step; | |
} else if (type === ITERATE_KEYS) { | |
return iteratorValue(type, iterations - 1, undefined, step); | |
} else { | |
return iteratorValue(type, iterations - 1, step.value[1], step); | |
} | |
}); | |
} | |
return sliceSeq; | |
} | |
function takeWhileFactory(iterable, predicate, context) { | |
var takeSequence = makeSequence(iterable); | |
takeSequence.__iterateUncached = function(fn, reverse) {var this$0 = this; | |
if (reverse) { | |
return this.cacheResult().__iterate(fn, reverse); | |
} | |
var iterations = 0; | |
iterable.__iterate(function(v, k, c) | |
{return predicate.call(context, v, k, c) && ++iterations && fn(v, k, this$0)} | |
); | |
return iterations; | |
}; | |
takeSequence.__iteratorUncached = function(type, reverse) {var this$0 = this; | |
if (reverse) { | |
return this.cacheResult().__iterator(type, reverse); | |
} | |
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse); | |
var iterating = true; | |
return new src_Iterator__Iterator(function() { | |
if (!iterating) { | |
return iteratorDone(); | |
} | |
var step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
var entry = step.value; | |
var k = entry[0]; | |
var v = entry[1]; | |
if (!predicate.call(context, v, k, this$0)) { | |
iterating = false; | |
return iteratorDone(); | |
} | |
return type === ITERATE_ENTRIES ? step : | |
iteratorValue(type, k, v, step); | |
}); | |
}; | |
return takeSequence; | |
} | |
function skipWhileFactory(iterable, predicate, context, useKeys) { | |
var skipSequence = makeSequence(iterable); | |
skipSequence.__iterateUncached = function (fn, reverse) {var this$0 = this; | |
if (reverse) { | |
return this.cacheResult().__iterate(fn, reverse); | |
} | |
var isSkipping = true; | |
var iterations = 0; | |
iterable.__iterate(function(v, k, c) { | |
if (!(isSkipping && (isSkipping = predicate.call(context, v, k, c)))) { | |
iterations++; | |
return fn(v, useKeys ? k : iterations - 1, this$0); | |
} | |
}); | |
return iterations; | |
}; | |
skipSequence.__iteratorUncached = function(type, reverse) {var this$0 = this; | |
if (reverse) { | |
return this.cacheResult().__iterator(type, reverse); | |
} | |
var iterator = iterable.__iterator(ITERATE_ENTRIES, reverse); | |
var skipping = true; | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
var step, k, v; | |
do { | |
step = iterator.next(); | |
if (step.done) { | |
if (useKeys || type === ITERATE_VALUES) { | |
return step; | |
} else if (type === ITERATE_KEYS) { | |
return iteratorValue(type, iterations++, undefined, step); | |
} else { | |
return iteratorValue(type, iterations++, step.value[1], step); | |
} | |
} | |
var entry = step.value; | |
k = entry[0]; | |
v = entry[1]; | |
skipping && (skipping = predicate.call(context, v, k, this$0)); | |
} while (skipping); | |
return type === ITERATE_ENTRIES ? step : | |
iteratorValue(type, k, v, step); | |
}); | |
}; | |
return skipSequence; | |
} | |
function concatFactory(iterable, values) { | |
var isKeyedIterable = isKeyed(iterable); | |
var iters = [iterable].concat(values).map(function(v ) { | |
if (!isIterable(v)) { | |
v = isKeyedIterable ? | |
keyedSeqFromValue(v) : | |
indexedSeqFromValue(Array.isArray(v) ? v : [v]); | |
} else if (isKeyedIterable) { | |
v = KeyedIterable(v); | |
} | |
return v; | |
}).filter(function(v ) {return v.size !== 0}); | |
if (iters.length === 0) { | |
return iterable; | |
} | |
if (iters.length === 1) { | |
var singleton = iters[0]; | |
if (singleton === iterable || | |
isKeyedIterable && isKeyed(singleton) || | |
isIndexed(iterable) && isIndexed(singleton)) { | |
return singleton; | |
} | |
} | |
var concatSeq = new ArraySeq(iters); | |
if (isKeyedIterable) { | |
concatSeq = concatSeq.toKeyedSeq(); | |
} else if (!isIndexed(iterable)) { | |
concatSeq = concatSeq.toSetSeq(); | |
} | |
concatSeq = concatSeq.flatten(true); | |
concatSeq.size = iters.reduce( | |
function(sum, seq) { | |
if (sum !== undefined) { | |
var size = seq.size; | |
if (size !== undefined) { | |
return sum + size; | |
} | |
} | |
}, | |
0 | |
); | |
return concatSeq; | |
} | |
function flattenFactory(iterable, depth, useKeys) { | |
var flatSequence = makeSequence(iterable); | |
flatSequence.__iterateUncached = function(fn, reverse) { | |
var iterations = 0; | |
var stopped = false; | |
function flatDeep(iter, currentDepth) {var this$0 = this; | |
iter.__iterate(function(v, k) { | |
if ((!depth || currentDepth < depth) && isIterable(v)) { | |
flatDeep(v, currentDepth + 1); | |
} else if (fn(v, useKeys ? k : iterations++, this$0) === false) { | |
stopped = true; | |
} | |
return !stopped; | |
}, reverse); | |
} | |
flatDeep(iterable, 0); | |
return iterations; | |
} | |
flatSequence.__iteratorUncached = function(type, reverse) { | |
var iterator = iterable.__iterator(type, reverse); | |
var stack = []; | |
var iterations = 0; | |
return new src_Iterator__Iterator(function() { | |
while (iterator) { | |
var step = iterator.next(); | |
if (step.done !== false) { | |
iterator = stack.pop(); | |
continue; | |
} | |
var v = step.value; | |
if (type === ITERATE_ENTRIES) { | |
v = v[1]; | |
} | |
if ((!depth || stack.length < depth) && isIterable(v)) { | |
stack.push(iterator); | |
iterator = v.__iterator(type, reverse); | |
} else { | |
return useKeys ? step : iteratorValue(type, iterations++, v, step); | |
} | |
} | |
return iteratorDone(); | |
}); | |
} | |
return flatSequence; | |
} | |
function flatMapFactory(iterable, mapper, context) { | |
var coerce = iterableClass(iterable); | |
return iterable.toSeq().map( | |
function(v, k) {return coerce(mapper.call(context, v, k, iterable))} | |
).flatten(true); | |
} | |
function interposeFactory(iterable, separator) { | |
var interposedSequence = makeSequence(iterable); | |
interposedSequence.size = iterable.size && iterable.size * 2 -1; | |
interposedSequence.__iterateUncached = function(fn, reverse) {var this$0 = this; | |
var iterations = 0; | |
iterable.__iterate(function(v, k) | |
{return (!iterations || fn(separator, iterations++, this$0) !== false) && | |
fn(v, iterations++, this$0) !== false}, | |
reverse | |
); | |
return iterations; | |
}; | |
interposedSequence.__iteratorUncached = function(type, reverse) { | |
var iterator = iterable.__iterator(ITERATE_VALUES, reverse); | |
var iterations = 0; | |
var step; | |
return new src_Iterator__Iterator(function() { | |
if (!step || iterations % 2) { | |
step = iterator.next(); | |
if (step.done) { | |
return step; | |
} | |
} | |
return iterations % 2 ? | |
iteratorValue(type, iterations++, separator) : | |
iteratorValue(type, iterations++, step.value, step); | |
}); | |
}; | |
return interposedSequence; | |
} | |
function sortFactory(iterable, comparator, mapper) { | |
if (!comparator) { | |
comparator = defaultComparator; | |
} | |
var isKeyedIterable = isKeyed(iterable); | |
var index = 0; | |
var entries = iterable.toSeq().map( | |
function(v, k) {return [k, v, index++, mapper ? mapper(v, k, iterable) : v]} | |
).toArray(); | |
entries.sort(function(a, b) {return comparator(a[3], b[3]) || a[2] - b[2]}).forEach( | |
isKeyedIterable ? | |
function(v, i) { entries[i].length = 2; } : | |
function(v, i) { entries[i] = v[1]; } | |
); | |
return isKeyedIterable ? KeyedSeq(entries) : | |
isIndexed(iterable) ? IndexedSeq(entries) : | |
SetSeq(entries); | |
} | |
function maxFactory(iterable, comparator, mapper) { | |
if (!comparator) { | |
comparator = defaultComparator; | |
} | |
if (mapper) { | |
var entry = iterable.toSeq() | |
.map(function(v, k) {return [v, mapper(v, k, iterable)]}) | |
.reduce(function(a, b) {return maxCompare(comparator, a[1], b[1]) ? b : a}); | |
return entry && entry[0]; | |
} else { | |
return iterable.reduce(function(a, b) {return maxCompare(comparator, a, b) ? b : a}); | |
} | |
} | |
function maxCompare(comparator, a, b) { | |
var comp = comparator(b, a); | |
// b is considered the new max if the comparator declares them equal, but | |
// they are not equal and b is in fact a nullish value. | |
return (comp === 0 && b !== a && (b === undefined || b === null || b !== b)) || comp > 0; | |
} | |
function zipWithFactory(keyIter, zipper, iters) { | |
var zipSequence = makeSequence(keyIter); | |
zipSequence.size = new ArraySeq(iters).map(function(i ) {return i.size}).min(); | |
// Note: this a generic base implementation of __iterate in terms of | |
// __iterator which may be more generically useful in the future. | |
zipSequence.__iterate = function(fn, reverse) { | |
/* generic: | |
var iterator = this.__iterator(ITERATE_ENTRIES, reverse); | |
var step; | |
var iterations = 0; | |
while (!(step = iterator.next()).done) { | |
iterations++; | |
if (fn(step.value[1], step.value[0], this) === false) { | |
break; | |
} | |
} | |
return iterations; | |
*/ | |
// indexed: | |
var iterator = this.__iterator(ITERATE_VALUES, reverse); | |
var step; | |
var iterations = 0; | |
while (!(step = iterator.next()).done) { | |
if (fn(step.value, iterations++, this) === false) { | |
break; | |
} | |
} | |
return iterations; | |
}; | |
zipSequence.__iteratorUncached = function(type, reverse) { | |
var iterators = iters.map(function(i ) | |
{return (i = Iterable(i), getIterator(reverse ? i.reverse() : i))} | |
); | |
var iterations = 0; | |
var isDone = false; | |
return new src_Iterator__Iterator(function() { | |
var steps; | |
if (!isDone) { | |
steps = iterators.map(function(i ) {return i.next()}); | |
isDone = steps.some(function(s ) {return s.done}); | |
} | |
if (isDone) { | |
return iteratorDone(); | |
} | |
return iteratorValue( | |
type, | |
iterations++, | |
zipper.apply(null, steps.map(function(s ) {return s.value})) | |
); | |
}); | |
}; | |
return zipSequence | |
} | |
// #pragma Helper Functions | |
function reify(iter, seq) { | |
return isSeq(iter) ? seq : iter.constructor(seq); | |
} | |
function validateEntry(entry) { | |
if (entry !== Object(entry)) { | |
throw new TypeError('Expected [K, V] tuple: ' + entry); | |
} | |
} | |
function resolveSize(iter) { | |
assertNotInfinite(iter.size); | |
return ensureSize(iter); | |
} | |
function iterableClass(iterable) { | |
return isKeyed(iterable) ? KeyedIterable : | |
isIndexed(iterable) ? IndexedIterable : | |
SetIterable; | |
} | |
function makeSequence(iterable) { | |
return Object.create( | |
( | |
isKeyed(iterable) ? KeyedSeq : | |
isIndexed(iterable) ? IndexedSeq : | |
SetSeq | |
).prototype | |
); | |
} | |
function cacheResultThrough() { | |
if (this._iter.cacheResult) { | |
this._iter.cacheResult(); | |
this.size = this._iter.size; | |
return this; | |
} else { | |
return Seq.prototype.cacheResult.call(this); | |
} | |
} | |
function defaultComparator(a, b) { | |
return a > b ? 1 : a < b ? -1 : 0; | |
} | |
function forceIterator(keyPath) { | |
var iter = getIterator(keyPath); | |
if (!iter) { | |
// Array might not be iterable in this environment, so we need a fallback | |
// to our wrapped type. | |
if (!isArrayLike(keyPath)) { | |
throw new TypeError('Expected iterable or array-like: ' + keyPath); | |
} | |
iter = getIterator(Iterable(keyPath)); | |
} | |
return iter; | |
} | |
createClass(src_Map__Map, KeyedCollection); | |
// @pragma Construction | |
function src_Map__Map(value) { | |
return value === null || value === undefined ? emptyMap() : | |
isMap(value) ? value : | |
emptyMap().withMutations(function(map ) { | |
var iter = KeyedIterable(value); | |
assertNotInfinite(iter.size); | |
iter.forEach(function(v, k) {return map.set(k, v)}); | |
}); | |
} | |
src_Map__Map.prototype.toString = function() { | |
return this.__toString('Map {', '}'); | |
}; | |
// @pragma Access | |
src_Map__Map.prototype.get = function(k, notSetValue) { | |
return this._root ? | |
this._root.get(0, undefined, k, notSetValue) : | |
notSetValue; | |
}; | |
// @pragma Modification | |
src_Map__Map.prototype.set = function(k, v) { | |
return updateMap(this, k, v); | |
}; | |
src_Map__Map.prototype.setIn = function(keyPath, v) { | |
return this.updateIn(keyPath, NOT_SET, function() {return v}); | |
}; | |
src_Map__Map.prototype.remove = function(k) { | |
return updateMap(this, k, NOT_SET); | |
}; | |
src_Map__Map.prototype.deleteIn = function(keyPath) { | |
return this.updateIn(keyPath, function() {return NOT_SET}); | |
}; | |
src_Map__Map.prototype.update = function(k, notSetValue, updater) { | |
return arguments.length === 1 ? | |
k(this) : | |
this.updateIn([k], notSetValue, updater); | |
}; | |
src_Map__Map.prototype.updateIn = function(keyPath, notSetValue, updater) { | |
if (!updater) { | |
updater = notSetValue; | |
notSetValue = undefined; | |
} | |
var updatedValue = updateInDeepMap( | |
this, | |
forceIterator(keyPath), | |
notSetValue, | |
updater | |
); | |
return updatedValue === NOT_SET ? undefined : updatedValue; | |
}; | |
src_Map__Map.prototype.clear = function() { | |
if (this.size === 0) { | |
return this; | |
} | |
if (this.__ownerID) { | |
this.size = 0; | |
this._root = null; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return emptyMap(); | |
}; | |
// @pragma Composition | |
src_Map__Map.prototype.merge = function(/*...iters*/) { | |
return mergeIntoMapWith(this, undefined, arguments); | |
}; | |
src_Map__Map.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1); | |
return mergeIntoMapWith(this, merger, iters); | |
}; | |
src_Map__Map.prototype.mergeIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1); | |
return this.updateIn(keyPath, emptyMap(), function(m ) {return m.merge.apply(m, iters)}); | |
}; | |
src_Map__Map.prototype.mergeDeep = function(/*...iters*/) { | |
return mergeIntoMapWith(this, deepMerger(undefined), arguments); | |
}; | |
src_Map__Map.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1); | |
return mergeIntoMapWith(this, deepMerger(merger), iters); | |
}; | |
src_Map__Map.prototype.mergeDeepIn = function(keyPath) {var iters = SLICE$0.call(arguments, 1); | |
return this.updateIn(keyPath, emptyMap(), function(m ) {return m.mergeDeep.apply(m, iters)}); | |
}; | |
src_Map__Map.prototype.sort = function(comparator) { | |
// Late binding | |
return OrderedMap(sortFactory(this, comparator)); | |
}; | |
src_Map__Map.prototype.sortBy = function(mapper, comparator) { | |
// Late binding | |
return OrderedMap(sortFactory(this, comparator, mapper)); | |
}; | |
// @pragma Mutability | |
src_Map__Map.prototype.withMutations = function(fn) { | |
var mutable = this.asMutable(); | |
fn(mutable); | |
return mutable.wasAltered() ? mutable.__ensureOwner(this.__ownerID) : this; | |
}; | |
src_Map__Map.prototype.asMutable = function() { | |
return this.__ownerID ? this : this.__ensureOwner(new OwnerID()); | |
}; | |
src_Map__Map.prototype.asImmutable = function() { | |
return this.__ensureOwner(); | |
}; | |
src_Map__Map.prototype.wasAltered = function() { | |
return this.__altered; | |
}; | |
src_Map__Map.prototype.__iterator = function(type, reverse) { | |
return new MapIterator(this, type, reverse); | |
}; | |
src_Map__Map.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
var iterations = 0; | |
this._root && this._root.iterate(function(entry ) { | |
iterations++; | |
return fn(entry[1], entry[0], this$0); | |
}, reverse); | |
return iterations; | |
}; | |
src_Map__Map.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
this.__altered = false; | |
return this; | |
} | |
return makeMap(this.size, this._root, ownerID, this.__hash); | |
}; | |
function isMap(maybeMap) { | |
return !!(maybeMap && maybeMap[IS_MAP_SENTINEL]); | |
} | |
src_Map__Map.isMap = isMap; | |
var IS_MAP_SENTINEL = '@@__IMMUTABLE_MAP__@@'; | |
var MapPrototype = src_Map__Map.prototype; | |
MapPrototype[IS_MAP_SENTINEL] = true; | |
MapPrototype[DELETE] = MapPrototype.remove; | |
MapPrototype.removeIn = MapPrototype.deleteIn; | |
// #pragma Trie Nodes | |
function ArrayMapNode(ownerID, entries) { | |
this.ownerID = ownerID; | |
this.entries = entries; | |
} | |
ArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) { | |
var entries = this.entries; | |
for (var ii = 0, len = entries.length; ii < len; ii++) { | |
if (is(key, entries[ii][0])) { | |
return entries[ii][1]; | |
} | |
} | |
return notSetValue; | |
}; | |
ArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
var removed = value === NOT_SET; | |
var entries = this.entries; | |
var idx = 0; | |
for (var len = entries.length; idx < len; idx++) { | |
if (is(key, entries[idx][0])) { | |
break; | |
} | |
} | |
var exists = idx < len; | |
if (exists ? entries[idx][1] === value : removed) { | |
return this; | |
} | |
SetRef(didAlter); | |
(removed || !exists) && SetRef(didChangeSize); | |
if (removed && entries.length === 1) { | |
return; // undefined | |
} | |
if (!exists && !removed && entries.length >= MAX_ARRAY_MAP_SIZE) { | |
return createNodes(ownerID, entries, key, value); | |
} | |
var isEditable = ownerID && ownerID === this.ownerID; | |
var newEntries = isEditable ? entries : arrCopy(entries); | |
if (exists) { | |
if (removed) { | |
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop()); | |
} else { | |
newEntries[idx] = [key, value]; | |
} | |
} else { | |
newEntries.push([key, value]); | |
} | |
if (isEditable) { | |
this.entries = newEntries; | |
return this; | |
} | |
return new ArrayMapNode(ownerID, newEntries); | |
}; | |
function BitmapIndexedNode(ownerID, bitmap, nodes) { | |
this.ownerID = ownerID; | |
this.bitmap = bitmap; | |
this.nodes = nodes; | |
} | |
BitmapIndexedNode.prototype.get = function(shift, keyHash, key, notSetValue) { | |
if (keyHash === undefined) { | |
keyHash = hash(key); | |
} | |
var bit = (1 << ((shift === 0 ? keyHash : keyHash >>> shift) & MASK)); | |
var bitmap = this.bitmap; | |
return (bitmap & bit) === 0 ? notSetValue : | |
this.nodes[popCount(bitmap & (bit - 1))].get(shift + SHIFT, keyHash, key, notSetValue); | |
}; | |
BitmapIndexedNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
if (keyHash === undefined) { | |
keyHash = hash(key); | |
} | |
var keyHashFrag = (shift === 0 ? keyHash : keyHash >>> shift) & MASK; | |
var bit = 1 << keyHashFrag; | |
var bitmap = this.bitmap; | |
var exists = (bitmap & bit) !== 0; | |
if (!exists && value === NOT_SET) { | |
return this; | |
} | |
var idx = popCount(bitmap & (bit - 1)); | |
var nodes = this.nodes; | |
var node = exists ? nodes[idx] : undefined; | |
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter); | |
if (newNode === node) { | |
return this; | |
} | |
if (!exists && newNode && nodes.length >= MAX_BITMAP_INDEXED_SIZE) { | |
return expandNodes(ownerID, nodes, bitmap, keyHashFrag, newNode); | |
} | |
if (exists && !newNode && nodes.length === 2 && isLeafNode(nodes[idx ^ 1])) { | |
return nodes[idx ^ 1]; | |
} | |
if (exists && newNode && nodes.length === 1 && isLeafNode(newNode)) { | |
return newNode; | |
} | |
var isEditable = ownerID && ownerID === this.ownerID; | |
var newBitmap = exists ? newNode ? bitmap : bitmap ^ bit : bitmap | bit; | |
var newNodes = exists ? newNode ? | |
setIn(nodes, idx, newNode, isEditable) : | |
spliceOut(nodes, idx, isEditable) : | |
spliceIn(nodes, idx, newNode, isEditable); | |
if (isEditable) { | |
this.bitmap = newBitmap; | |
this.nodes = newNodes; | |
return this; | |
} | |
return new BitmapIndexedNode(ownerID, newBitmap, newNodes); | |
}; | |
function HashArrayMapNode(ownerID, count, nodes) { | |
this.ownerID = ownerID; | |
this.count = count; | |
this.nodes = nodes; | |
} | |
HashArrayMapNode.prototype.get = function(shift, keyHash, key, notSetValue) { | |
if (keyHash === undefined) { | |
keyHash = hash(key); | |
} | |
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK; | |
var node = this.nodes[idx]; | |
return node ? node.get(shift + SHIFT, keyHash, key, notSetValue) : notSetValue; | |
}; | |
HashArrayMapNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
if (keyHash === undefined) { | |
keyHash = hash(key); | |
} | |
var idx = (shift === 0 ? keyHash : keyHash >>> shift) & MASK; | |
var removed = value === NOT_SET; | |
var nodes = this.nodes; | |
var node = nodes[idx]; | |
if (removed && !node) { | |
return this; | |
} | |
var newNode = updateNode(node, ownerID, shift + SHIFT, keyHash, key, value, didChangeSize, didAlter); | |
if (newNode === node) { | |
return this; | |
} | |
var newCount = this.count; | |
if (!node) { | |
newCount++; | |
} else if (!newNode) { | |
newCount--; | |
if (newCount < MIN_HASH_ARRAY_MAP_SIZE) { | |
return packNodes(ownerID, nodes, newCount, idx); | |
} | |
} | |
var isEditable = ownerID && ownerID === this.ownerID; | |
var newNodes = setIn(nodes, idx, newNode, isEditable); | |
if (isEditable) { | |
this.count = newCount; | |
this.nodes = newNodes; | |
return this; | |
} | |
return new HashArrayMapNode(ownerID, newCount, newNodes); | |
}; | |
function HashCollisionNode(ownerID, keyHash, entries) { | |
this.ownerID = ownerID; | |
this.keyHash = keyHash; | |
this.entries = entries; | |
} | |
HashCollisionNode.prototype.get = function(shift, keyHash, key, notSetValue) { | |
var entries = this.entries; | |
for (var ii = 0, len = entries.length; ii < len; ii++) { | |
if (is(key, entries[ii][0])) { | |
return entries[ii][1]; | |
} | |
} | |
return notSetValue; | |
}; | |
HashCollisionNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
if (keyHash === undefined) { | |
keyHash = hash(key); | |
} | |
var removed = value === NOT_SET; | |
if (keyHash !== this.keyHash) { | |
if (removed) { | |
return this; | |
} | |
SetRef(didAlter); | |
SetRef(didChangeSize); | |
return mergeIntoNode(this, ownerID, shift, keyHash, [key, value]); | |
} | |
var entries = this.entries; | |
var idx = 0; | |
for (var len = entries.length; idx < len; idx++) { | |
if (is(key, entries[idx][0])) { | |
break; | |
} | |
} | |
var exists = idx < len; | |
if (exists ? entries[idx][1] === value : removed) { | |
return this; | |
} | |
SetRef(didAlter); | |
(removed || !exists) && SetRef(didChangeSize); | |
if (removed && len === 2) { | |
return new ValueNode(ownerID, this.keyHash, entries[idx ^ 1]); | |
} | |
var isEditable = ownerID && ownerID === this.ownerID; | |
var newEntries = isEditable ? entries : arrCopy(entries); | |
if (exists) { | |
if (removed) { | |
idx === len - 1 ? newEntries.pop() : (newEntries[idx] = newEntries.pop()); | |
} else { | |
newEntries[idx] = [key, value]; | |
} | |
} else { | |
newEntries.push([key, value]); | |
} | |
if (isEditable) { | |
this.entries = newEntries; | |
return this; | |
} | |
return new HashCollisionNode(ownerID, this.keyHash, newEntries); | |
}; | |
function ValueNode(ownerID, keyHash, entry) { | |
this.ownerID = ownerID; | |
this.keyHash = keyHash; | |
this.entry = entry; | |
} | |
ValueNode.prototype.get = function(shift, keyHash, key, notSetValue) { | |
return is(key, this.entry[0]) ? this.entry[1] : notSetValue; | |
}; | |
ValueNode.prototype.update = function(ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
var removed = value === NOT_SET; | |
var keyMatch = is(key, this.entry[0]); | |
if (keyMatch ? value === this.entry[1] : removed) { | |
return this; | |
} | |
SetRef(didAlter); | |
if (removed) { | |
SetRef(didChangeSize); | |
return; // undefined | |
} | |
if (keyMatch) { | |
if (ownerID && ownerID === this.ownerID) { | |
this.entry[1] = value; | |
return this; | |
} | |
return new ValueNode(ownerID, this.keyHash, [key, value]); | |
} | |
SetRef(didChangeSize); | |
return mergeIntoNode(this, ownerID, shift, hash(key), [key, value]); | |
}; | |
// #pragma Iterators | |
ArrayMapNode.prototype.iterate = | |
HashCollisionNode.prototype.iterate = function (fn, reverse) { | |
var entries = this.entries; | |
for (var ii = 0, maxIndex = entries.length - 1; ii <= maxIndex; ii++) { | |
if (fn(entries[reverse ? maxIndex - ii : ii]) === false) { | |
return false; | |
} | |
} | |
} | |
BitmapIndexedNode.prototype.iterate = | |
HashArrayMapNode.prototype.iterate = function (fn, reverse) { | |
var nodes = this.nodes; | |
for (var ii = 0, maxIndex = nodes.length - 1; ii <= maxIndex; ii++) { | |
var node = nodes[reverse ? maxIndex - ii : ii]; | |
if (node && node.iterate(fn, reverse) === false) { | |
return false; | |
} | |
} | |
} | |
ValueNode.prototype.iterate = function (fn, reverse) { | |
return fn(this.entry); | |
} | |
createClass(MapIterator, src_Iterator__Iterator); | |
function MapIterator(map, type, reverse) { | |
this._type = type; | |
this._reverse = reverse; | |
this._stack = map._root && mapIteratorFrame(map._root); | |
} | |
MapIterator.prototype.next = function() { | |
var type = this._type; | |
var stack = this._stack; | |
while (stack) { | |
var node = stack.node; | |
var index = stack.index++; | |
var maxIndex; | |
if (node.entry) { | |
if (index === 0) { | |
return mapIteratorValue(type, node.entry); | |
} | |
} else if (node.entries) { | |
maxIndex = node.entries.length - 1; | |
if (index <= maxIndex) { | |
return mapIteratorValue(type, node.entries[this._reverse ? maxIndex - index : index]); | |
} | |
} else { | |
maxIndex = node.nodes.length - 1; | |
if (index <= maxIndex) { | |
var subNode = node.nodes[this._reverse ? maxIndex - index : index]; | |
if (subNode) { | |
if (subNode.entry) { | |
return mapIteratorValue(type, subNode.entry); | |
} | |
stack = this._stack = mapIteratorFrame(subNode, stack); | |
} | |
continue; | |
} | |
} | |
stack = this._stack = this._stack.__prev; | |
} | |
return iteratorDone(); | |
}; | |
function mapIteratorValue(type, entry) { | |
return iteratorValue(type, entry[0], entry[1]); | |
} | |
function mapIteratorFrame(node, prev) { | |
return { | |
node: node, | |
index: 0, | |
__prev: prev | |
}; | |
} | |
function makeMap(size, root, ownerID, hash) { | |
var map = Object.create(MapPrototype); | |
map.size = size; | |
map._root = root; | |
map.__ownerID = ownerID; | |
map.__hash = hash; | |
map.__altered = false; | |
return map; | |
} | |
var EMPTY_MAP; | |
function emptyMap() { | |
return EMPTY_MAP || (EMPTY_MAP = makeMap(0)); | |
} | |
function updateMap(map, k, v) { | |
var newRoot; | |
var newSize; | |
if (!map._root) { | |
if (v === NOT_SET) { | |
return map; | |
} | |
newSize = 1; | |
newRoot = new ArrayMapNode(map.__ownerID, [[k, v]]); | |
} else { | |
var didChangeSize = MakeRef(CHANGE_LENGTH); | |
var didAlter = MakeRef(DID_ALTER); | |
newRoot = updateNode(map._root, map.__ownerID, 0, undefined, k, v, didChangeSize, didAlter); | |
if (!didAlter.value) { | |
return map; | |
} | |
newSize = map.size + (didChangeSize.value ? v === NOT_SET ? -1 : 1 : 0); | |
} | |
if (map.__ownerID) { | |
map.size = newSize; | |
map._root = newRoot; | |
map.__hash = undefined; | |
map.__altered = true; | |
return map; | |
} | |
return newRoot ? makeMap(newSize, newRoot) : emptyMap(); | |
} | |
function updateNode(node, ownerID, shift, keyHash, key, value, didChangeSize, didAlter) { | |
if (!node) { | |
if (value === NOT_SET) { | |
return node; | |
} | |
SetRef(didAlter); | |
SetRef(didChangeSize); | |
return new ValueNode(ownerID, keyHash, [key, value]); | |
} | |
return node.update(ownerID, shift, keyHash, key, value, didChangeSize, didAlter); | |
} | |
function isLeafNode(node) { | |
return node.constructor === ValueNode || node.constructor === HashCollisionNode; | |
} | |
function mergeIntoNode(node, ownerID, shift, keyHash, entry) { | |
if (node.keyHash === keyHash) { | |
return new HashCollisionNode(ownerID, keyHash, [node.entry, entry]); | |
} | |
var idx1 = (shift === 0 ? node.keyHash : node.keyHash >>> shift) & MASK; | |
var idx2 = (shift === 0 ? keyHash : keyHash >>> shift) & MASK; | |
var newNode; | |
var nodes = idx1 === idx2 ? | |
[mergeIntoNode(node, ownerID, shift + SHIFT, keyHash, entry)] : | |
((newNode = new ValueNode(ownerID, keyHash, entry)), idx1 < idx2 ? [node, newNode] : [newNode, node]); | |
return new BitmapIndexedNode(ownerID, (1 << idx1) | (1 << idx2), nodes); | |
} | |
function createNodes(ownerID, entries, key, value) { | |
if (!ownerID) { | |
ownerID = new OwnerID(); | |
} | |
var node = new ValueNode(ownerID, hash(key), [key, value]); | |
for (var ii = 0; ii < entries.length; ii++) { | |
var entry = entries[ii]; | |
node = node.update(ownerID, 0, undefined, entry[0], entry[1]); | |
} | |
return node; | |
} | |
function packNodes(ownerID, nodes, count, excluding) { | |
var bitmap = 0; | |
var packedII = 0; | |
var packedNodes = new Array(count); | |
for (var ii = 0, bit = 1, len = nodes.length; ii < len; ii++, bit <<= 1) { | |
var node = nodes[ii]; | |
if (node !== undefined && ii !== excluding) { | |
bitmap |= bit; | |
packedNodes[packedII++] = node; | |
} | |
} | |
return new BitmapIndexedNode(ownerID, bitmap, packedNodes); | |
} | |
function expandNodes(ownerID, nodes, bitmap, including, node) { | |
var count = 0; | |
var expandedNodes = new Array(SIZE); | |
for (var ii = 0; bitmap !== 0; ii++, bitmap >>>= 1) { | |
expandedNodes[ii] = bitmap & 1 ? nodes[count++] : undefined; | |
} | |
expandedNodes[including] = node; | |
return new HashArrayMapNode(ownerID, count + 1, expandedNodes); | |
} | |
function mergeIntoMapWith(map, merger, iterables) { | |
var iters = []; | |
for (var ii = 0; ii < iterables.length; ii++) { | |
var value = iterables[ii]; | |
var iter = KeyedIterable(value); | |
if (!isIterable(value)) { | |
iter = iter.map(function(v ) {return fromJS(v)}); | |
} | |
iters.push(iter); | |
} | |
return mergeIntoCollectionWith(map, merger, iters); | |
} | |
function deepMerger(merger) { | |
return function(existing, value, key) | |
{return existing && existing.mergeDeepWith && isIterable(value) ? | |
existing.mergeDeepWith(merger, value) : | |
merger ? merger(existing, value, key) : value}; | |
} | |
function mergeIntoCollectionWith(collection, merger, iters) { | |
iters = iters.filter(function(x ) {return x.size !== 0}); | |
if (iters.length === 0) { | |
return collection; | |
} | |
if (collection.size === 0 && !collection.__ownerID && iters.length === 1) { | |
return collection.constructor(iters[0]); | |
} | |
return collection.withMutations(function(collection ) { | |
var mergeIntoMap = merger ? | |
function(value, key) { | |
collection.update(key, NOT_SET, function(existing ) | |
{return existing === NOT_SET ? value : merger(existing, value, key)} | |
); | |
} : | |
function(value, key) { | |
collection.set(key, value); | |
} | |
for (var ii = 0; ii < iters.length; ii++) { | |
iters[ii].forEach(mergeIntoMap); | |
} | |
}); | |
} | |
function updateInDeepMap(existing, keyPathIter, notSetValue, updater) { | |
var isNotSet = existing === NOT_SET; | |
var step = keyPathIter.next(); | |
if (step.done) { | |
var existingValue = isNotSet ? notSetValue : existing; | |
var newValue = updater(existingValue); | |
return newValue === existingValue ? existing : newValue; | |
} | |
invariant( | |
isNotSet || (existing && existing.set), | |
'invalid keyPath' | |
); | |
var key = step.value; | |
var nextExisting = isNotSet ? NOT_SET : existing.get(key, NOT_SET); | |
var nextUpdated = updateInDeepMap( | |
nextExisting, | |
keyPathIter, | |
notSetValue, | |
updater | |
); | |
return nextUpdated === nextExisting ? existing : | |
nextUpdated === NOT_SET ? existing.remove(key) : | |
(isNotSet ? emptyMap() : existing).set(key, nextUpdated); | |
} | |
function popCount(x) { | |
x = x - ((x >> 1) & 0x55555555); | |
x = (x & 0x33333333) + ((x >> 2) & 0x33333333); | |
x = (x + (x >> 4)) & 0x0f0f0f0f; | |
x = x + (x >> 8); | |
x = x + (x >> 16); | |
return x & 0x7f; | |
} | |
function setIn(array, idx, val, canEdit) { | |
var newArray = canEdit ? array : arrCopy(array); | |
newArray[idx] = val; | |
return newArray; | |
} | |
function spliceIn(array, idx, val, canEdit) { | |
var newLen = array.length + 1; | |
if (canEdit && idx + 1 === newLen) { | |
array[idx] = val; | |
return array; | |
} | |
var newArray = new Array(newLen); | |
var after = 0; | |
for (var ii = 0; ii < newLen; ii++) { | |
if (ii === idx) { | |
newArray[ii] = val; | |
after = -1; | |
} else { | |
newArray[ii] = array[ii + after]; | |
} | |
} | |
return newArray; | |
} | |
function spliceOut(array, idx, canEdit) { | |
var newLen = array.length - 1; | |
if (canEdit && idx === newLen) { | |
array.pop(); | |
return array; | |
} | |
var newArray = new Array(newLen); | |
var after = 0; | |
for (var ii = 0; ii < newLen; ii++) { | |
if (ii === idx) { | |
after = 1; | |
} | |
newArray[ii] = array[ii + after]; | |
} | |
return newArray; | |
} | |
var MAX_ARRAY_MAP_SIZE = SIZE / 4; | |
var MAX_BITMAP_INDEXED_SIZE = SIZE / 2; | |
var MIN_HASH_ARRAY_MAP_SIZE = SIZE / 4; | |
createClass(List, IndexedCollection); | |
// @pragma Construction | |
function List(value) { | |
var empty = emptyList(); | |
if (value === null || value === undefined) { | |
return empty; | |
} | |
if (isList(value)) { | |
return value; | |
} | |
var iter = IndexedIterable(value); | |
var size = iter.size; | |
if (size === 0) { | |
return empty; | |
} | |
assertNotInfinite(size); | |
if (size > 0 && size < SIZE) { | |
return makeList(0, size, SHIFT, null, new VNode(iter.toArray())); | |
} | |
return empty.withMutations(function(list ) { | |
list.setSize(size); | |
iter.forEach(function(v, i) {return list.set(i, v)}); | |
}); | |
} | |
List.of = function(/*...values*/) { | |
return this(arguments); | |
}; | |
List.prototype.toString = function() { | |
return this.__toString('List [', ']'); | |
}; | |
// @pragma Access | |
List.prototype.get = function(index, notSetValue) { | |
index = wrapIndex(this, index); | |
if (index < 0 || index >= this.size) { | |
return notSetValue; | |
} | |
index += this._origin; | |
var node = listNodeFor(this, index); | |
return node && node.array[index & MASK]; | |
}; | |
// @pragma Modification | |
List.prototype.set = function(index, value) { | |
return updateList(this, index, value); | |
}; | |
List.prototype.remove = function(index) { | |
return !this.has(index) ? this : | |
index === 0 ? this.shift() : | |
index === this.size - 1 ? this.pop() : | |
this.splice(index, 1); | |
}; | |
List.prototype.clear = function() { | |
if (this.size === 0) { | |
return this; | |
} | |
if (this.__ownerID) { | |
this.size = this._origin = this._capacity = 0; | |
this._level = SHIFT; | |
this._root = this._tail = null; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return emptyList(); | |
}; | |
List.prototype.push = function(/*...values*/) { | |
var values = arguments; | |
var oldSize = this.size; | |
return this.withMutations(function(list ) { | |
setListBounds(list, 0, oldSize + values.length); | |
for (var ii = 0; ii < values.length; ii++) { | |
list.set(oldSize + ii, values[ii]); | |
} | |
}); | |
}; | |
List.prototype.pop = function() { | |
return setListBounds(this, 0, -1); | |
}; | |
List.prototype.unshift = function(/*...values*/) { | |
var values = arguments; | |
return this.withMutations(function(list ) { | |
setListBounds(list, -values.length); | |
for (var ii = 0; ii < values.length; ii++) { | |
list.set(ii, values[ii]); | |
} | |
}); | |
}; | |
List.prototype.shift = function() { | |
return setListBounds(this, 1); | |
}; | |
// @pragma Composition | |
List.prototype.merge = function(/*...iters*/) { | |
return mergeIntoListWith(this, undefined, arguments); | |
}; | |
List.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1); | |
return mergeIntoListWith(this, merger, iters); | |
}; | |
List.prototype.mergeDeep = function(/*...iters*/) { | |
return mergeIntoListWith(this, deepMerger(undefined), arguments); | |
}; | |
List.prototype.mergeDeepWith = function(merger) {var iters = SLICE$0.call(arguments, 1); | |
return mergeIntoListWith(this, deepMerger(merger), iters); | |
}; | |
List.prototype.setSize = function(size) { | |
return setListBounds(this, 0, size); | |
}; | |
// @pragma Iteration | |
List.prototype.slice = function(begin, end) { | |
var size = this.size; | |
if (wholeSlice(begin, end, size)) { | |
return this; | |
} | |
return setListBounds( | |
this, | |
resolveBegin(begin, size), | |
resolveEnd(end, size) | |
); | |
}; | |
List.prototype.__iterator = function(type, reverse) { | |
var index = 0; | |
var values = iterateList(this, reverse); | |
return new src_Iterator__Iterator(function() { | |
var value = values(); | |
return value === DONE ? | |
iteratorDone() : | |
iteratorValue(type, index++, value); | |
}); | |
}; | |
List.prototype.__iterate = function(fn, reverse) { | |
var index = 0; | |
var values = iterateList(this, reverse); | |
var value; | |
while ((value = values()) !== DONE) { | |
if (fn(value, index++, this) === false) { | |
break; | |
} | |
} | |
return index; | |
}; | |
List.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
return this; | |
} | |
return makeList(this._origin, this._capacity, this._level, this._root, this._tail, ownerID, this.__hash); | |
}; | |
function isList(maybeList) { | |
return !!(maybeList && maybeList[IS_LIST_SENTINEL]); | |
} | |
List.isList = isList; | |
var IS_LIST_SENTINEL = '@@__IMMUTABLE_LIST__@@'; | |
var ListPrototype = List.prototype; | |
ListPrototype[IS_LIST_SENTINEL] = true; | |
ListPrototype[DELETE] = ListPrototype.remove; | |
ListPrototype.setIn = MapPrototype.setIn; | |
ListPrototype.deleteIn = | |
ListPrototype.removeIn = MapPrototype.removeIn; | |
ListPrototype.update = MapPrototype.update; | |
ListPrototype.updateIn = MapPrototype.updateIn; | |
ListPrototype.mergeIn = MapPrototype.mergeIn; | |
ListPrototype.mergeDeepIn = MapPrototype.mergeDeepIn; | |
ListPrototype.withMutations = MapPrototype.withMutations; | |
ListPrototype.asMutable = MapPrototype.asMutable; | |
ListPrototype.asImmutable = MapPrototype.asImmutable; | |
ListPrototype.wasAltered = MapPrototype.wasAltered; | |
function VNode(array, ownerID) { | |
this.array = array; | |
this.ownerID = ownerID; | |
} | |
// TODO: seems like these methods are very similar | |
VNode.prototype.removeBefore = function(ownerID, level, index) { | |
if (index === level ? 1 << level : 0 || this.array.length === 0) { | |
return this; | |
} | |
var originIndex = (index >>> level) & MASK; | |
if (originIndex >= this.array.length) { | |
return new VNode([], ownerID); | |
} | |
var removingFirst = originIndex === 0; | |
var newChild; | |
if (level > 0) { | |
var oldChild = this.array[originIndex]; | |
newChild = oldChild && oldChild.removeBefore(ownerID, level - SHIFT, index); | |
if (newChild === oldChild && removingFirst) { | |
return this; | |
} | |
} | |
if (removingFirst && !newChild) { | |
return this; | |
} | |
var editable = editableVNode(this, ownerID); | |
if (!removingFirst) { | |
for (var ii = 0; ii < originIndex; ii++) { | |
editable.array[ii] = undefined; | |
} | |
} | |
if (newChild) { | |
editable.array[originIndex] = newChild; | |
} | |
return editable; | |
}; | |
VNode.prototype.removeAfter = function(ownerID, level, index) { | |
if (index === level ? 1 << level : 0 || this.array.length === 0) { | |
return this; | |
} | |
var sizeIndex = ((index - 1) >>> level) & MASK; | |
if (sizeIndex >= this.array.length) { | |
return this; | |
} | |
var removingLast = sizeIndex === this.array.length - 1; | |
var newChild; | |
if (level > 0) { | |
var oldChild = this.array[sizeIndex]; | |
newChild = oldChild && oldChild.removeAfter(ownerID, level - SHIFT, index); | |
if (newChild === oldChild && removingLast) { | |
return this; | |
} | |
} | |
if (removingLast && !newChild) { | |
return this; | |
} | |
var editable = editableVNode(this, ownerID); | |
if (!removingLast) { | |
editable.array.pop(); | |
} | |
if (newChild) { | |
editable.array[sizeIndex] = newChild; | |
} | |
return editable; | |
}; | |
var DONE = {}; | |
function iterateList(list, reverse) { | |
var left = list._origin; | |
var right = list._capacity; | |
var tailPos = getTailOffset(right); | |
var tail = list._tail; | |
return iterateNodeOrLeaf(list._root, list._level, 0); | |
function iterateNodeOrLeaf(node, level, offset) { | |
return level === 0 ? | |
iterateLeaf(node, offset) : | |
iterateNode(node, level, offset); | |
} | |
function iterateLeaf(node, offset) { | |
var array = offset === tailPos ? tail && tail.array : node && node.array; | |
var from = offset > left ? 0 : left - offset; | |
var to = right - offset; | |
if (to > SIZE) { | |
to = SIZE; | |
} | |
return function() { | |
if (from === to) { | |
return DONE; | |
} | |
var idx = reverse ? --to : from++; | |
return array && array[idx]; | |
}; | |
} | |
function iterateNode(node, level, offset) { | |
var values; | |
var array = node && node.array; | |
var from = offset > left ? 0 : (left - offset) >> level; | |
var to = ((right - offset) >> level) + 1; | |
if (to > SIZE) { | |
to = SIZE; | |
} | |
return function() { | |
do { | |
if (values) { | |
var value = values(); | |
if (value !== DONE) { | |
return value; | |
} | |
values = null; | |
} | |
if (from === to) { | |
return DONE; | |
} | |
var idx = reverse ? --to : from++; | |
values = iterateNodeOrLeaf( | |
array && array[idx], level - SHIFT, offset + (idx << level) | |
); | |
} while (true); | |
}; | |
} | |
} | |
function makeList(origin, capacity, level, root, tail, ownerID, hash) { | |
var list = Object.create(ListPrototype); | |
list.size = capacity - origin; | |
list._origin = origin; | |
list._capacity = capacity; | |
list._level = level; | |
list._root = root; | |
list._tail = tail; | |
list.__ownerID = ownerID; | |
list.__hash = hash; | |
list.__altered = false; | |
return list; | |
} | |
var EMPTY_LIST; | |
function emptyList() { | |
return EMPTY_LIST || (EMPTY_LIST = makeList(0, 0, SHIFT)); | |
} | |
function updateList(list, index, value) { | |
index = wrapIndex(list, index); | |
if (index >= list.size || index < 0) { | |
return list.withMutations(function(list ) { | |
index < 0 ? | |
setListBounds(list, index).set(0, value) : | |
setListBounds(list, 0, index + 1).set(index, value) | |
}); | |
} | |
index += list._origin; | |
var newTail = list._tail; | |
var newRoot = list._root; | |
var didAlter = MakeRef(DID_ALTER); | |
if (index >= getTailOffset(list._capacity)) { | |
newTail = updateVNode(newTail, list.__ownerID, 0, index, value, didAlter); | |
} else { | |
newRoot = updateVNode(newRoot, list.__ownerID, list._level, index, value, didAlter); | |
} | |
if (!didAlter.value) { | |
return list; | |
} | |
if (list.__ownerID) { | |
list._root = newRoot; | |
list._tail = newTail; | |
list.__hash = undefined; | |
list.__altered = true; | |
return list; | |
} | |
return makeList(list._origin, list._capacity, list._level, newRoot, newTail); | |
} | |
function updateVNode(node, ownerID, level, index, value, didAlter) { | |
var idx = (index >>> level) & MASK; | |
var nodeHas = node && idx < node.array.length; | |
if (!nodeHas && value === undefined) { | |
return node; | |
} | |
var newNode; | |
if (level > 0) { | |
var lowerNode = node && node.array[idx]; | |
var newLowerNode = updateVNode(lowerNode, ownerID, level - SHIFT, index, value, didAlter); | |
if (newLowerNode === lowerNode) { | |
return node; | |
} | |
newNode = editableVNode(node, ownerID); | |
newNode.array[idx] = newLowerNode; | |
return newNode; | |
} | |
if (nodeHas && node.array[idx] === value) { | |
return node; | |
} | |
SetRef(didAlter); | |
newNode = editableVNode(node, ownerID); | |
if (value === undefined && idx === newNode.array.length - 1) { | |
newNode.array.pop(); | |
} else { | |
newNode.array[idx] = value; | |
} | |
return newNode; | |
} | |
function editableVNode(node, ownerID) { | |
if (ownerID && node && ownerID === node.ownerID) { | |
return node; | |
} | |
return new VNode(node ? node.array.slice() : [], ownerID); | |
} | |
function listNodeFor(list, rawIndex) { | |
if (rawIndex >= getTailOffset(list._capacity)) { | |
return list._tail; | |
} | |
if (rawIndex < 1 << (list._level + SHIFT)) { | |
var node = list._root; | |
var level = list._level; | |
while (node && level > 0) { | |
node = node.array[(rawIndex >>> level) & MASK]; | |
level -= SHIFT; | |
} | |
return node; | |
} | |
} | |
function setListBounds(list, begin, end) { | |
var owner = list.__ownerID || new OwnerID(); | |
var oldOrigin = list._origin; | |
var oldCapacity = list._capacity; | |
var newOrigin = oldOrigin + begin; | |
var newCapacity = end === undefined ? oldCapacity : end < 0 ? oldCapacity + end : oldOrigin + end; | |
if (newOrigin === oldOrigin && newCapacity === oldCapacity) { | |
return list; | |
} | |
// If it's going to end after it starts, it's empty. | |
if (newOrigin >= newCapacity) { | |
return list.clear(); | |
} | |
var newLevel = list._level; | |
var newRoot = list._root; | |
// New origin might require creating a higher root. | |
var offsetShift = 0; | |
while (newOrigin + offsetShift < 0) { | |
newRoot = new VNode(newRoot && newRoot.array.length ? [undefined, newRoot] : [], owner); | |
newLevel += SHIFT; | |
offsetShift += 1 << newLevel; | |
} | |
if (offsetShift) { | |
newOrigin += offsetShift; | |
oldOrigin += offsetShift; | |
newCapacity += offsetShift; | |
oldCapacity += offsetShift; | |
} | |
var oldTailOffset = getTailOffset(oldCapacity); | |
var newTailOffset = getTailOffset(newCapacity); | |
// New size might require creating a higher root. | |
while (newTailOffset >= 1 << (newLevel + SHIFT)) { | |
newRoot = new VNode(newRoot && newRoot.array.length ? [newRoot] : [], owner); | |
newLevel += SHIFT; | |
} | |
// Locate or create the new tail. | |
var oldTail = list._tail; | |
var newTail = newTailOffset < oldTailOffset ? | |
listNodeFor(list, newCapacity - 1) : | |
newTailOffset > oldTailOffset ? new VNode([], owner) : oldTail; | |
// Merge Tail into tree. | |
if (oldTail && newTailOffset > oldTailOffset && newOrigin < oldCapacity && oldTail.array.length) { | |
newRoot = editableVNode(newRoot, owner); | |
var node = newRoot; | |
for (var level = newLevel; level > SHIFT; level -= SHIFT) { | |
var idx = (oldTailOffset >>> level) & MASK; | |
node = node.array[idx] = editableVNode(node.array[idx], owner); | |
} | |
node.array[(oldTailOffset >>> SHIFT) & MASK] = oldTail; | |
} | |
// If the size has been reduced, there's a chance the tail needs to be trimmed. | |
if (newCapacity < oldCapacity) { | |
newTail = newTail && newTail.removeAfter(owner, 0, newCapacity); | |
} | |
// If the new origin is within the tail, then we do not need a root. | |
if (newOrigin >= newTailOffset) { | |
newOrigin -= newTailOffset; | |
newCapacity -= newTailOffset; | |
newLevel = SHIFT; | |
newRoot = null; | |
newTail = newTail && newTail.removeBefore(owner, 0, newOrigin); | |
// Otherwise, if the root has been trimmed, garbage collect. | |
} else if (newOrigin > oldOrigin || newTailOffset < oldTailOffset) { | |
offsetShift = 0; | |
// Identify the new top root node of the subtree of the old root. | |
while (newRoot) { | |
var beginIndex = (newOrigin >>> newLevel) & MASK; | |
if (beginIndex !== (newTailOffset >>> newLevel) & MASK) { | |
break; | |
} | |
if (beginIndex) { | |
offsetShift += (1 << newLevel) * beginIndex; | |
} | |
newLevel -= SHIFT; | |
newRoot = newRoot.array[beginIndex]; | |
} | |
// Trim the new sides of the new root. | |
if (newRoot && newOrigin > oldOrigin) { | |
newRoot = newRoot.removeBefore(owner, newLevel, newOrigin - offsetShift); | |
} | |
if (newRoot && newTailOffset < oldTailOffset) { | |
newRoot = newRoot.removeAfter(owner, newLevel, newTailOffset - offsetShift); | |
} | |
if (offsetShift) { | |
newOrigin -= offsetShift; | |
newCapacity -= offsetShift; | |
} | |
} | |
if (list.__ownerID) { | |
list.size = newCapacity - newOrigin; | |
list._origin = newOrigin; | |
list._capacity = newCapacity; | |
list._level = newLevel; | |
list._root = newRoot; | |
list._tail = newTail; | |
list.__hash = undefined; | |
list.__altered = true; | |
return list; | |
} | |
return makeList(newOrigin, newCapacity, newLevel, newRoot, newTail); | |
} | |
function mergeIntoListWith(list, merger, iterables) { | |
var iters = []; | |
var maxSize = 0; | |
for (var ii = 0; ii < iterables.length; ii++) { | |
var value = iterables[ii]; | |
var iter = IndexedIterable(value); | |
if (iter.size > maxSize) { | |
maxSize = iter.size; | |
} | |
if (!isIterable(value)) { | |
iter = iter.map(function(v ) {return fromJS(v)}); | |
} | |
iters.push(iter); | |
} | |
if (maxSize > list.size) { | |
list = list.setSize(maxSize); | |
} | |
return mergeIntoCollectionWith(list, merger, iters); | |
} | |
function getTailOffset(size) { | |
return size < SIZE ? 0 : (((size - 1) >>> SHIFT) << SHIFT); | |
} | |
createClass(OrderedMap, src_Map__Map); | |
// @pragma Construction | |
function OrderedMap(value) { | |
return value === null || value === undefined ? emptyOrderedMap() : | |
isOrderedMap(value) ? value : | |
emptyOrderedMap().withMutations(function(map ) { | |
var iter = KeyedIterable(value); | |
assertNotInfinite(iter.size); | |
iter.forEach(function(v, k) {return map.set(k, v)}); | |
}); | |
} | |
OrderedMap.of = function(/*...values*/) { | |
return this(arguments); | |
}; | |
OrderedMap.prototype.toString = function() { | |
return this.__toString('OrderedMap {', '}'); | |
}; | |
// @pragma Access | |
OrderedMap.prototype.get = function(k, notSetValue) { | |
var index = this._map.get(k); | |
return index !== undefined ? this._list.get(index)[1] : notSetValue; | |
}; | |
// @pragma Modification | |
OrderedMap.prototype.clear = function() { | |
if (this.size === 0) { | |
return this; | |
} | |
if (this.__ownerID) { | |
this.size = 0; | |
this._map.clear(); | |
this._list.clear(); | |
return this; | |
} | |
return emptyOrderedMap(); | |
}; | |
OrderedMap.prototype.set = function(k, v) { | |
return updateOrderedMap(this, k, v); | |
}; | |
OrderedMap.prototype.remove = function(k) { | |
return updateOrderedMap(this, k, NOT_SET); | |
}; | |
OrderedMap.prototype.wasAltered = function() { | |
return this._map.wasAltered() || this._list.wasAltered(); | |
}; | |
OrderedMap.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
return this._list.__iterate( | |
function(entry ) {return entry && fn(entry[1], entry[0], this$0)}, | |
reverse | |
); | |
}; | |
OrderedMap.prototype.__iterator = function(type, reverse) { | |
return this._list.fromEntrySeq().__iterator(type, reverse); | |
}; | |
OrderedMap.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
var newMap = this._map.__ensureOwner(ownerID); | |
var newList = this._list.__ensureOwner(ownerID); | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
this._map = newMap; | |
this._list = newList; | |
return this; | |
} | |
return makeOrderedMap(newMap, newList, ownerID, this.__hash); | |
}; | |
function isOrderedMap(maybeOrderedMap) { | |
return isMap(maybeOrderedMap) && isOrdered(maybeOrderedMap); | |
} | |
OrderedMap.isOrderedMap = isOrderedMap; | |
OrderedMap.prototype[IS_ORDERED_SENTINEL] = true; | |
OrderedMap.prototype[DELETE] = OrderedMap.prototype.remove; | |
function makeOrderedMap(map, list, ownerID, hash) { | |
var omap = Object.create(OrderedMap.prototype); | |
omap.size = map ? map.size : 0; | |
omap._map = map; | |
omap._list = list; | |
omap.__ownerID = ownerID; | |
omap.__hash = hash; | |
return omap; | |
} | |
var EMPTY_ORDERED_MAP; | |
function emptyOrderedMap() { | |
return EMPTY_ORDERED_MAP || (EMPTY_ORDERED_MAP = makeOrderedMap(emptyMap(), emptyList())); | |
} | |
function updateOrderedMap(omap, k, v) { | |
var map = omap._map; | |
var list = omap._list; | |
var i = map.get(k); | |
var has = i !== undefined; | |
var newMap; | |
var newList; | |
if (v === NOT_SET) { // removed | |
if (!has) { | |
return omap; | |
} | |
if (list.size >= SIZE && list.size >= map.size * 2) { | |
newList = list.filter(function(entry, idx) {return entry !== undefined && i !== idx}); | |
newMap = newList.toKeyedSeq().map(function(entry ) {return entry[0]}).flip().toMap(); | |
if (omap.__ownerID) { | |
newMap.__ownerID = newList.__ownerID = omap.__ownerID; | |
} | |
} else { | |
newMap = map.remove(k); | |
newList = i === list.size - 1 ? list.pop() : list.set(i, undefined); | |
} | |
} else { | |
if (has) { | |
if (v === list.get(i)[1]) { | |
return omap; | |
} | |
newMap = map; | |
newList = list.set(i, [k, v]); | |
} else { | |
newMap = map.set(k, list.size); | |
newList = list.set(list.size, [k, v]); | |
} | |
} | |
if (omap.__ownerID) { | |
omap.size = newMap.size; | |
omap._map = newMap; | |
omap._list = newList; | |
omap.__hash = undefined; | |
return omap; | |
} | |
return makeOrderedMap(newMap, newList); | |
} | |
createClass(Stack, IndexedCollection); | |
// @pragma Construction | |
function Stack(value) { | |
return value === null || value === undefined ? emptyStack() : | |
isStack(value) ? value : | |
emptyStack().unshiftAll(value); | |
} | |
Stack.of = function(/*...values*/) { | |
return this(arguments); | |
}; | |
Stack.prototype.toString = function() { | |
return this.__toString('Stack [', ']'); | |
}; | |
// @pragma Access | |
Stack.prototype.get = function(index, notSetValue) { | |
var head = this._head; | |
index = wrapIndex(this, index); | |
while (head && index--) { | |
head = head.next; | |
} | |
return head ? head.value : notSetValue; | |
}; | |
Stack.prototype.peek = function() { | |
return this._head && this._head.value; | |
}; | |
// @pragma Modification | |
Stack.prototype.push = function(/*...values*/) { | |
if (arguments.length === 0) { | |
return this; | |
} | |
var newSize = this.size + arguments.length; | |
var head = this._head; | |
for (var ii = arguments.length - 1; ii >= 0; ii--) { | |
head = { | |
value: arguments[ii], | |
next: head | |
}; | |
} | |
if (this.__ownerID) { | |
this.size = newSize; | |
this._head = head; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return makeStack(newSize, head); | |
}; | |
Stack.prototype.pushAll = function(iter) { | |
iter = IndexedIterable(iter); | |
if (iter.size === 0) { | |
return this; | |
} | |
assertNotInfinite(iter.size); | |
var newSize = this.size; | |
var head = this._head; | |
iter.reverse().forEach(function(value ) { | |
newSize++; | |
head = { | |
value: value, | |
next: head | |
}; | |
}); | |
if (this.__ownerID) { | |
this.size = newSize; | |
this._head = head; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return makeStack(newSize, head); | |
}; | |
Stack.prototype.pop = function() { | |
return this.slice(1); | |
}; | |
Stack.prototype.unshift = function(/*...values*/) { | |
return this.push.apply(this, arguments); | |
}; | |
Stack.prototype.unshiftAll = function(iter) { | |
return this.pushAll(iter); | |
}; | |
Stack.prototype.shift = function() { | |
return this.pop.apply(this, arguments); | |
}; | |
Stack.prototype.clear = function() { | |
if (this.size === 0) { | |
return this; | |
} | |
if (this.__ownerID) { | |
this.size = 0; | |
this._head = undefined; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return emptyStack(); | |
}; | |
Stack.prototype.slice = function(begin, end) { | |
if (wholeSlice(begin, end, this.size)) { | |
return this; | |
} | |
var resolvedBegin = resolveBegin(begin, this.size); | |
var resolvedEnd = resolveEnd(end, this.size); | |
if (resolvedEnd !== this.size) { | |
// super.slice(begin, end); | |
return IndexedCollection.prototype.slice.call(this, begin, end); | |
} | |
var newSize = this.size - resolvedBegin; | |
var head = this._head; | |
while (resolvedBegin--) { | |
head = head.next; | |
} | |
if (this.__ownerID) { | |
this.size = newSize; | |
this._head = head; | |
this.__hash = undefined; | |
this.__altered = true; | |
return this; | |
} | |
return makeStack(newSize, head); | |
}; | |
// @pragma Mutability | |
Stack.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
this.__altered = false; | |
return this; | |
} | |
return makeStack(this.size, this._head, ownerID, this.__hash); | |
}; | |
// @pragma Iteration | |
Stack.prototype.__iterate = function(fn, reverse) { | |
if (reverse) { | |
return this.reverse().__iterate(fn); | |
} | |
var iterations = 0; | |
var node = this._head; | |
while (node) { | |
if (fn(node.value, iterations++, this) === false) { | |
break; | |
} | |
node = node.next; | |
} | |
return iterations; | |
}; | |
Stack.prototype.__iterator = function(type, reverse) { | |
if (reverse) { | |
return this.reverse().__iterator(type); | |
} | |
var iterations = 0; | |
var node = this._head; | |
return new src_Iterator__Iterator(function() { | |
if (node) { | |
var value = node.value; | |
node = node.next; | |
return iteratorValue(type, iterations++, value); | |
} | |
return iteratorDone(); | |
}); | |
}; | |
function isStack(maybeStack) { | |
return !!(maybeStack && maybeStack[IS_STACK_SENTINEL]); | |
} | |
Stack.isStack = isStack; | |
var IS_STACK_SENTINEL = '@@__IMMUTABLE_STACK__@@'; | |
var StackPrototype = Stack.prototype; | |
StackPrototype[IS_STACK_SENTINEL] = true; | |
StackPrototype.withMutations = MapPrototype.withMutations; | |
StackPrototype.asMutable = MapPrototype.asMutable; | |
StackPrototype.asImmutable = MapPrototype.asImmutable; | |
StackPrototype.wasAltered = MapPrototype.wasAltered; | |
function makeStack(size, head, ownerID, hash) { | |
var map = Object.create(StackPrototype); | |
map.size = size; | |
map._head = head; | |
map.__ownerID = ownerID; | |
map.__hash = hash; | |
map.__altered = false; | |
return map; | |
} | |
var EMPTY_STACK; | |
function emptyStack() { | |
return EMPTY_STACK || (EMPTY_STACK = makeStack(0)); | |
} | |
createClass(src_Set__Set, SetCollection); | |
// @pragma Construction | |
function src_Set__Set(value) { | |
return value === null || value === undefined ? emptySet() : | |
isSet(value) ? value : | |
emptySet().withMutations(function(set ) { | |
var iter = SetIterable(value); | |
assertNotInfinite(iter.size); | |
iter.forEach(function(v ) {return set.add(v)}); | |
}); | |
} | |
src_Set__Set.of = function(/*...values*/) { | |
return this(arguments); | |
}; | |
src_Set__Set.fromKeys = function(value) { | |
return this(KeyedIterable(value).keySeq()); | |
}; | |
src_Set__Set.prototype.toString = function() { | |
return this.__toString('Set {', '}'); | |
}; | |
// @pragma Access | |
src_Set__Set.prototype.has = function(value) { | |
return this._map.has(value); | |
}; | |
// @pragma Modification | |
src_Set__Set.prototype.add = function(value) { | |
return updateSet(this, this._map.set(value, true)); | |
}; | |
src_Set__Set.prototype.remove = function(value) { | |
return updateSet(this, this._map.remove(value)); | |
}; | |
src_Set__Set.prototype.clear = function() { | |
return updateSet(this, this._map.clear()); | |
}; | |
// @pragma Composition | |
src_Set__Set.prototype.union = function() {var iters = SLICE$0.call(arguments, 0); | |
iters = iters.filter(function(x ) {return x.size !== 0}); | |
if (iters.length === 0) { | |
return this; | |
} | |
if (this.size === 0 && !this.__ownerID && iters.length === 1) { | |
return this.constructor(iters[0]); | |
} | |
return this.withMutations(function(set ) { | |
for (var ii = 0; ii < iters.length; ii++) { | |
SetIterable(iters[ii]).forEach(function(value ) {return set.add(value)}); | |
} | |
}); | |
}; | |
src_Set__Set.prototype.intersect = function() {var iters = SLICE$0.call(arguments, 0); | |
if (iters.length === 0) { | |
return this; | |
} | |
iters = iters.map(function(iter ) {return SetIterable(iter)}); | |
var originalSet = this; | |
return this.withMutations(function(set ) { | |
originalSet.forEach(function(value ) { | |
if (!iters.every(function(iter ) {return iter.contains(value)})) { | |
set.remove(value); | |
} | |
}); | |
}); | |
}; | |
src_Set__Set.prototype.subtract = function() {var iters = SLICE$0.call(arguments, 0); | |
if (iters.length === 0) { | |
return this; | |
} | |
iters = iters.map(function(iter ) {return SetIterable(iter)}); | |
var originalSet = this; | |
return this.withMutations(function(set ) { | |
originalSet.forEach(function(value ) { | |
if (iters.some(function(iter ) {return iter.contains(value)})) { | |
set.remove(value); | |
} | |
}); | |
}); | |
}; | |
src_Set__Set.prototype.merge = function() { | |
return this.union.apply(this, arguments); | |
}; | |
src_Set__Set.prototype.mergeWith = function(merger) {var iters = SLICE$0.call(arguments, 1); | |
return this.union.apply(this, iters); | |
}; | |
src_Set__Set.prototype.sort = function(comparator) { | |
// Late binding | |
return OrderedSet(sortFactory(this, comparator)); | |
}; | |
src_Set__Set.prototype.sortBy = function(mapper, comparator) { | |
// Late binding | |
return OrderedSet(sortFactory(this, comparator, mapper)); | |
}; | |
src_Set__Set.prototype.wasAltered = function() { | |
return this._map.wasAltered(); | |
}; | |
src_Set__Set.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
return this._map.__iterate(function(_, k) {return fn(k, k, this$0)}, reverse); | |
}; | |
src_Set__Set.prototype.__iterator = function(type, reverse) { | |
return this._map.map(function(_, k) {return k}).__iterator(type, reverse); | |
}; | |
src_Set__Set.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
var newMap = this._map.__ensureOwner(ownerID); | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
this._map = newMap; | |
return this; | |
} | |
return this.__make(newMap, ownerID); | |
}; | |
function isSet(maybeSet) { | |
return !!(maybeSet && maybeSet[IS_SET_SENTINEL]); | |
} | |
src_Set__Set.isSet = isSet; | |
var IS_SET_SENTINEL = '@@__IMMUTABLE_SET__@@'; | |
var SetPrototype = src_Set__Set.prototype; | |
SetPrototype[IS_SET_SENTINEL] = true; | |
SetPrototype[DELETE] = SetPrototype.remove; | |
SetPrototype.mergeDeep = SetPrototype.merge; | |
SetPrototype.mergeDeepWith = SetPrototype.mergeWith; | |
SetPrototype.withMutations = MapPrototype.withMutations; | |
SetPrototype.asMutable = MapPrototype.asMutable; | |
SetPrototype.asImmutable = MapPrototype.asImmutable; | |
SetPrototype.__empty = emptySet; | |
SetPrototype.__make = makeSet; | |
function updateSet(set, newMap) { | |
if (set.__ownerID) { | |
set.size = newMap.size; | |
set._map = newMap; | |
return set; | |
} | |
return newMap === set._map ? set : | |
newMap.size === 0 ? set.__empty() : | |
set.__make(newMap); | |
} | |
function makeSet(map, ownerID) { | |
var set = Object.create(SetPrototype); | |
set.size = map ? map.size : 0; | |
set._map = map; | |
set.__ownerID = ownerID; | |
return set; | |
} | |
var EMPTY_SET; | |
function emptySet() { | |
return EMPTY_SET || (EMPTY_SET = makeSet(emptyMap())); | |
} | |
createClass(OrderedSet, src_Set__Set); | |
// @pragma Construction | |
function OrderedSet(value) { | |
return value === null || value === undefined ? emptyOrderedSet() : | |
isOrderedSet(value) ? value : | |
emptyOrderedSet().withMutations(function(set ) { | |
var iter = SetIterable(value); | |
assertNotInfinite(iter.size); | |
iter.forEach(function(v ) {return set.add(v)}); | |
}); | |
} | |
OrderedSet.of = function(/*...values*/) { | |
return this(arguments); | |
}; | |
OrderedSet.fromKeys = function(value) { | |
return this(KeyedIterable(value).keySeq()); | |
}; | |
OrderedSet.prototype.toString = function() { | |
return this.__toString('OrderedSet {', '}'); | |
}; | |
function isOrderedSet(maybeOrderedSet) { | |
return isSet(maybeOrderedSet) && isOrdered(maybeOrderedSet); | |
} | |
OrderedSet.isOrderedSet = isOrderedSet; | |
var OrderedSetPrototype = OrderedSet.prototype; | |
OrderedSetPrototype[IS_ORDERED_SENTINEL] = true; | |
OrderedSetPrototype.__empty = emptyOrderedSet; | |
OrderedSetPrototype.__make = makeOrderedSet; | |
function makeOrderedSet(map, ownerID) { | |
var set = Object.create(OrderedSetPrototype); | |
set.size = map ? map.size : 0; | |
set._map = map; | |
set.__ownerID = ownerID; | |
return set; | |
} | |
var EMPTY_ORDERED_SET; | |
function emptyOrderedSet() { | |
return EMPTY_ORDERED_SET || (EMPTY_ORDERED_SET = makeOrderedSet(emptyOrderedMap())); | |
} | |
createClass(Record, KeyedCollection); | |
function Record(defaultValues, name) { | |
var hasInitialized; | |
var RecordType = function Record(values) { | |
if (values instanceof RecordType) { | |
return values; | |
} | |
if (!(this instanceof RecordType)) { | |
return new RecordType(values); | |
} | |
if (!hasInitialized) { | |
hasInitialized = true; | |
var keys = Object.keys(defaultValues); | |
setProps(RecordTypePrototype, keys); | |
RecordTypePrototype.size = keys.length; | |
RecordTypePrototype._name = name; | |
RecordTypePrototype._keys = keys; | |
RecordTypePrototype._defaultValues = defaultValues; | |
} | |
this._map = src_Map__Map(values); | |
}; | |
var RecordTypePrototype = RecordType.prototype = Object.create(RecordPrototype); | |
RecordTypePrototype.constructor = RecordType; | |
return RecordType; | |
} | |
Record.prototype.toString = function() { | |
return this.__toString(recordName(this) + ' {', '}'); | |
}; | |
// @pragma Access | |
Record.prototype.has = function(k) { | |
return this._defaultValues.hasOwnProperty(k); | |
}; | |
Record.prototype.get = function(k, notSetValue) { | |
if (!this.has(k)) { | |
return notSetValue; | |
} | |
var defaultVal = this._defaultValues[k]; | |
return this._map ? this._map.get(k, defaultVal) : defaultVal; | |
}; | |
// @pragma Modification | |
Record.prototype.clear = function() { | |
if (this.__ownerID) { | |
this._map && this._map.clear(); | |
return this; | |
} | |
var RecordType = this.constructor; | |
return RecordType._empty || (RecordType._empty = makeRecord(this, emptyMap())); | |
}; | |
Record.prototype.set = function(k, v) { | |
if (!this.has(k)) { | |
throw new Error('Cannot set unknown key "' + k + '" on ' + recordName(this)); | |
} | |
var newMap = this._map && this._map.set(k, v); | |
if (this.__ownerID || newMap === this._map) { | |
return this; | |
} | |
return makeRecord(this, newMap); | |
}; | |
Record.prototype.remove = function(k) { | |
if (!this.has(k)) { | |
return this; | |
} | |
var newMap = this._map && this._map.remove(k); | |
if (this.__ownerID || newMap === this._map) { | |
return this; | |
} | |
return makeRecord(this, newMap); | |
}; | |
Record.prototype.wasAltered = function() { | |
return this._map.wasAltered(); | |
}; | |
Record.prototype.__iterator = function(type, reverse) {var this$0 = this; | |
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterator(type, reverse); | |
}; | |
Record.prototype.__iterate = function(fn, reverse) {var this$0 = this; | |
return KeyedIterable(this._defaultValues).map(function(_, k) {return this$0.get(k)}).__iterate(fn, reverse); | |
}; | |
Record.prototype.__ensureOwner = function(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
var newMap = this._map && this._map.__ensureOwner(ownerID); | |
if (!ownerID) { | |
this.__ownerID = ownerID; | |
this._map = newMap; | |
return this; | |
} | |
return makeRecord(this, newMap, ownerID); | |
}; | |
var RecordPrototype = Record.prototype; | |
RecordPrototype[DELETE] = RecordPrototype.remove; | |
RecordPrototype.deleteIn = | |
RecordPrototype.removeIn = MapPrototype.removeIn; | |
RecordPrototype.merge = MapPrototype.merge; | |
RecordPrototype.mergeWith = MapPrototype.mergeWith; | |
RecordPrototype.mergeIn = MapPrototype.mergeIn; | |
RecordPrototype.mergeDeep = MapPrototype.mergeDeep; | |
RecordPrototype.mergeDeepWith = MapPrototype.mergeDeepWith; | |
RecordPrototype.mergeDeepIn = MapPrototype.mergeDeepIn; | |
RecordPrototype.setIn = MapPrototype.setIn; | |
RecordPrototype.update = MapPrototype.update; | |
RecordPrototype.updateIn = MapPrototype.updateIn; | |
RecordPrototype.withMutations = MapPrototype.withMutations; | |
RecordPrototype.asMutable = MapPrototype.asMutable; | |
RecordPrototype.asImmutable = MapPrototype.asImmutable; | |
function makeRecord(likeRecord, map, ownerID) { | |
var record = Object.create(Object.getPrototypeOf(likeRecord)); | |
record._map = map; | |
record.__ownerID = ownerID; | |
return record; | |
} | |
function recordName(record) { | |
return record._name || record.constructor.name || 'Record'; | |
} | |
function setProps(prototype, names) { | |
try { | |
names.forEach(setProp.bind(undefined, prototype)); | |
} catch (error) { | |
// Object.defineProperty failed. Probably IE8. | |
} | |
} | |
function setProp(prototype, name) { | |
Object.defineProperty(prototype, name, { | |
get: function() { | |
return this.get(name); | |
}, | |
set: function(value) { | |
invariant(this.__ownerID, 'Cannot set on an immutable record.'); | |
this.set(name, value); | |
} | |
}); | |
} | |
function deepEqual(a, b) { | |
if (a === b) { | |
return true; | |
} | |
if ( | |
!isIterable(b) || | |
a.size !== undefined && b.size !== undefined && a.size !== b.size || | |
a.__hash !== undefined && b.__hash !== undefined && a.__hash !== b.__hash || | |
isKeyed(a) !== isKeyed(b) || | |
isIndexed(a) !== isIndexed(b) || | |
isOrdered(a) !== isOrdered(b) | |
) { | |
return false; | |
} | |
if (a.size === 0 && b.size === 0) { | |
return true; | |
} | |
var notAssociative = !isAssociative(a); | |
if (isOrdered(a)) { | |
var entries = a.entries(); | |
return b.every(function(v, k) { | |
var entry = entries.next().value; | |
return entry && is(entry[1], v) && (notAssociative || is(entry[0], k)); | |
}) && entries.next().done; | |
} | |
var flipped = false; | |
if (a.size === undefined) { | |
if (b.size === undefined) { | |
if (typeof a.cacheResult === 'function') { | |
a.cacheResult(); | |
} | |
} else { | |
flipped = true; | |
var _ = a; | |
a = b; | |
b = _; | |
} | |
} | |
var allEqual = true; | |
var bSize = b.__iterate(function(v, k) { | |
if (notAssociative ? !a.has(v) : | |
flipped ? !is(v, a.get(k, NOT_SET)) : !is(a.get(k, NOT_SET), v)) { | |
allEqual = false; | |
return false; | |
} | |
}); | |
return allEqual && a.size === bSize; | |
} | |
createClass(Range, IndexedSeq); | |
function Range(start, end, step) { | |
if (!(this instanceof Range)) { | |
return new Range(start, end, step); | |
} | |
invariant(step !== 0, 'Cannot step a Range by 0'); | |
start = start || 0; | |
if (end === undefined) { | |
end = Infinity; | |
} | |
step = step === undefined ? 1 : Math.abs(step); | |
if (end < start) { | |
step = -step; | |
} | |
this._start = start; | |
this._end = end; | |
this._step = step; | |
this.size = Math.max(0, Math.ceil((end - start) / step - 1) + 1); | |
if (this.size === 0) { | |
if (EMPTY_RANGE) { | |
return EMPTY_RANGE; | |
} | |
EMPTY_RANGE = this; | |
} | |
} | |
Range.prototype.toString = function() { | |
if (this.size === 0) { | |
return 'Range []'; | |
} | |
return 'Range [ ' + | |
this._start + '...' + this._end + | |
(this._step > 1 ? ' by ' + this._step : '') + | |
' ]'; | |
}; | |
Range.prototype.get = function(index, notSetValue) { | |
return this.has(index) ? | |
this._start + wrapIndex(this, index) * this._step : | |
notSetValue; | |
}; | |
Range.prototype.contains = function(searchValue) { | |
var possibleIndex = (searchValue - this._start) / this._step; | |
return possibleIndex >= 0 && | |
possibleIndex < this.size && | |
possibleIndex === Math.floor(possibleIndex); | |
}; | |
Range.prototype.slice = function(begin, end) { | |
if (wholeSlice(begin, end, this.size)) { | |
return this; | |
} | |
begin = resolveBegin(begin, this.size); | |
end = resolveEnd(end, this.size); | |
if (end <= begin) { | |
return new Range(0, 0); | |
} | |
return new Range(this.get(begin, this._end), this.get(end, this._end), this._step); | |
}; | |
Range.prototype.indexOf = function(searchValue) { | |
var offsetValue = searchValue - this._start; | |
if (offsetValue % this._step === 0) { | |
var index = offsetValue / this._step; | |
if (index >= 0 && index < this.size) { | |
return index | |
} | |
} | |
return -1; | |
}; | |
Range.prototype.lastIndexOf = function(searchValue) { | |
return this.indexOf(searchValue); | |
}; | |
Range.prototype.__iterate = function(fn, reverse) { | |
var maxIndex = this.size - 1; | |
var step = this._step; | |
var value = reverse ? this._start + maxIndex * step : this._start; | |
for (var ii = 0; ii <= maxIndex; ii++) { | |
if (fn(value, ii, this) === false) { | |
return ii + 1; | |
} | |
value += reverse ? -step : step; | |
} | |
return ii; | |
}; | |
Range.prototype.__iterator = function(type, reverse) { | |
var maxIndex = this.size - 1; | |
var step = this._step; | |
var value = reverse ? this._start + maxIndex * step : this._start; | |
var ii = 0; | |
return new src_Iterator__Iterator(function() { | |
var v = value; | |
value += reverse ? -step : step; | |
return ii > maxIndex ? iteratorDone() : iteratorValue(type, ii++, v); | |
}); | |
}; | |
Range.prototype.equals = function(other) { | |
return other instanceof Range ? | |
this._start === other._start && | |
this._end === other._end && | |
this._step === other._step : | |
deepEqual(this, other); | |
}; | |
var EMPTY_RANGE; | |
createClass(Repeat, IndexedSeq); | |
function Repeat(value, times) { | |
if (!(this instanceof Repeat)) { | |
return new Repeat(value, times); | |
} | |
this._value = value; | |
this.size = times === undefined ? Infinity : Math.max(0, times); | |
if (this.size === 0) { | |
if (EMPTY_REPEAT) { | |
return EMPTY_REPEAT; | |
} | |
EMPTY_REPEAT = this; | |
} | |
} | |
Repeat.prototype.toString = function() { | |
if (this.size === 0) { | |
return 'Repeat []'; | |
} | |
return 'Repeat [ ' + this._value + ' ' + this.size + ' times ]'; | |
}; | |
Repeat.prototype.get = function(index, notSetValue) { | |
return this.has(index) ? this._value : notSetValue; | |
}; | |
Repeat.prototype.contains = function(searchValue) { | |
return is(this._value, searchValue); | |
}; | |
Repeat.prototype.slice = function(begin, end) { | |
var size = this.size; | |
return wholeSlice(begin, end, size) ? this : | |
new Repeat(this._value, resolveEnd(end, size) - resolveBegin(begin, size)); | |
}; | |
Repeat.prototype.reverse = function() { | |
return this; | |
}; | |
Repeat.prototype.indexOf = function(searchValue) { | |
if (is(this._value, searchValue)) { | |
return 0; | |
} | |
return -1; | |
}; | |
Repeat.prototype.lastIndexOf = function(searchValue) { | |
if (is(this._value, searchValue)) { | |
return this.size; | |
} | |
return -1; | |
}; | |
Repeat.prototype.__iterate = function(fn, reverse) { | |
for (var ii = 0; ii < this.size; ii++) { | |
if (fn(this._value, ii, this) === false) { | |
return ii + 1; | |
} | |
} | |
return ii; | |
}; | |
Repeat.prototype.__iterator = function(type, reverse) {var this$0 = this; | |
var ii = 0; | |
return new src_Iterator__Iterator(function() | |
{return ii < this$0.size ? iteratorValue(type, ii++, this$0._value) : iteratorDone()} | |
); | |
}; | |
Repeat.prototype.equals = function(other) { | |
return other instanceof Repeat ? | |
is(this._value, other._value) : | |
deepEqual(other); | |
}; | |
var EMPTY_REPEAT; | |
/** | |
* Contributes additional methods to a constructor | |
*/ | |
function mixin(ctor, methods) { | |
var keyCopier = function(key ) { ctor.prototype[key] = methods[key]; }; | |
Object.keys(methods).forEach(keyCopier); | |
Object.getOwnPropertySymbols && | |
Object.getOwnPropertySymbols(methods).forEach(keyCopier); | |
return ctor; | |
} | |
Iterable.Iterator = src_Iterator__Iterator; | |
mixin(Iterable, { | |
// ### Conversion to other types | |
toArray: function() { | |
assertNotInfinite(this.size); | |
var array = new Array(this.size || 0); | |
this.valueSeq().__iterate(function(v, i) { array[i] = v; }); | |
return array; | |
}, | |
toIndexedSeq: function() { | |
return new ToIndexedSequence(this); | |
}, | |
toJS: function() { | |
return this.toSeq().map( | |
function(value ) {return value && typeof value.toJS === 'function' ? value.toJS() : value} | |
).__toJS(); | |
}, | |
toJSON: function() { | |
return this.toSeq().map( | |
function(value ) {return value && typeof value.toJSON === 'function' ? value.toJSON() : value} | |
).__toJS(); | |
}, | |
toKeyedSeq: function() { | |
return new ToKeyedSequence(this, true); | |
}, | |
toMap: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return src_Map__Map(this.toKeyedSeq()); | |
}, | |
toObject: function() { | |
assertNotInfinite(this.size); | |
var object = {}; | |
this.__iterate(function(v, k) { object[k] = v; }); | |
return object; | |
}, | |
toOrderedMap: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return OrderedMap(this.toKeyedSeq()); | |
}, | |
toOrderedSet: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return OrderedSet(isKeyed(this) ? this.valueSeq() : this); | |
}, | |
toSet: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return src_Set__Set(isKeyed(this) ? this.valueSeq() : this); | |
}, | |
toSetSeq: function() { | |
return new ToSetSequence(this); | |
}, | |
toSeq: function() { | |
return isIndexed(this) ? this.toIndexedSeq() : | |
isKeyed(this) ? this.toKeyedSeq() : | |
this.toSetSeq(); | |
}, | |
toStack: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return Stack(isKeyed(this) ? this.valueSeq() : this); | |
}, | |
toList: function() { | |
// Use Late Binding here to solve the circular dependency. | |
return List(isKeyed(this) ? this.valueSeq() : this); | |
}, | |
// ### Common JavaScript methods and properties | |
toString: function() { | |
return '[Iterable]'; | |
}, | |
__toString: function(head, tail) { | |
if (this.size === 0) { | |
return head + tail; | |
} | |
return head + ' ' + this.toSeq().map(this.__toStringMapper).join(', ') + ' ' + tail; | |
}, | |
// ### ES6 Collection methods (ES6 Array and Map) | |
concat: function() {var values = SLICE$0.call(arguments, 0); | |
return reify(this, concatFactory(this, values)); | |
}, | |
contains: function(searchValue) { | |
return this.some(function(value ) {return is(value, searchValue)}); | |
}, | |
entries: function() { | |
return this.__iterator(ITERATE_ENTRIES); | |
}, | |
every: function(predicate, context) { | |
assertNotInfinite(this.size); | |
var returnValue = true; | |
this.__iterate(function(v, k, c) { | |
if (!predicate.call(context, v, k, c)) { | |
returnValue = false; | |
return false; | |
} | |
}); | |
return returnValue; | |
}, | |
filter: function(predicate, context) { | |
return reify(this, filterFactory(this, predicate, context, true)); | |
}, | |
find: function(predicate, context, notSetValue) { | |
var entry = this.findEntry(predicate, context); | |
return entry ? entry[1] : notSetValue; | |
}, | |
findEntry: function(predicate, context) { | |
var found; | |
this.__iterate(function(v, k, c) { | |
if (predicate.call(context, v, k, c)) { | |
found = [k, v]; | |
return false; | |
} | |
}); | |
return found; | |
}, | |
findLastEntry: function(predicate, context) { | |
return this.toSeq().reverse().findEntry(predicate, context); | |
}, | |
forEach: function(sideEffect, context) { | |
assertNotInfinite(this.size); | |
return this.__iterate(context ? sideEffect.bind(context) : sideEffect); | |
}, | |
join: function(separator) { | |
assertNotInfinite(this.size); | |
separator = separator !== undefined ? '' + separator : ','; | |
var joined = ''; | |
var isFirst = true; | |
this.__iterate(function(v ) { | |
isFirst ? (isFirst = false) : (joined += separator); | |
joined += v !== null && v !== undefined ? v.toString() : ''; | |
}); | |
return joined; | |
}, | |
keys: function() { | |
return this.__iterator(ITERATE_KEYS); | |
}, | |
map: function(mapper, context) { | |
return reify(this, mapFactory(this, mapper, context)); | |
}, | |
reduce: function(reducer, initialReduction, context) { | |
assertNotInfinite(this.size); | |
var reduction; | |
var useFirst; | |
if (arguments.length < 2) { | |
useFirst = true; | |
} else { | |
reduction = initialReduction; | |
} | |
this.__iterate(function(v, k, c) { | |
if (useFirst) { | |
useFirst = false; | |
reduction = v; | |
} else { | |
reduction = reducer.call(context, reduction, v, k, c); | |
} | |
}); | |
return reduction; | |
}, | |
reduceRight: function(reducer, initialReduction, context) { | |
var reversed = this.toKeyedSeq().reverse(); | |
return reversed.reduce.apply(reversed, arguments); | |
}, | |
reverse: function() { | |
return reify(this, reverseFactory(this, true)); | |
}, | |
slice: function(begin, end) { | |
return reify(this, sliceFactory(this, begin, end, true)); | |
}, | |
some: function(predicate, context) { | |
return !this.every(not(predicate), context); | |
}, | |
sort: function(comparator) { | |
return reify(this, sortFactory(this, comparator)); | |
}, | |
values: function() { | |
return this.__iterator(ITERATE_VALUES); | |
}, | |
// ### More sequential methods | |
butLast: function() { | |
return this.slice(0, -1); | |
}, | |
isEmpty: function() { | |
return this.size !== undefined ? this.size === 0 : !this.some(function() {return true}); | |
}, | |
count: function(predicate, context) { | |
return ensureSize( | |
predicate ? this.toSeq().filter(predicate, context) : this | |
); | |
}, | |
countBy: function(grouper, context) { | |
return countByFactory(this, grouper, context); | |
}, | |
equals: function(other) { | |
return deepEqual(this, other); | |
}, | |
entrySeq: function() { | |
var iterable = this; | |
if (iterable._cache) { | |
// We cache as an entries array, so we can just return the cache! | |
return new ArraySeq(iterable._cache); | |
} | |
var entriesSequence = iterable.toSeq().map(entryMapper).toIndexedSeq(); | |
entriesSequence.fromEntrySeq = function() {return iterable.toSeq()}; | |
return entriesSequence; | |
}, | |
filterNot: function(predicate, context) { | |
return this.filter(not(predicate), context); | |
}, | |
findLast: function(predicate, context, notSetValue) { | |
return this.toKeyedSeq().reverse().find(predicate, context, notSetValue); | |
}, | |
first: function() { | |
return this.find(returnTrue); | |
}, | |
flatMap: function(mapper, context) { | |
return reify(this, flatMapFactory(this, mapper, context)); | |
}, | |
flatten: function(depth) { | |
return reify(this, flattenFactory(this, depth, true)); | |
}, | |
fromEntrySeq: function() { | |
return new FromEntriesSequence(this); | |
}, | |
get: function(searchKey, notSetValue) { | |
return this.find(function(_, key) {return is(key, searchKey)}, undefined, notSetValue); | |
}, | |
getIn: function(searchKeyPath, notSetValue) { | |
var nested = this; | |
// Note: in an ES6 environment, we would prefer: | |
// for (var key of searchKeyPath) { | |
var iter = forceIterator(searchKeyPath); | |
var step; | |
while (!(step = iter.next()).done) { | |
var key = step.value; | |
nested = nested && nested.get ? nested.get(key, NOT_SET) : NOT_SET; | |
if (nested === NOT_SET) { | |
return notSetValue; | |
} | |
} | |
return nested; | |
}, | |
groupBy: function(grouper, context) { | |
return groupByFactory(this, grouper, context); | |
}, | |
has: function(searchKey) { | |
return this.get(searchKey, NOT_SET) !== NOT_SET; | |
}, | |
hasIn: function(searchKeyPath) { | |
return this.getIn(searchKeyPath, NOT_SET) !== NOT_SET; | |
}, | |
isSubset: function(iter) { | |
iter = typeof iter.contains === 'function' ? iter : Iterable(iter); | |
return this.every(function(value ) {return iter.contains(value)}); | |
}, | |
isSuperset: function(iter) { | |
return iter.isSubset(this); | |
}, | |
keySeq: function() { | |
return this.toSeq().map(keyMapper).toIndexedSeq(); | |
}, | |
last: function() { | |
return this.toSeq().reverse().first(); | |
}, | |
max: function(comparator) { | |
return maxFactory(this, comparator); | |
}, | |
maxBy: function(mapper, comparator) { | |
return maxFactory(this, comparator, mapper); | |
}, | |
min: function(comparator) { | |
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator); | |
}, | |
minBy: function(mapper, comparator) { | |
return maxFactory(this, comparator ? neg(comparator) : defaultNegComparator, mapper); | |
}, | |
rest: function() { | |
return this.slice(1); | |
}, | |
skip: function(amount) { | |
return this.slice(Math.max(0, amount)); | |
}, | |
skipLast: function(amount) { | |
return reify(this, this.toSeq().reverse().skip(amount).reverse()); | |
}, | |
skipWhile: function(predicate, context) { | |
return reify(this, skipWhileFactory(this, predicate, context, true)); | |
}, | |
skipUntil: function(predicate, context) { | |
return this.skipWhile(not(predicate), context); | |
}, | |
sortBy: function(mapper, comparator) { | |
return reify(this, sortFactory(this, comparator, mapper)); | |
}, | |
take: function(amount) { | |
return this.slice(0, Math.max(0, amount)); | |
}, | |
takeLast: function(amount) { | |
return reify(this, this.toSeq().reverse().take(amount).reverse()); | |
}, | |
takeWhile: function(predicate, context) { | |
return reify(this, takeWhileFactory(this, predicate, context)); | |
}, | |
takeUntil: function(predicate, context) { | |
return this.takeWhile(not(predicate), context); | |
}, | |
valueSeq: function() { | |
return this.toIndexedSeq(); | |
}, | |
// ### Hashable Object | |
hashCode: function() { | |
return this.__hash || (this.__hash = hashIterable(this)); | |
}, | |
// ### Internal | |
// abstract __iterate(fn, reverse) | |
// abstract __iterator(type, reverse) | |
}); | |
// var IS_ITERABLE_SENTINEL = '@@__IMMUTABLE_ITERABLE__@@'; | |
// var IS_KEYED_SENTINEL = '@@__IMMUTABLE_KEYED__@@'; | |
// var IS_INDEXED_SENTINEL = '@@__IMMUTABLE_INDEXED__@@'; | |
// var IS_ORDERED_SENTINEL = '@@__IMMUTABLE_ORDERED__@@'; | |
var IterablePrototype = Iterable.prototype; | |
IterablePrototype[IS_ITERABLE_SENTINEL] = true; | |
IterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.values; | |
IterablePrototype.__toJS = IterablePrototype.toArray; | |
IterablePrototype.__toStringMapper = quoteString; | |
IterablePrototype.inspect = | |
IterablePrototype.toSource = function() { return this.toString(); }; | |
IterablePrototype.chain = IterablePrototype.flatMap; | |
// Temporary warning about using length | |
(function () { | |
try { | |
Object.defineProperty(IterablePrototype, 'length', { | |
get: function () { | |
if (!Iterable.noLengthWarning) { | |
var stack; | |
try { | |
throw new Error(); | |
} catch (error) { | |
stack = error.stack; | |
} | |
if (stack.indexOf('_wrapObject') === -1) { | |
console && console.warn && console.warn( | |
'iterable.length has been deprecated, '+ | |
'use iterable.size or iterable.count(). '+ | |
'This warning will become a silent error in a future version. ' + | |
stack | |
); | |
return this.size; | |
} | |
} | |
} | |
}); | |
} catch (e) {} | |
})(); | |
mixin(KeyedIterable, { | |
// ### More sequential methods | |
flip: function() { | |
return reify(this, flipFactory(this)); | |
}, | |
findKey: function(predicate, context) { | |
var entry = this.findEntry(predicate, context); | |
return entry && entry[0]; | |
}, | |
findLastKey: function(predicate, context) { | |
return this.toSeq().reverse().findKey(predicate, context); | |
}, | |
keyOf: function(searchValue) { | |
return this.findKey(function(value ) {return is(value, searchValue)}); | |
}, | |
lastKeyOf: function(searchValue) { | |
return this.findLastKey(function(value ) {return is(value, searchValue)}); | |
}, | |
mapEntries: function(mapper, context) {var this$0 = this; | |
var iterations = 0; | |
return reify(this, | |
this.toSeq().map( | |
function(v, k) {return mapper.call(context, [k, v], iterations++, this$0)} | |
).fromEntrySeq() | |
); | |
}, | |
mapKeys: function(mapper, context) {var this$0 = this; | |
return reify(this, | |
this.toSeq().flip().map( | |
function(k, v) {return mapper.call(context, k, v, this$0)} | |
).flip() | |
); | |
}, | |
}); | |
var KeyedIterablePrototype = KeyedIterable.prototype; | |
KeyedIterablePrototype[IS_KEYED_SENTINEL] = true; | |
KeyedIterablePrototype[ITERATOR_SYMBOL] = IterablePrototype.entries; | |
KeyedIterablePrototype.__toJS = IterablePrototype.toObject; | |
KeyedIterablePrototype.__toStringMapper = function(v, k) {return JSON.stringify(k) + ': ' + quoteString(v)}; | |
mixin(IndexedIterable, { | |
// ### Conversion to other types | |
toKeyedSeq: function() { | |
return new ToKeyedSequence(this, false); | |
}, | |
// ### ES6 Collection methods (ES6 Array and Map) | |
filter: function(predicate, context) { | |
return reify(this, filterFactory(this, predicate, context, false)); | |
}, | |
findIndex: function(predicate, context) { | |
var entry = this.findEntry(predicate, context); | |
return entry ? entry[0] : -1; | |
}, | |
indexOf: function(searchValue) { | |
var key = this.toKeyedSeq().keyOf(searchValue); | |
return key === undefined ? -1 : key; | |
}, | |
lastIndexOf: function(searchValue) { | |
return this.toSeq().reverse().indexOf(searchValue); | |
}, | |
reverse: function() { | |
return reify(this, reverseFactory(this, false)); | |
}, | |
slice: function(begin, end) { | |
return reify(this, sliceFactory(this, begin, end, false)); | |
}, | |
splice: function(index, removeNum /*, ...values*/) { | |
var numArgs = arguments.length; | |
removeNum = Math.max(removeNum | 0, 0); | |
if (numArgs === 0 || (numArgs === 2 && !removeNum)) { | |
return this; | |
} | |
index = resolveBegin(index, this.size); | |
var spliced = this.slice(0, index); | |
return reify( | |
this, | |
numArgs === 1 ? | |
spliced : | |
spliced.concat(arrCopy(arguments, 2), this.slice(index + removeNum)) | |
); | |
}, | |
// ### More collection methods | |
findLastIndex: function(predicate, context) { | |
var key = this.toKeyedSeq().findLastKey(predicate, context); | |
return key === undefined ? -1 : key; | |
}, | |
first: function() { | |
return this.get(0); | |
}, | |
flatten: function(depth) { | |
return reify(this, flattenFactory(this, depth, false)); | |
}, | |
get: function(index, notSetValue) { | |
index = wrapIndex(this, index); | |
return (index < 0 || (this.size === Infinity || | |
(this.size !== undefined && index > this.size))) ? | |
notSetValue : | |
this.find(function(_, key) {return key === index}, undefined, notSetValue); | |
}, | |
has: function(index) { | |
index = wrapIndex(this, index); | |
return index >= 0 && (this.size !== undefined ? | |
this.size === Infinity || index < this.size : | |
this.indexOf(index) !== -1 | |
); | |
}, | |
interpose: function(separator) { | |
return reify(this, interposeFactory(this, separator)); | |
}, | |
interleave: function(/*...iterables*/) { | |
var iterables = [this].concat(arrCopy(arguments)); | |
var zipped = zipWithFactory(this.toSeq(), IndexedSeq.of, iterables); | |
var interleaved = zipped.flatten(true); | |
if (zipped.size) { | |
interleaved.size = zipped.size * iterables.length; | |
} | |
return reify(this, interleaved); | |
}, | |
last: function() { | |
return this.get(-1); | |
}, | |
skipWhile: function(predicate, context) { | |
return reify(this, skipWhileFactory(this, predicate, context, false)); | |
}, | |
zip: function(/*, ...iterables */) { | |
var iterables = [this].concat(arrCopy(arguments)); | |
return reify(this, zipWithFactory(this, defaultZipper, iterables)); | |
}, | |
zipWith: function(zipper/*, ...iterables */) { | |
var iterables = arrCopy(arguments); | |
iterables[0] = this; | |
return reify(this, zipWithFactory(this, zipper, iterables)); | |
}, | |
}); | |
IndexedIterable.prototype[IS_INDEXED_SENTINEL] = true; | |
IndexedIterable.prototype[IS_ORDERED_SENTINEL] = true; | |
mixin(SetIterable, { | |
// ### ES6 Collection methods (ES6 Array and Map) | |
get: function(value, notSetValue) { | |
return this.has(value) ? value : notSetValue; | |
}, | |
contains: function(value) { | |
return this.has(value); | |
}, | |
// ### More sequential methods | |
keySeq: function() { | |
return this.valueSeq(); | |
}, | |
}); | |
SetIterable.prototype.has = IterablePrototype.contains; | |
// Mixin subclasses | |
mixin(KeyedSeq, KeyedIterable.prototype); | |
mixin(IndexedSeq, IndexedIterable.prototype); | |
mixin(SetSeq, SetIterable.prototype); | |
mixin(KeyedCollection, KeyedIterable.prototype); | |
mixin(IndexedCollection, IndexedIterable.prototype); | |
mixin(SetCollection, SetIterable.prototype); | |
// #pragma Helper functions | |
function keyMapper(v, k) { | |
return k; | |
} | |
function entryMapper(v, k) { | |
return [k, v]; | |
} | |
function not(predicate) { | |
return function() { | |
return !predicate.apply(this, arguments); | |
} | |
} | |
function neg(predicate) { | |
return function() { | |
return -predicate.apply(this, arguments); | |
} | |
} | |
function quoteString(value) { | |
return typeof value === 'string' ? JSON.stringify(value) : value; | |
} | |
function defaultZipper() { | |
return arrCopy(arguments); | |
} | |
function defaultNegComparator(a, b) { | |
return a < b ? 1 : a > b ? -1 : 0; | |
} | |
function hashIterable(iterable) { | |
if (iterable.size === Infinity) { | |
return 0; | |
} | |
var ordered = isOrdered(iterable); | |
var keyed = isKeyed(iterable); | |
var h = ordered ? 1 : 0; | |
var size = iterable.__iterate( | |
keyed ? | |
ordered ? | |
function(v, k) { h = 31 * h + hashMerge(hash(v), hash(k)) | 0; } : | |
function(v, k) { h = h + hashMerge(hash(v), hash(k)) | 0; } : | |
ordered ? | |
function(v ) { h = 31 * h + hash(v) | 0; } : | |
function(v ) { h = h + hash(v) | 0; } | |
); | |
return murmurHashOfSize(size, h); | |
} | |
function murmurHashOfSize(size, h) { | |
h = src_Math__imul(h, 0xCC9E2D51); | |
h = src_Math__imul(h << 15 | h >>> -15, 0x1B873593); | |
h = src_Math__imul(h << 13 | h >>> -13, 5); | |
h = (h + 0xE6546B64 | 0) ^ size; | |
h = src_Math__imul(h ^ h >>> 16, 0x85EBCA6B); | |
h = src_Math__imul(h ^ h >>> 13, 0xC2B2AE35); | |
h = smi(h ^ h >>> 16); | |
return h; | |
} | |
function hashMerge(a, b) { | |
return a ^ b + 0x9E3779B9 + (a << 6) + (a >> 2) | 0; // int | |
} | |
var Immutable = { | |
Iterable: Iterable, | |
Seq: Seq, | |
Collection: Collection, | |
Map: src_Map__Map, | |
OrderedMap: OrderedMap, | |
List: List, | |
Stack: Stack, | |
Set: src_Set__Set, | |
OrderedSet: OrderedSet, | |
Record: Record, | |
Range: Range, | |
Repeat: Repeat, | |
is: is, | |
fromJS: fromJS, | |
}; | |
return Immutable; | |
})); | |
},{}],83:[function(require,module,exports){ | |
"use strict"; | |
var utils = require('./utils'); | |
var merge = require('./merge'); | |
var errors = require('./deps/errors'); | |
var EventEmitter = require('events').EventEmitter; | |
var upsert = require('./deps/upsert'); | |
var Changes = require('./changes'); | |
var Promise = utils.Promise; | |
/* | |
* A generic pouch adapter | |
*/ | |
// returns first element of arr satisfying callback predicate | |
function arrayFirst(arr, callback) { | |
for (var i = 0; i < arr.length; i++) { | |
if (callback(arr[i], i) === true) { | |
return arr[i]; | |
} | |
} | |
return false; | |
} | |
// Wrapper for functions that call the bulkdocs api with a single doc, | |
// if the first result is an error, return an error | |
function yankError(callback) { | |
return function (err, results) { | |
if (err || (results[0] && results[0].error)) { | |
callback(err || results[0]); | |
} else { | |
callback(null, results.length ? results[0] : results); | |
} | |
}; | |
} | |
// for every node in a revision tree computes its distance from the closest | |
// leaf | |
function computeHeight(revs) { | |
var height = {}; | |
var edges = []; | |
merge.traverseRevTree(revs, function (isLeaf, pos, id, prnt) { | |
var rev = pos + "-" + id; | |
if (isLeaf) { | |
height[rev] = 0; | |
} | |
if (prnt !== undefined) { | |
edges.push({from: prnt, to: rev}); | |
} | |
return rev; | |
}); | |
edges.reverse(); | |
edges.forEach(function (edge) { | |
if (height[edge.from] === undefined) { | |
height[edge.from] = 1 + height[edge.to]; | |
} else { | |
height[edge.from] = Math.min(height[edge.from], 1 + height[edge.to]); | |
} | |
}); | |
return height; | |
} | |
function allDocsKeysQuery(api, opts, callback) { | |
var keys = ('limit' in opts) ? | |
opts.keys.slice(opts.skip, opts.limit + opts.skip) : | |
(opts.skip > 0) ? opts.keys.slice(opts.skip) : opts.keys; | |
if (opts.descending) { | |
keys.reverse(); | |
} | |
if (!keys.length) { | |
return api._allDocs({limit: 0}, callback); | |
} | |
var finalResults = { | |
offset: opts.skip | |
}; | |
return Promise.all(keys.map(function (key) { | |
var subOpts = utils.extend(true, {key: key, deleted: 'ok'}, opts); | |
['limit', 'skip', 'keys'].forEach(function (optKey) { | |
delete subOpts[optKey]; | |
}); | |
return new Promise(function (resolve, reject) { | |
api._allDocs(subOpts, function (err, res) { | |
if (err) { | |
return reject(err); | |
} | |
finalResults.total_rows = res.total_rows; | |
resolve(res.rows[0] || {key: key, error: 'not_found'}); | |
}); | |
}); | |
})).then(function (results) { | |
finalResults.rows = results; | |
return finalResults; | |
}); | |
} | |
utils.inherits(AbstractPouchDB, EventEmitter); | |
module.exports = AbstractPouchDB; | |
function AbstractPouchDB() { | |
EventEmitter.call(this); | |
} | |
AbstractPouchDB.prototype.post = | |
utils.adapterFun('post', function (doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof doc !== 'object' || Array.isArray(doc)) { | |
return callback(errors.error(errors.NOT_AN_OBJECT)); | |
} | |
this.bulkDocs({docs: [doc]}, opts, yankError(callback)); | |
}); | |
AbstractPouchDB.prototype.put = | |
utils.adapterFun('put', utils.getArguments(function (args) { | |
var temp, temptype, opts, callback; | |
var doc = args.shift(); | |
var id = '_id' in doc; | |
if (typeof doc !== 'object' || Array.isArray(doc)) { | |
callback = args.pop(); | |
return callback(errors.error(errors.NOT_AN_OBJECT)); | |
} | |
doc = utils.clone(doc); | |
while (true) { | |
temp = args.shift(); | |
temptype = typeof temp; | |
if (temptype === "string" && !id) { | |
doc._id = temp; | |
id = true; | |
} else if (temptype === "string" && id && !('_rev' in doc)) { | |
doc._rev = temp; | |
} else if (temptype === "object") { | |
opts = temp; | |
} else if (temptype === "function") { | |
callback = temp; | |
} | |
if (!args.length) { | |
break; | |
} | |
} | |
opts = opts || {}; | |
var error = utils.invalidIdError(doc._id); | |
if (error) { | |
return callback(error); | |
} | |
if (utils.isLocalId(doc._id) && typeof this._putLocal === 'function') { | |
if (doc._deleted) { | |
return this._removeLocal(doc, callback); | |
} else { | |
return this._putLocal(doc, callback); | |
} | |
} | |
this.bulkDocs({docs: [doc]}, opts, yankError(callback)); | |
})); | |
AbstractPouchDB.prototype.putAttachment = | |
utils.adapterFun('putAttachment', function (docId, attachmentId, rev, | |
blob, type, callback) { | |
var api = this; | |
if (typeof type === 'function') { | |
callback = type; | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
if (typeof type === 'undefined') { | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
function createAttachment(doc) { | |
doc._attachments = doc._attachments || {}; | |
doc._attachments[attachmentId] = { | |
content_type: type, | |
data: blob | |
}; | |
return api.put(doc); | |
} | |
return api.get(docId).then(function (doc) { | |
if (doc._rev !== rev) { | |
throw errors.error(errors.REV_CONFLICT); | |
} | |
return createAttachment(doc); | |
}, function (err) { | |
// create new doc | |
if (err.reason === errors.MISSING_DOC.message) { | |
return createAttachment({_id: docId}); | |
} else { | |
throw err; | |
} | |
}); | |
}); | |
AbstractPouchDB.prototype.removeAttachment = | |
utils.adapterFun('removeAttachment', function (docId, attachmentId, rev, | |
callback) { | |
var self = this; | |
self.get(docId, function (err, obj) { | |
if (err) { | |
callback(err); | |
return; | |
} | |
if (obj._rev !== rev) { | |
callback(errors.error(errors.REV_CONFLICT)); | |
return; | |
} | |
if (!obj._attachments) { | |
return callback(); | |
} | |
delete obj._attachments[attachmentId]; | |
if (Object.keys(obj._attachments).length === 0) { | |
delete obj._attachments; | |
} | |
self.put(obj, callback); | |
}); | |
}); | |
AbstractPouchDB.prototype.remove = | |
utils.adapterFun('remove', function (docOrId, optsOrRev, opts, callback) { | |
var doc; | |
if (typeof optsOrRev === 'string') { | |
// id, rev, opts, callback style | |
doc = { | |
_id: docOrId, | |
_rev: optsOrRev | |
}; | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
} else { | |
// doc, opts, callback style | |
doc = docOrId; | |
if (typeof optsOrRev === 'function') { | |
callback = optsOrRev; | |
opts = {}; | |
} else { | |
callback = opts; | |
opts = optsOrRev; | |
} | |
} | |
opts = utils.clone(opts || {}); | |
opts.was_delete = true; | |
var newDoc = {_id: doc._id, _rev: (doc._rev || opts.rev)}; | |
newDoc._deleted = true; | |
if (utils.isLocalId(newDoc._id) && typeof this._removeLocal === 'function') { | |
return this._removeLocal(doc, callback); | |
} | |
this.bulkDocs({docs: [newDoc]}, opts, yankError(callback)); | |
}); | |
AbstractPouchDB.prototype.revsDiff = | |
utils.adapterFun('revsDiff', function (req, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
var ids = Object.keys(req); | |
if (!ids.length) { | |
return callback(null, {}); | |
} | |
var count = 0; | |
var missing = new utils.Map(); | |
function addToMissing(id, revId) { | |
if (!missing.has(id)) { | |
missing.set(id, {missing: []}); | |
} | |
missing.get(id).missing.push(revId); | |
} | |
function processDoc(id, rev_tree) { | |
// Is this fast enough? Maybe we should switch to a set simulated by a map | |
var missingForId = req[id].slice(0); | |
merge.traverseRevTree(rev_tree, function (isLeaf, pos, revHash, ctx, | |
opts) { | |
var rev = pos + '-' + revHash; | |
var idx = missingForId.indexOf(rev); | |
if (idx === -1) { | |
return; | |
} | |
missingForId.splice(idx, 1); | |
if (opts.status !== 'available') { | |
addToMissing(id, rev); | |
} | |
}); | |
// Traversing the tree is synchronous, so now `missingForId` contains | |
// revisions that were not found in the tree | |
missingForId.forEach(function (rev) { | |
addToMissing(id, rev); | |
}); | |
} | |
ids.map(function (id) { | |
this._getRevisionTree(id, function (err, rev_tree) { | |
if (err && err.status === 404 && err.message === 'missing') { | |
missing.set(id, {missing: req[id]}); | |
} else if (err) { | |
return callback(err); | |
} else { | |
processDoc(id, rev_tree); | |
} | |
if (++count === ids.length) { | |
// convert LazyMap to object | |
var missingObj = {}; | |
missing.forEach(function (value, key) { | |
missingObj[key] = value; | |
}); | |
return callback(null, missingObj); | |
} | |
}); | |
}, this); | |
}); | |
// compact one document and fire callback | |
// by compacting we mean removing all revisions which | |
// are further from the leaf in revision tree than max_height | |
AbstractPouchDB.prototype.compactDocument = | |
utils.adapterFun('compactDocument', function (docId, maxHeight, callback) { | |
var self = this; | |
this._getRevisionTree(docId, function (err, revTree) { | |
if (err) { | |
return callback(err); | |
} | |
var height = computeHeight(revTree); | |
var candidates = []; | |
var revs = []; | |
Object.keys(height).forEach(function (rev) { | |
if (height[rev] > maxHeight) { | |
candidates.push(rev); | |
} | |
}); | |
merge.traverseRevTree(revTree, function (isLeaf, pos, revHash, ctx, opts) { | |
var rev = pos + '-' + revHash; | |
if (opts.status === 'available' && candidates.indexOf(rev) !== -1) { | |
revs.push(rev); | |
} | |
}); | |
self._doCompaction(docId, revs, callback); | |
}); | |
}); | |
// compact the whole database using single document | |
// compaction | |
AbstractPouchDB.prototype.compact = | |
utils.adapterFun('compact', function (opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
var self = this; | |
opts = utils.clone(opts || {}); | |
self.get('_local/compaction').catch(function () { | |
return false; | |
}).then(function (doc) { | |
if (typeof self._compact === 'function') { | |
if (doc && doc.last_seq) { | |
opts.last_seq = doc.last_seq; | |
} | |
return self._compact(opts, callback); | |
} | |
}); | |
}); | |
AbstractPouchDB.prototype._compact = function (opts, callback) { | |
var self = this; | |
var changesOpts = { | |
returnDocs: false, | |
last_seq: opts.last_seq || 0 | |
}; | |
var promises = []; | |
function onChange(row) { | |
promises.push(self.compactDocument(row.id, 0)); | |
} | |
function onComplete(resp) { | |
var lastSeq = resp.last_seq; | |
Promise.all(promises).then(function () { | |
return upsert(self, '_local/compaction', function deltaFunc(doc) { | |
if (!doc.last_seq || doc.last_seq < lastSeq) { | |
doc.last_seq = lastSeq; | |
return doc; | |
} | |
return false; // somebody else got here first, don't update | |
}); | |
}).then(function () { | |
callback(null, {ok: true}); | |
}).catch(callback); | |
} | |
self.changes(changesOpts) | |
.on('change', onChange) | |
.on('complete', onComplete) | |
.on('error', callback); | |
}; | |
/* Begin api wrappers. Specific functionality to storage belongs in the | |
_[method] */ | |
AbstractPouchDB.prototype.get = | |
utils.adapterFun('get', function (id, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof id !== 'string') { | |
return callback(errors.error(errors.INVALID_ID)); | |
} | |
if (utils.isLocalId(id) && typeof this._getLocal === 'function') { | |
return this._getLocal(id, callback); | |
} | |
var leaves = [], self = this; | |
function finishOpenRevs() { | |
var result = []; | |
var count = leaves.length; | |
if (!count) { | |
return callback(null, result); | |
} | |
// order with open_revs is unspecified | |
leaves.forEach(function (leaf) { | |
self.get(id, { | |
rev: leaf, | |
revs: opts.revs, | |
attachments: opts.attachments | |
}, function (err, doc) { | |
if (!err) { | |
result.push({ok: doc}); | |
} else { | |
result.push({missing: leaf}); | |
} | |
count--; | |
if (!count) { | |
callback(null, result); | |
} | |
}); | |
}); | |
} | |
if (opts.open_revs) { | |
if (opts.open_revs === "all") { | |
this._getRevisionTree(id, function (err, rev_tree) { | |
if (err) { | |
return callback(err); | |
} | |
leaves = merge.collectLeaves(rev_tree).map(function (leaf) { | |
return leaf.rev; | |
}); | |
finishOpenRevs(); | |
}); | |
} else { | |
if (Array.isArray(opts.open_revs)) { | |
leaves = opts.open_revs; | |
for (var i = 0; i < leaves.length; i++) { | |
var l = leaves[i]; | |
// looks like it's the only thing couchdb checks | |
if (!(typeof(l) === "string" && /^\d+-/.test(l))) { | |
return callback(errors.error(errors.INVALID_REV)); | |
} | |
} | |
finishOpenRevs(); | |
} else { | |
return callback(errors.error(errors.UNKNOWN_ERROR, | |
'function_clause')); | |
} | |
} | |
return; // open_revs does not like other options | |
} | |
return this._get(id, opts, function (err, result) { | |
opts = utils.clone(opts); | |
if (err) { | |
return callback(err); | |
} | |
var doc = result.doc; | |
var metadata = result.metadata; | |
var ctx = result.ctx; | |
if (opts.conflicts) { | |
var conflicts = merge.collectConflicts(metadata); | |
if (conflicts.length) { | |
doc._conflicts = conflicts; | |
} | |
} | |
if (utils.isDeleted(metadata, doc._rev)) { | |
doc._deleted = true; | |
} | |
if (opts.revs || opts.revs_info) { | |
var paths = merge.rootToLeaf(metadata.rev_tree); | |
var path = arrayFirst(paths, function (arr) { | |
return arr.ids.map(function (x) { return x.id; }) | |
.indexOf(doc._rev.split('-')[1]) !== -1; | |
}); | |
var indexOfRev = path.ids.map(function (x) {return x.id; }) | |
.indexOf(doc._rev.split('-')[1]) + 1; | |
var howMany = path.ids.length - indexOfRev; | |
path.ids.splice(indexOfRev, howMany); | |
path.ids.reverse(); | |
if (opts.revs) { | |
doc._revisions = { | |
start: (path.pos + path.ids.length) - 1, | |
ids: path.ids.map(function (rev) { | |
return rev.id; | |
}) | |
}; | |
} | |
if (opts.revs_info) { | |
var pos = path.pos + path.ids.length; | |
doc._revs_info = path.ids.map(function (rev) { | |
pos--; | |
return { | |
rev: pos + '-' + rev.id, | |
status: rev.opts.status | |
}; | |
}); | |
} | |
} | |
if (opts.local_seq) { | |
utils.info('The "local_seq" option is deprecated and will be removed'); | |
doc._local_seq = result.metadata.seq; | |
} | |
if (opts.attachments && doc._attachments) { | |
var attachments = doc._attachments; | |
var count = Object.keys(attachments).length; | |
if (count === 0) { | |
return callback(null, doc); | |
} | |
Object.keys(attachments).forEach(function (key) { | |
this._getAttachment(attachments[key], | |
{encode: true, ctx: ctx}, function (err, data) { | |
var att = doc._attachments[key]; | |
att.data = data; | |
delete att.stub; | |
delete att.length; | |
if (!--count) { | |
callback(null, doc); | |
} | |
}); | |
}, self); | |
} else { | |
if (doc._attachments) { | |
for (var key in doc._attachments) { | |
if (doc._attachments.hasOwnProperty(key)) { | |
doc._attachments[key].stub = true; | |
} | |
} | |
} | |
callback(null, doc); | |
} | |
}); | |
}); | |
AbstractPouchDB.prototype.getAttachment = | |
utils.adapterFun('getAttachment', function (docId, attachmentId, opts, | |
callback) { | |
var self = this; | |
if (opts instanceof Function) { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
this._get(docId, opts, function (err, res) { | |
if (err) { | |
return callback(err); | |
} | |
if (res.doc._attachments && res.doc._attachments[attachmentId]) { | |
opts.ctx = res.ctx; | |
self._getAttachment(res.doc._attachments[attachmentId], opts, callback); | |
} else { | |
return callback(errors.error(errors.MISSING_DOC)); | |
} | |
}); | |
}); | |
AbstractPouchDB.prototype.allDocs = | |
utils.adapterFun('allDocs', function (opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
opts.skip = typeof opts.skip !== 'undefined' ? opts.skip : 0; | |
if ('keys' in opts) { | |
if (!Array.isArray(opts.keys)) { | |
return callback(new TypeError('options.keys must be an array')); | |
} | |
var incompatibleOpt = | |
['startkey', 'endkey', 'key'].filter(function (incompatibleOpt) { | |
return incompatibleOpt in opts; | |
})[0]; | |
if (incompatibleOpt) { | |
callback(errors.error(errors.QUERY_PARSE_ERROR, | |
'Query parameter `' + incompatibleOpt + | |
'` is not compatible with multi-get' | |
)); | |
return; | |
} | |
if (this.type() !== 'http') { | |
return allDocsKeysQuery(this, opts, callback); | |
} | |
} | |
return this._allDocs(opts, callback); | |
}); | |
AbstractPouchDB.prototype.changes = function (opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
return new Changes(this, opts, callback); | |
}; | |
AbstractPouchDB.prototype.close = | |
utils.adapterFun('close', function (callback) { | |
this._closed = true; | |
return this._close(callback); | |
}); | |
AbstractPouchDB.prototype.info = utils.adapterFun('info', function (callback) { | |
var self = this; | |
this._info(function (err, info) { | |
if (err) { | |
return callback(err); | |
} | |
// assume we know better than the adapter, unless it informs us | |
info.db_name = info.db_name || self._db_name; | |
info.auto_compaction = !!(self.auto_compaction && self.type() !== 'http'); | |
callback(null, info); | |
}); | |
}); | |
AbstractPouchDB.prototype.id = utils.adapterFun('id', function (callback) { | |
return this._id(callback); | |
}); | |
AbstractPouchDB.prototype.type = function () { | |
return (typeof this._type === 'function') ? this._type() : this.adapter; | |
}; | |
AbstractPouchDB.prototype.bulkDocs = | |
utils.adapterFun('bulkDocs', function (req, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
if (Array.isArray(req)) { | |
req = { | |
docs: req | |
}; | |
} | |
if (!req || !req.docs || !Array.isArray(req.docs)) { | |
return callback(errors.error(errors.MISSING_BULK_DOCS)); | |
} | |
for (var i = 0; i < req.docs.length; ++i) { | |
if (typeof req.docs[i] !== 'object' || Array.isArray(req.docs[i])) { | |
return callback(errors.error(errors.NOT_AN_OBJECT)); | |
} | |
} | |
req = utils.clone(req); | |
if (!('new_edits' in opts)) { | |
if ('new_edits' in req) { | |
opts.new_edits = req.new_edits; | |
} else { | |
opts.new_edits = true; | |
} | |
} | |
if (!opts.new_edits && this.type() !== 'http') { | |
// ensure revisions of the same doc are sorted, so that | |
// the local adapter processes them correctly (#2935) | |
req.docs.sort(function (a, b) { | |
var idCompare = utils.compare(a._id, b._id); | |
if (idCompare !== 0) { | |
return idCompare; | |
} | |
var aStart = a._revisions ? a._revisions.start : 0; | |
var bStart = b._revisions ? b._revisions.start : 0; | |
return utils.compare(aStart, bStart); | |
}); | |
} | |
req.docs.forEach(function (doc) { | |
if (doc._deleted) { | |
delete doc._attachments; // ignore atts for deleted docs | |
} | |
}); | |
return this._bulkDocs(req, opts, function (err, res) { | |
if (err) { | |
return callback(err); | |
} | |
if (!opts.new_edits) { | |
// this is what couch does when new_edits is false | |
res = res.filter(function (x) { | |
return x.error; | |
}); | |
} | |
callback(null, res); | |
}); | |
}); | |
AbstractPouchDB.prototype.registerDependentDatabase = | |
utils.adapterFun('registerDependentDatabase', function (dependentDb, | |
callback) { | |
var depDB = new this.constructor(dependentDb, this.__opts); | |
function diffFun(doc) { | |
doc.dependentDbs = doc.dependentDbs || {}; | |
if (doc.dependentDbs[dependentDb]) { | |
return false; // no update required | |
} | |
doc.dependentDbs[dependentDb] = true; | |
return doc; | |
} | |
upsert(this, '_local/_pouch_dependentDbs', diffFun, function (err) { | |
if (err) { | |
return callback(err); | |
} | |
return callback(null, {db: depDB}); | |
}); | |
}); | |
AbstractPouchDB.prototype.destroy = | |
utils.adapterFun('destroy', function (callback) { | |
var self = this; | |
var usePrefix = 'use_prefix' in self ? self.use_prefix : true; | |
function destroyDb() { | |
// call destroy method of the particular adaptor | |
self._destroy(function (err, resp) { | |
if (err) { | |
return callback(err); | |
} | |
self.emit('destroyed'); | |
callback(null, resp || { 'ok': true }); | |
}); | |
} | |
self.get('_local/_pouch_dependentDbs', function (err, localDoc) { | |
if (err) { | |
if (err.status !== 404) { | |
return callback(err); | |
} else { // no dependencies | |
return destroyDb(); | |
} | |
} | |
var dependentDbs = localDoc.dependentDbs; | |
var PouchDB = self.constructor; | |
var deletedMap = Object.keys(dependentDbs).map(function (name) { | |
var trueName = usePrefix ? | |
name.replace(new RegExp('^' + PouchDB.prefix), '') : name; | |
return new PouchDB(trueName, self.__opts).destroy(); | |
}); | |
Promise.all(deletedMap).then(destroyDb, function (error) { | |
callback(error); | |
}); | |
}); | |
}); | |
},{"./changes":95,"./deps/errors":101,"./deps/upsert":109,"./merge":114,"./utils":119,"events":80}],84:[function(require,module,exports){ | |
(function (process){ | |
"use strict"; | |
var CHANGES_BATCH_SIZE = 25; | |
// according to http://stackoverflow.com/a/417184/680742, | |
// the de factor URL length limit is 2000 characters. | |
// but since most of our measurements don't take the full | |
// URL into account, we fudge it a bit. | |
// TODO: we could measure the full URL to enforce exactly 2000 chars | |
var MAX_URL_LENGTH = 1800; | |
var utils = require('../../utils'); | |
var errors = require('../../deps/errors'); | |
var log = require('debug')('pouchdb:http'); | |
var isBrowser = typeof process === 'undefined' || process.browser; | |
var buffer = require('../../deps/buffer'); | |
function encodeDocId(id) { | |
if (/^_design/.test(id)) { | |
return '_design/' + encodeURIComponent(id.slice(8)); | |
} | |
if (/^_local/.test(id)) { | |
return '_local/' + encodeURIComponent(id.slice(7)); | |
} | |
return encodeURIComponent(id); | |
} | |
function preprocessAttachments(doc) { | |
if (!doc._attachments || !Object.keys(doc._attachments)) { | |
return utils.Promise.resolve(); | |
} | |
return utils.Promise.all(Object.keys(doc._attachments).map(function (key) { | |
var attachment = doc._attachments[key]; | |
if (attachment.data && typeof attachment.data !== 'string') { | |
if (isBrowser) { | |
return new utils.Promise(function (resolve) { | |
utils.readAsBinaryString(attachment.data, function (binary) { | |
attachment.data = utils.btoa(binary); | |
resolve(); | |
}); | |
}); | |
} else { | |
attachment.data = attachment.data.toString('base64'); | |
} | |
} | |
})); | |
} | |
// Get all the information you possibly can about the URI given by name and | |
// return it as a suitable object. | |
function getHost(name, opts) { | |
// If the given name contains "http:" | |
if (/http(s?):/.test(name)) { | |
// Prase the URI into all its little bits | |
var uri = utils.parseUri(name); | |
// Store the fact that it is a remote URI | |
uri.remote = true; | |
// Store the user and password as a separate auth object | |
if (uri.user || uri.password) { | |
uri.auth = {username: uri.user, password: uri.password}; | |
} | |
// Split the path part of the URI into parts using '/' as the delimiter | |
// after removing any leading '/' and any trailing '/' | |
var parts = uri.path.replace(/(^\/|\/$)/g, '').split('/'); | |
// Store the first part as the database name and remove it from the parts | |
// array | |
uri.db = parts.pop(); | |
// Restore the path by joining all the remaining parts (all the parts | |
// except for the database name) with '/'s | |
uri.path = parts.join('/'); | |
opts = opts || {}; | |
opts = utils.clone(opts); | |
uri.headers = opts.headers || (opts.ajax && opts.ajax.headers) || {}; | |
if (opts.auth || uri.auth) { | |
var nAuth = opts.auth || uri.auth; | |
var token = utils.btoa(nAuth.username + ':' + nAuth.password); | |
uri.headers.Authorization = 'Basic ' + token; | |
} | |
if (opts.headers) { | |
uri.headers = opts.headers; | |
} | |
return uri; | |
} | |
// If the given name does not contain 'http:' then return a very basic object | |
// with no host, the current path, the given name as the database name and no | |
// username/password | |
return {host: '', path: '/', db: name, auth: false}; | |
} | |
// Generate a URL with the host data given by opts and the given path | |
function genDBUrl(opts, path) { | |
return genUrl(opts, opts.db + '/' + path); | |
} | |
// Generate a URL with the host data given by opts and the given path | |
function genUrl(opts, path) { | |
if (opts.remote) { | |
// If the host already has a path, then we need to have a path delimiter | |
// Otherwise, the path delimiter is the empty string | |
var pathDel = !opts.path ? '' : '/'; | |
// If the host already has a path, then we need to have a path delimiter | |
// Otherwise, the path delimiter is the empty string | |
return opts.protocol + '://' + opts.host + ':' + opts.port + '/' + | |
opts.path + pathDel + path; | |
} | |
return '/' + path; | |
} | |
// Implements the PouchDB API for dealing with CouchDB instances over HTTP | |
function HttpPouch(opts, callback) { | |
// The functions that will be publicly available for HttpPouch | |
var api = this; | |
api.getHost = opts.getHost ? opts.getHost : getHost; | |
// Parse the URI given by opts.name into an easy-to-use object | |
var host = api.getHost(opts.name, opts); | |
// Generate the database URL based on the host | |
var dbUrl = genDBUrl(host, ''); | |
api.getUrl = function () {return dbUrl; }; | |
api.getHeaders = function () {return utils.clone(host.headers); }; | |
var ajaxOpts = opts.ajax || {}; | |
opts = utils.clone(opts); | |
function ajax(options, callback) { | |
var reqOpts = utils.extend(true, utils.clone(ajaxOpts), options); | |
log(reqOpts.method + ' ' + reqOpts.url); | |
return utils.ajax(reqOpts, callback); | |
} | |
// Create a new CouchDB database based on the given opts | |
var createDB = function () { | |
ajax({headers: host.headers, method: 'PUT', url: dbUrl}, function (err) { | |
// If we get an "Unauthorized" error | |
if (err && err.status === 401) { | |
// Test if the database already exists | |
ajax({headers: host.headers, method: 'HEAD', url: dbUrl}, | |
function (err) { | |
// If there is still an error | |
if (err) { | |
// Give the error to the callback to deal with | |
callback(err); | |
} else { | |
// Continue as if there had been no errors | |
callback(null, api); | |
} | |
}); | |
// If there were no errros or if the only error is "Precondition Failed" | |
// (note: "Precondition Failed" occurs when we try to create a database | |
// that already exists) | |
} else if (!err || err.status === 412) { | |
// Continue as if there had been no errors | |
callback(null, api); | |
} else { | |
callback(err); | |
} | |
}); | |
}; | |
if (!opts.skipSetup) { | |
ajax({headers: host.headers, method: 'GET', url: dbUrl}, function (err) { | |
//check if the db exists | |
if (err) { | |
if (err.status === 404) { | |
utils.explain404( | |
'PouchDB is just detecting if the remote DB exists.'); | |
//if it doesn't, create it | |
createDB(); | |
} else { | |
callback(err); | |
} | |
} else { | |
//go do stuff with the db | |
callback(null, api); | |
} | |
}); | |
} | |
api.type = function () { | |
return 'http'; | |
}; | |
api.id = utils.adapterFun('id', function (callback) { | |
ajax({ | |
headers: host.headers, | |
method: 'GET', | |
url: genUrl(host, '') | |
}, function (err, result) { | |
var uuid = (result && result.uuid) ? | |
result.uuid + host.db : genDBUrl(host, ''); | |
callback(null, uuid); | |
}); | |
}); | |
api.request = utils.adapterFun('request', function (options, callback) { | |
options.headers = host.headers; | |
options.url = genDBUrl(host, options.url); | |
ajax(options, callback); | |
}); | |
// Sends a POST request to the host calling the couchdb _compact function | |
// version: The version of CouchDB it is running | |
api.compact = utils.adapterFun('compact', function (opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
ajax({ | |
headers: host.headers, | |
url: genDBUrl(host, '_compact'), | |
method: 'POST' | |
}, function () { | |
function ping() { | |
api.info(function (err, res) { | |
if (!res.compact_running) { | |
callback(null, {ok: true}); | |
} else { | |
setTimeout(ping, opts.interval || 200); | |
} | |
}); | |
} | |
// Ping the http if it's finished compaction | |
if (typeof callback === "function") { | |
ping(); | |
} | |
}); | |
}); | |
// Calls GET on the host, which gets back a JSON string containing | |
// couchdb: A welcome string | |
// version: The version of CouchDB it is running | |
api._info = function (callback) { | |
ajax({ | |
headers: host.headers, | |
method: 'GET', | |
url: genDBUrl(host, '') | |
}, function (err, res) { | |
if (err) { | |
callback(err); | |
} else { | |
res.host = genDBUrl(host, ''); | |
callback(null, res); | |
} | |
}); | |
}; | |
// Get the document with the given id from the database given by host. | |
// The id could be solely the _id in the database, or it may be a | |
// _design/ID or _local/ID path | |
api.get = utils.adapterFun('get', function (id, opts, callback) { | |
// If no options were given, set the callback to the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
if (opts.auto_encode === undefined) { | |
opts.auto_encode = true; | |
} | |
// List of parameters to add to the GET request | |
var params = []; | |
// If it exists, add the opts.revs value to the list of parameters. | |
// If revs=true then the resulting JSON will include a field | |
// _revisions containing an array of the revision IDs. | |
if (opts.revs) { | |
params.push('revs=true'); | |
} | |
// If it exists, add the opts.revs_info value to the list of parameters. | |
// If revs_info=true then the resulting JSON will include the field | |
// _revs_info containing an array of objects in which each object | |
// representing an available revision. | |
if (opts.revs_info) { | |
params.push('revs_info=true'); | |
} | |
if (opts.local_seq) { | |
params.push('local_seq=true'); | |
} | |
// If it exists, add the opts.open_revs value to the list of parameters. | |
// If open_revs=all then the resulting JSON will include all the leaf | |
// revisions. If open_revs=["rev1", "rev2",...] then the resulting JSON | |
// will contain an array of objects containing data of all revisions | |
if (opts.open_revs) { | |
if (opts.open_revs !== "all") { | |
opts.open_revs = JSON.stringify(opts.open_revs); | |
} | |
params.push('open_revs=' + opts.open_revs); | |
} | |
// If it exists, add the opts.attachments value to the list of parameters. | |
// If attachments=true the resulting JSON will include the base64-encoded | |
// contents in the "data" property of each attachment. | |
if (opts.attachments) { | |
params.push('attachments=true'); | |
} | |
// If it exists, add the opts.rev value to the list of parameters. | |
// If rev is given a revision number then get the specified revision. | |
if (opts.rev) { | |
params.push('rev=' + opts.rev); | |
} | |
// If it exists, add the opts.conflicts value to the list of parameters. | |
// If conflicts=true then the resulting JSON will include the field | |
// _conflicts containing all the conflicting revisions. | |
if (opts.conflicts) { | |
params.push('conflicts=' + opts.conflicts); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
params = params === '' ? '' : '?' + params; | |
if (opts.auto_encode) { | |
id = encodeDocId(id); | |
} | |
// Set the options for the ajax call | |
var options = { | |
headers: host.headers, | |
method: 'GET', | |
url: genDBUrl(host, id + params) | |
}; | |
var getRequestAjaxOpts = opts.ajax || {}; | |
utils.extend(true, options, getRequestAjaxOpts); | |
// If the given id contains at least one '/' and the part before the '/' | |
// is NOT "_design" and is NOT "_local" | |
// OR | |
// If the given id contains at least two '/' and the part before the first | |
// '/' is "_design". | |
// TODO This second condition seems strange since if parts[0] === '_design' | |
// then we already know that parts[0] !== '_local'. | |
var parts = id.split('/'); | |
if ((parts.length > 1 && parts[0] !== '_design' && parts[0] !== '_local') || | |
(parts.length > 2 && parts[0] === '_design' && parts[0] !== '_local')) { | |
// Binary is expected back from the server | |
options.binary = true; | |
} | |
// Get the document | |
ajax(options, function (err, doc, xhr) { | |
// If the document does not exist, send an error to the callback | |
if (err) { | |
return callback(err); | |
} | |
// Send the document to the callback | |
callback(null, doc, xhr); | |
}); | |
}); | |
// Delete the document given by doc from the database given by host. | |
api.remove = utils.adapterFun('remove', | |
function (docOrId, optsOrRev, opts, callback) { | |
var doc; | |
if (typeof optsOrRev === 'string') { | |
// id, rev, opts, callback style | |
doc = { | |
_id: docOrId, | |
_rev: optsOrRev | |
}; | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
} else { | |
// doc, opts, callback style | |
doc = docOrId; | |
if (typeof optsOrRev === 'function') { | |
callback = optsOrRev; | |
opts = {}; | |
} else { | |
callback = opts; | |
opts = optsOrRev; | |
} | |
} | |
var rev = (doc._rev || opts.rev); | |
// Delete the document | |
ajax({ | |
headers: host.headers, | |
method: 'DELETE', | |
url: genDBUrl(host, encodeDocId(doc._id)) + '?rev=' + rev | |
}, callback); | |
}); | |
function encodeAttachmentId(attachmentId) { | |
return attachmentId.split("/").map(encodeURIComponent).join("/"); | |
} | |
// Get the attachment | |
api.getAttachment = | |
utils.adapterFun('getAttachment', function (docId, attachmentId, opts, | |
callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
if (opts.auto_encode === undefined) { | |
opts.auto_encode = true; | |
} | |
if (opts.auto_encode) { | |
docId = encodeDocId(docId); | |
} | |
opts.auto_encode = false; | |
api.get(docId + '/' + encodeAttachmentId(attachmentId), opts, callback); | |
}); | |
// Remove the attachment given by the id and rev | |
api.removeAttachment = | |
utils.adapterFun('removeAttachment', function (docId, attachmentId, rev, | |
callback) { | |
var url = genDBUrl(host, encodeDocId(docId) + '/' + | |
encodeAttachmentId(attachmentId)) + '?rev=' + rev; | |
ajax({ | |
headers: host.headers, | |
method: 'DELETE', | |
url: url | |
}, callback); | |
}); | |
// Add the attachment given by blob and its contentType property | |
// to the document with the given id, the revision given by rev, and | |
// add it to the database given by host. | |
api.putAttachment = | |
utils.adapterFun('putAttachment', function (docId, attachmentId, rev, blob, | |
type, callback) { | |
if (typeof type === 'function') { | |
callback = type; | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
if (typeof type === 'undefined') { | |
type = blob; | |
blob = rev; | |
rev = null; | |
} | |
var id = encodeDocId(docId) + '/' + encodeAttachmentId(attachmentId); | |
var url = genDBUrl(host, id); | |
if (rev) { | |
url += '?rev=' + rev; | |
} | |
if (typeof blob === 'string') { | |
var binary; | |
try { | |
binary = utils.atob(blob); | |
} catch (err) { | |
// it's not base64-encoded, so throw error | |
return callback(errors.error(errors.BAD_ARG, | |
'Attachments need to be base64 encoded')); | |
} | |
if (isBrowser) { | |
blob = utils.createBlob([utils.fixBinary(binary)], {type: type}); | |
} else { | |
blob = binary ? new buffer(binary, 'binary') : ''; | |
} | |
} | |
var opts = { | |
headers: utils.clone(host.headers), | |
method: 'PUT', | |
url: url, | |
processData: false, | |
body: blob, | |
timeout: 60000 | |
}; | |
opts.headers['Content-Type'] = type; | |
// Add the attachment | |
ajax(opts, callback); | |
}); | |
// Add the document given by doc (in JSON string format) to the database | |
// given by host. This fails if the doc has no _id field. | |
api.put = utils.adapterFun('put', utils.getArguments(function (args) { | |
var temp, temptype, opts; | |
var doc = args.shift(); | |
var id = '_id' in doc; | |
var callback = args.pop(); | |
if (typeof doc !== 'object' || Array.isArray(doc)) { | |
return callback(errors.error(errors.NOT_AN_OBJECT)); | |
} | |
doc = utils.clone(doc); | |
preprocessAttachments(doc).then(function () { | |
while (true) { | |
temp = args.shift(); | |
temptype = typeof temp; | |
if (temptype === "string" && !id) { | |
doc._id = temp; | |
id = true; | |
} else if (temptype === "string" && id && !('_rev' in doc)) { | |
doc._rev = temp; | |
} else if (temptype === "object") { | |
opts = utils.clone(temp); | |
} | |
if (!args.length) { | |
break; | |
} | |
} | |
opts = opts || {}; | |
var error = utils.invalidIdError(doc._id); | |
if (error) { | |
throw error; | |
} | |
// List of parameter to add to the PUT request | |
var params = []; | |
// If it exists, add the opts.new_edits value to the list of parameters. | |
// If new_edits = false then the database will NOT assign this document a | |
// new revision number | |
if (opts && typeof opts.new_edits !== 'undefined') { | |
params.push('new_edits=' + opts.new_edits); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
if (params !== '') { | |
params = '?' + params; | |
} | |
// Add the document | |
ajax({ | |
headers: host.headers, | |
method: 'PUT', | |
url: genDBUrl(host, encodeDocId(doc._id)) + params, | |
body: doc | |
}, function (err, res) { | |
if (err) { | |
return callback(err); | |
} | |
res.ok = true; | |
callback(null, res); | |
}); | |
}).catch(callback); | |
})); | |
// Add the document given by doc (in JSON string format) to the database | |
// given by host. This does not assume that doc is a new document | |
// (i.e. does not have a _id or a _rev field.) | |
api.post = utils.adapterFun('post', function (doc, opts, callback) { | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
if (typeof doc !== 'object') { | |
return callback(errors.error(errors.NOT_AN_OBJECT)); | |
} | |
if (! ("_id" in doc)) { | |
doc._id = utils.uuid(); | |
} | |
api.put(doc, opts, function (err, res) { | |
if (err) { | |
return callback(err); | |
} | |
res.ok = true; | |
callback(null, res); | |
}); | |
}); | |
// Update/create multiple documents given by req in the database | |
// given by host. | |
api._bulkDocs = function (req, opts, callback) { | |
// If opts.new_edits exists add it to the document data to be | |
// send to the database. | |
// If new_edits=false then it prevents the database from creating | |
// new revision numbers for the documents. Instead it just uses | |
// the old ones. This is used in database replication. | |
if (typeof opts.new_edits !== 'undefined') { | |
req.new_edits = opts.new_edits; | |
} | |
utils.Promise.all(req.docs.map(preprocessAttachments)).then(function () { | |
// Update/create the documents | |
ajax({ | |
headers: host.headers, | |
method: 'POST', | |
url: genDBUrl(host, '_bulk_docs'), | |
body: req | |
}, function (err, results) { | |
if (err) { | |
return callback(err); | |
} | |
results.forEach(function (result) { | |
result.ok = true; // smooths out cloudant not adding this | |
}); | |
callback(null, results); | |
}); | |
}).catch(callback); | |
}; | |
// Get a listing of the documents in the database given | |
// by host and ordered by increasing id. | |
api.allDocs = utils.adapterFun('allDocs', function (opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
// List of parameters to add to the GET request | |
var params = []; | |
var body; | |
var method = 'GET'; | |
if (opts.conflicts) { | |
params.push('conflicts=true'); | |
} | |
// If opts.descending is truthy add it to params | |
if (opts.descending) { | |
params.push('descending=true'); | |
} | |
// If opts.include_docs exists, add the include_docs value to the | |
// list of parameters. | |
// If include_docs=true then include the associated document with each | |
// result. | |
if (opts.include_docs) { | |
params.push('include_docs=true'); | |
} | |
if (opts.attachments) { | |
// added in CouchDB 1.6.0 | |
params.push('attachments=true'); | |
} | |
if (opts.key) { | |
params.push('key=' + encodeURIComponent(JSON.stringify(opts.key))); | |
} | |
// If opts.startkey exists, add the startkey value to the list of | |
// parameters. | |
// If startkey is given then the returned list of documents will | |
// start with the document whose id is startkey. | |
if (opts.startkey) { | |
params.push('startkey=' + | |
encodeURIComponent(JSON.stringify(opts.startkey))); | |
} | |
// If opts.endkey exists, add the endkey value to the list of parameters. | |
// If endkey is given then the returned list of docuemnts will | |
// end with the document whose id is endkey. | |
if (opts.endkey) { | |
params.push('endkey=' + encodeURIComponent(JSON.stringify(opts.endkey))); | |
} | |
if (typeof opts.inclusive_end !== 'undefined') { | |
params.push('inclusive_end=' + !!opts.inclusive_end); | |
} | |
// If opts.limit exists, add the limit value to the parameter list. | |
if (typeof opts.limit !== 'undefined') { | |
params.push('limit=' + opts.limit); | |
} | |
if (typeof opts.skip !== 'undefined') { | |
params.push('skip=' + opts.skip); | |
} | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
if (params !== '') { | |
params = '?' + params; | |
} | |
if (typeof opts.keys !== 'undefined') { | |
var keysAsString = | |
'keys=' + encodeURIComponent(JSON.stringify(opts.keys)); | |
if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) { | |
// If the keys are short enough, do a GET. we do this to work around | |
// Safari not understanding 304s on POSTs (see issue #1239) | |
params += (params.indexOf('?') !== -1 ? '&' : '?') + keysAsString; | |
} else { | |
// If keys are too long, issue a POST request to circumvent GET | |
// query string limits | |
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options | |
method = 'POST'; | |
body = JSON.stringify({keys: opts.keys}); | |
} | |
} | |
// Get the document listing | |
ajax({ | |
headers: host.headers, | |
method: method, | |
url: genDBUrl(host, '_all_docs' + params), | |
body: body | |
}, callback); | |
}); | |
// Get a list of changes made to documents in the database given by host. | |
// TODO According to the README, there should be two other methods here, | |
// api.changes.addListener and api.changes.removeListener. | |
api._changes = function (opts) { | |
// We internally page the results of a changes request, this means | |
// if there is a large set of changes to be returned we can start | |
// processing them quicker instead of waiting on the entire | |
// set of changes to return and attempting to process them at once | |
var batchSize = 'batch_size' in opts ? opts.batch_size : CHANGES_BATCH_SIZE; | |
opts = utils.clone(opts); | |
opts.timeout = opts.timeout || 30 * 1000; | |
// We give a 5 second buffer for CouchDB changes to respond with | |
// an ok timeout | |
var params = { timeout: opts.timeout - (5 * 1000) }; | |
var limit = (typeof opts.limit !== 'undefined') ? opts.limit : false; | |
if (limit === 0) { | |
limit = 1; | |
} | |
var returnDocs; | |
if ('returnDocs' in opts) { | |
returnDocs = opts.returnDocs; | |
} else { | |
returnDocs = true; | |
} | |
// | |
var leftToFetch = limit; | |
if (opts.style) { | |
params.style = opts.style; | |
} | |
if (opts.include_docs || opts.filter && typeof opts.filter === 'function') { | |
params.include_docs = true; | |
} | |
if (opts.attachments) { | |
params.attachments = true; | |
} | |
if (opts.continuous) { | |
params.feed = 'longpoll'; | |
} | |
if (opts.conflicts) { | |
params.conflicts = true; | |
} | |
if (opts.descending) { | |
params.descending = true; | |
} | |
if (opts.filter && typeof opts.filter === 'string') { | |
params.filter = opts.filter; | |
if (opts.filter === '_view' && | |
opts.view && | |
typeof opts.view === 'string') { | |
params.view = opts.view; | |
} | |
} | |
// If opts.query_params exists, pass it through to the changes request. | |
// These parameters may be used by the filter on the source database. | |
if (opts.query_params && typeof opts.query_params === 'object') { | |
for (var param_name in opts.query_params) { | |
if (opts.query_params.hasOwnProperty(param_name)) { | |
params[param_name] = opts.query_params[param_name]; | |
} | |
} | |
} | |
var method = 'GET'; | |
var body; | |
if (opts.doc_ids) { | |
// set this automagically for the user; it's annoying that couchdb | |
// requires both a "filter" and a "doc_ids" param. | |
params.filter = '_doc_ids'; | |
var docIdsJson = JSON.stringify(opts.doc_ids); | |
if (docIdsJson.length < MAX_URL_LENGTH) { | |
params.doc_ids = docIdsJson; | |
} else { | |
// anything greater than ~2000 is unsafe for gets, so | |
// use POST instead | |
method = 'POST'; | |
body = {doc_ids: opts.doc_ids }; | |
} | |
} | |
if (opts.continuous && api._useSSE) { | |
return api.sse(opts, params, returnDocs); | |
} | |
var xhr; | |
var lastFetchedSeq; | |
// Get all the changes starting wtih the one immediately after the | |
// sequence number given by since. | |
var fetch = function (since, callback) { | |
if (opts.aborted) { | |
return; | |
} | |
params.since = since; | |
if (typeof params.since === "object") { | |
params.since = JSON.stringify(params.since); | |
} | |
if (opts.descending) { | |
if (limit) { | |
params.limit = leftToFetch; | |
} | |
} else { | |
params.limit = (!limit || leftToFetch > batchSize) ? | |
batchSize : leftToFetch; | |
} | |
var paramStr = '?' + Object.keys(params).map(function (k) { | |
return k + '=' + params[k]; | |
}).join('&'); | |
// Set the options for the ajax call | |
var xhrOpts = { | |
headers: host.headers, | |
method: method, | |
url: genDBUrl(host, '_changes' + paramStr), | |
// _changes can take a long time to generate, especially when filtered | |
timeout: opts.timeout, | |
body: body | |
}; | |
lastFetchedSeq = since; | |
if (opts.aborted) { | |
return; | |
} | |
// Get the changes | |
xhr = ajax(xhrOpts, callback); | |
}; | |
// If opts.since exists, get all the changes from the sequence | |
// number given by opts.since. Otherwise, get all the changes | |
// from the sequence number 0. | |
var fetchTimeout = 10; | |
var fetchRetryCount = 0; | |
var results = {results: []}; | |
var fetched = function (err, res) { | |
if (opts.aborted) { | |
return; | |
} | |
var raw_results_length = 0; | |
// If the result of the ajax call (res) contains changes (res.results) | |
if (res && res.results) { | |
raw_results_length = res.results.length; | |
results.last_seq = res.last_seq; | |
// For each change | |
var req = {}; | |
req.query = opts.query_params; | |
res.results = res.results.filter(function (c) { | |
leftToFetch--; | |
var ret = utils.filterChange(opts)(c); | |
if (ret) { | |
if (returnDocs) { | |
results.results.push(c); | |
} | |
utils.call(opts.onChange, c); | |
} | |
return ret; | |
}); | |
} else if (err) { | |
// In case of an error, stop listening for changes and call | |
// opts.complete | |
opts.aborted = true; | |
utils.call(opts.complete, err); | |
return; | |
} | |
// The changes feed may have timed out with no results | |
// if so reuse last update sequence | |
if (res && res.last_seq) { | |
lastFetchedSeq = res.last_seq; | |
} | |
var finished = (limit && leftToFetch <= 0) || | |
(res && raw_results_length < batchSize) || | |
(opts.descending); | |
if ((opts.continuous && !(limit && leftToFetch <= 0)) || !finished) { | |
// Increase retry delay exponentially as long as errors persist | |
if (err) { | |
fetchRetryCount += 1; | |
} else { | |
fetchRetryCount = 0; | |
} | |
var timeoutMultiplier = 1 << fetchRetryCount; | |
var retryWait = fetchTimeout * timeoutMultiplier; | |
var maximumWait = opts.maximumWait || 30000; | |
if (retryWait > maximumWait) { | |
utils.call(opts.complete, err || errors.error(errors.UNKNOWN_ERROR)); | |
return; | |
} | |
// Queue a call to fetch again with the newest sequence number | |
setTimeout(function () { fetch(lastFetchedSeq, fetched); }, retryWait); | |
} else { | |
// We're done, call the callback | |
utils.call(opts.complete, null, results); | |
} | |
}; | |
fetch(opts.since || 0, fetched); | |
// Return a method to cancel this method from processing any more | |
return { | |
cancel: function () { | |
opts.aborted = true; | |
if (xhr) { | |
xhr.abort(); | |
} | |
} | |
}; | |
}; | |
api.sse = function (opts, params, returnDocs) { | |
params.feed = 'eventsource'; | |
params.since = opts.since || 0; | |
params.limit = opts.limit; | |
delete params.timeout; | |
var paramStr = '?' + Object.keys(params).map(function (k) { | |
return k + '=' + params[k]; | |
}).join('&'); | |
var url = genDBUrl(host, '_changes' + paramStr); | |
var source = new EventSource(url); | |
var results = { | |
results: [], | |
last_seq: false | |
}; | |
var dispatched = false; | |
var open = false; | |
source.addEventListener('message', msgHandler, false); | |
source.onopen = function () { | |
open = true; | |
}; | |
source.onerror = errHandler; | |
return { | |
cancel: function () { | |
if (dispatched) { | |
return dispatched.cancel(); | |
} | |
source.removeEventListener('message', msgHandler, false); | |
source.close(); | |
} | |
}; | |
function msgHandler(e) { | |
var data = JSON.parse(e.data); | |
if (returnDocs) { | |
results.results.push(data); | |
} | |
results.last_seq = data.seq; | |
utils.call(opts.onChange, data); | |
} | |
function errHandler(err) { | |
source.removeEventListener('message', msgHandler, false); | |
if (open === false) { | |
// errored before it opened | |
// likely doesn't support EventSource | |
api._useSSE = false; | |
dispatched = api._changes(opts); | |
return; | |
} | |
source.close(); | |
utils.call(opts.complete, err); | |
} | |
}; | |
api._useSSE = false; | |
// Currently disabled due to failing chrome tests in saucelabs | |
// api._useSSE = typeof global.EventSource === 'function'; | |
// Given a set of document/revision IDs (given by req), tets the subset of | |
// those that do NOT correspond to revisions stored in the database. | |
// See http://wiki.apache.org/couchdb/HttpPostRevsDiff | |
api.revsDiff = utils.adapterFun('revsDiff', function (req, opts, callback) { | |
// If no options were given, set the callback to be the second parameter | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
// Get the missing document/revision IDs | |
ajax({ | |
headers: host.headers, | |
method: 'POST', | |
url: genDBUrl(host, '_revs_diff'), | |
body: JSON.stringify(req) | |
}, callback); | |
}); | |
api._close = function (callback) { | |
callback(); | |
}; | |
api._destroy = function (callback) { | |
ajax({ | |
url: genDBUrl(host, ''), | |
method: 'DELETE', | |
headers: host.headers | |
}, function (err, resp) { | |
if (err) { | |
api.emit('error', err); | |
callback(err); | |
} else { | |
api.emit('destroyed'); | |
api.constructor.emit('destroyed', opts.name); | |
callback(null, resp); | |
} | |
}); | |
}; | |
} | |
// HttpPouch is a valid adapter. | |
HttpPouch.valid = function () { | |
return true; | |
}; | |
module.exports = HttpPouch; | |
}).call(this,require('_process')) | |
},{"../../deps/buffer":100,"../../deps/errors":101,"../../utils":119,"_process":81,"debug":122}],85:[function(require,module,exports){ | |
'use strict'; | |
var merge = require('../../merge'); | |
var errors = require('../../deps/errors'); | |
var idbUtils = require('./idb-utils'); | |
var idbConstants = require('./idb-constants'); | |
var ATTACH_STORE = idbConstants.ATTACH_STORE; | |
var BY_SEQ_STORE = idbConstants.BY_SEQ_STORE; | |
var DOC_STORE = idbConstants.DOC_STORE; | |
var decodeDoc = idbUtils.decodeDoc; | |
var decodeMetadata = idbUtils.decodeMetadata; | |
var fetchAttachmentsIfNecessary = idbUtils.fetchAttachmentsIfNecessary; | |
var postProcessAttachments = idbUtils.postProcessAttachments; | |
var openTransactionSafely = idbUtils.openTransactionSafely; | |
function createKeyRange(start, end, inclusiveEnd, key, descending) { | |
try { | |
if (start && end) { | |
if (descending) { | |
return IDBKeyRange.bound(end, start, !inclusiveEnd, false); | |
} else { | |
return IDBKeyRange.bound(start, end, false, !inclusiveEnd); | |
} | |
} else if (start) { | |
if (descending) { | |
return IDBKeyRange.upperBound(start); | |
} else { | |
return IDBKeyRange.lowerBound(start); | |
} | |
} else if (end) { | |
if (descending) { | |
return IDBKeyRange.lowerBound(end, !inclusiveEnd); | |
} else { | |
return IDBKeyRange.upperBound(end, !inclusiveEnd); | |
} | |
} else if (key) { | |
return IDBKeyRange.only(key); | |
} | |
} catch (e) { | |
return {error: e}; | |
} | |
return null; | |
} | |
function handleKeyRangeError(api, opts, err, callback) { | |
if (err.name === "DataError" && err.code === 0) { | |
// data error, start is less than end | |
return callback(null, { | |
total_rows: api._meta.docCount, | |
offset: opts.skip, | |
rows: [] | |
}); | |
} | |
callback(errors.error(errors.IDB_ERROR, err.name, err.message)); | |
} | |
function idbAllDocs(opts, api, idb, callback) { | |
function allDocsQuery(opts, callback) { | |
var start = 'startkey' in opts ? opts.startkey : false; | |
var end = 'endkey' in opts ? opts.endkey : false; | |
var key = 'key' in opts ? opts.key : false; | |
var skip = opts.skip || 0; | |
var limit = typeof opts.limit === 'number' ? opts.limit : -1; | |
var inclusiveEnd = opts.inclusive_end !== false; | |
var descending = 'descending' in opts && opts.descending ? 'prev' : null; | |
var keyRange = createKeyRange(start, end, inclusiveEnd, key, descending); | |
if (keyRange && keyRange.error) { | |
return handleKeyRangeError(api, opts, keyRange.error, callback); | |
} | |
var stores = [DOC_STORE, BY_SEQ_STORE]; | |
if (opts.attachments) { | |
stores.push(ATTACH_STORE); | |
} | |
var txnResult = openTransactionSafely(idb, stores, 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var txn = txnResult.txn; | |
var docStore = txn.objectStore(DOC_STORE); | |
var seqStore = txn.objectStore(BY_SEQ_STORE); | |
var cursor = descending ? | |
docStore.openCursor(keyRange, descending) : | |
docStore.openCursor(keyRange); | |
var docIdRevIndex = seqStore.index('_doc_id_rev'); | |
var results = []; | |
var docCount = 0; | |
// if the user specifies include_docs=true, then we don't | |
// want to block the main cursor while we're fetching the doc | |
function fetchDocAsynchronously(metadata, row, winningRev) { | |
var key = metadata.id + "::" + winningRev; | |
docIdRevIndex.get(key).onsuccess = function onGetDoc(e) { | |
row.doc = decodeDoc(e.target.result); | |
if (opts.conflicts) { | |
row.doc._conflicts = merge.collectConflicts(metadata); | |
} | |
fetchAttachmentsIfNecessary(row.doc, opts, txn); | |
}; | |
} | |
function allDocsInner(cursor, winningRev, metadata) { | |
var row = { | |
id: metadata.id, | |
key: metadata.id, | |
value: { | |
rev: winningRev | |
} | |
}; | |
var deleted = metadata.deleted; | |
if (opts.deleted === 'ok') { | |
results.push(row); | |
// deleted docs are okay with "keys" requests | |
if (deleted) { | |
row.value.deleted = true; | |
row.doc = null; | |
} else if (opts.include_docs) { | |
fetchDocAsynchronously(metadata, row, winningRev); | |
} | |
} else if (!deleted && skip-- <= 0) { | |
results.push(row); | |
if (opts.include_docs) { | |
fetchDocAsynchronously(metadata, row, winningRev); | |
} | |
if (--limit === 0) { | |
return; | |
} | |
} | |
cursor.continue(); | |
} | |
function onGetCursor(e) { | |
docCount = api._meta.docCount; // do this within the txn for consistency | |
var cursor = e.target.result; | |
if (!cursor) { | |
return; | |
} | |
var metadata = decodeMetadata(cursor.value); | |
var winningRev = metadata.winningRev; | |
allDocsInner(cursor, winningRev, metadata); | |
} | |
function onResultsReady() { | |
callback(null, { | |
total_rows: docCount, | |
offset: opts.skip, | |
rows: results | |
}); | |
} | |
function onTxnComplete() { | |
if (opts.attachments) { | |
postProcessAttachments(results).then(onResultsReady); | |
} else { | |
onResultsReady(); | |
} | |
} | |
txn.oncomplete = onTxnComplete; | |
cursor.onsuccess = onGetCursor; | |
} | |
function allDocs(opts, callback) { | |
if (opts.limit === 0) { | |
return callback(null, { | |
total_rows: api._meta.docCount, | |
offset: opts.skip, | |
rows: [] | |
}); | |
} | |
allDocsQuery(opts, callback); | |
} | |
allDocs(opts, callback); | |
} | |
module.exports = idbAllDocs; | |
},{"../../deps/errors":101,"../../merge":114,"./idb-constants":88,"./idb-utils":89}],86:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var idbConstants = require('./idb-constants'); | |
var DETECT_BLOB_SUPPORT_STORE = idbConstants.DETECT_BLOB_SUPPORT_STORE; | |
// | |
// Detect blob support. Chrome didn't support it until version 38. | |
// In version 37 they had a broken version where PNGs (and possibly | |
// other binary types) aren't stored correctly, because when you fetch | |
// them, the content type is always null. | |
// | |
// Furthermore, they have some outstanding bugs where blobs occasionally | |
// are read by FileReader as null, or by ajax as 404s. | |
// | |
// Sadly we use the 404 bug to detect the FileReader bug, so if they | |
// get fixed independently and released in different versions of Chrome, | |
// then the bug could come back. So it's worthwhile to watch these issues: | |
// 404 bug: https://code.google.com/p/chromium/issues/detail?id=447916 | |
// FileReader bug: https://code.google.com/p/chromium/issues/detail?id=447836 | |
// | |
function checkBlobSupport(txn, idb) { | |
return new utils.Promise(function (resolve, reject) { | |
var blob = utils.createBlob([''], {type: 'image/png'}); | |
txn.objectStore(DETECT_BLOB_SUPPORT_STORE).put(blob, 'key'); | |
txn.oncomplete = function () { | |
// have to do it in a separate transaction, else the correct | |
// content type is always returned | |
var blobTxn = idb.transaction([DETECT_BLOB_SUPPORT_STORE], | |
'readwrite'); | |
var getBlobReq = blobTxn.objectStore( | |
DETECT_BLOB_SUPPORT_STORE).get('key'); | |
getBlobReq.onerror = reject; | |
getBlobReq.onsuccess = function (e) { | |
var storedBlob = e.target.result; | |
var url = URL.createObjectURL(storedBlob); | |
utils.ajax({ | |
url: url, | |
cache: true, | |
binary: true | |
}, function (err, res) { | |
if (err && err.status === 405) { | |
// firefox won't let us do that. but firefox doesn't | |
// have the blob type bug that Chrome does, so that's ok | |
resolve(true); | |
} else { | |
resolve(!!(res && res.type === 'image/png')); | |
if (err && err.status === 404) { | |
utils.explain404('PouchDB is just detecting blob URL support.'); | |
} | |
} | |
URL.revokeObjectURL(url); | |
}); | |
}; | |
}; | |
}).catch(function () { | |
return false; // error, so assume unsupported | |
}); | |
} | |
module.exports = checkBlobSupport; | |
},{"../../utils":119,"./idb-constants":88}],87:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var errors = require('../../deps/errors'); | |
var idbUtils = require('./idb-utils'); | |
var idbConstants = require('./idb-constants'); | |
var ATTACH_AND_SEQ_STORE = idbConstants.ATTACH_AND_SEQ_STORE; | |
var ATTACH_STORE = idbConstants.ATTACH_STORE; | |
var BY_SEQ_STORE = idbConstants.BY_SEQ_STORE; | |
var DOC_STORE = idbConstants.DOC_STORE; | |
var LOCAL_STORE = idbConstants.LOCAL_STORE; | |
var META_STORE = idbConstants.META_STORE; | |
var compactRevs = idbUtils.compactRevs; | |
var decodeMetadata = idbUtils.decodeMetadata; | |
var encodeMetadata = idbUtils.encodeMetadata; | |
var idbError = idbUtils.idbError; | |
var openTransactionSafely = idbUtils.openTransactionSafely; | |
function idbBulkDocs(req, opts, api, idb, Changes, callback) { | |
var docInfos = req.docs; | |
var txn; | |
var docStore; | |
var bySeqStore; | |
var attachStore; | |
var attachAndSeqStore; | |
var docInfoError; | |
var docCountDelta = 0; | |
for (var i = 0, len = docInfos.length; i < len; i++) { | |
var doc = docInfos[i]; | |
if (doc._id && utils.isLocalId(doc._id)) { | |
continue; | |
} | |
doc = docInfos[i] = utils.parseDoc(doc, opts.new_edits); | |
if (doc.error && !docInfoError) { | |
docInfoError = doc; | |
} | |
} | |
if (docInfoError) { | |
return callback(docInfoError); | |
} | |
var results = new Array(docInfos.length); | |
var fetchedDocs = new utils.Map(); | |
var preconditionErrored = false; | |
var blobType = api._meta.blobSupport ? 'blob' : 'base64'; | |
utils.preprocessAttachments(docInfos, blobType, function (err) { | |
if (err) { | |
return callback(err); | |
} | |
startTransaction(); | |
}); | |
function startTransaction() { | |
var stores = [ | |
DOC_STORE, BY_SEQ_STORE, | |
ATTACH_STORE, META_STORE, | |
LOCAL_STORE, ATTACH_AND_SEQ_STORE | |
]; | |
var txnResult = openTransactionSafely(idb, stores, 'readwrite'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
txn = txnResult.txn; | |
txn.onerror = idbError(callback); | |
txn.ontimeout = idbError(callback); | |
txn.oncomplete = complete; | |
docStore = txn.objectStore(DOC_STORE); | |
bySeqStore = txn.objectStore(BY_SEQ_STORE); | |
attachStore = txn.objectStore(ATTACH_STORE); | |
attachAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE); | |
verifyAttachments(function (err) { | |
if (err) { | |
preconditionErrored = true; | |
return callback(err); | |
} | |
fetchExistingDocs(); | |
}); | |
} | |
function processDocs() { | |
utils.processDocs(docInfos, api, fetchedDocs, txn, results, | |
writeDoc, opts); | |
} | |
function fetchExistingDocs() { | |
if (!docInfos.length) { | |
return; | |
} | |
var numFetched = 0; | |
function checkDone() { | |
if (++numFetched === docInfos.length) { | |
processDocs(); | |
} | |
} | |
function readMetadata(event) { | |
var metadata = decodeMetadata(event.target.result); | |
if (metadata) { | |
fetchedDocs.set(metadata.id, metadata); | |
} | |
checkDone(); | |
} | |
for (var i = 0, len = docInfos.length; i < len; i++) { | |
var docInfo = docInfos[i]; | |
if (docInfo._id && utils.isLocalId(docInfo._id)) { | |
checkDone(); // skip local docs | |
continue; | |
} | |
var req = docStore.get(docInfo.metadata.id); | |
req.onsuccess = readMetadata; | |
} | |
} | |
function complete() { | |
if (preconditionErrored) { | |
return; | |
} | |
Changes.notify(api._meta.name); | |
api._meta.docCount += docCountDelta; | |
callback(null, results); | |
} | |
function verifyAttachment(digest, callback) { | |
var req = attachStore.get(digest); | |
req.onsuccess = function (e) { | |
if (!e.target.result) { | |
var err = errors.error(errors.MISSING_STUB, | |
'unknown stub attachment with digest ' + | |
digest); | |
err.status = 412; | |
callback(err); | |
} else { | |
callback(); | |
} | |
}; | |
} | |
function verifyAttachments(finish) { | |
var digests = []; | |
docInfos.forEach(function (docInfo) { | |
if (docInfo.data && docInfo.data._attachments) { | |
Object.keys(docInfo.data._attachments).forEach(function (filename) { | |
var att = docInfo.data._attachments[filename]; | |
if (att.stub) { | |
digests.push(att.digest); | |
} | |
}); | |
} | |
}); | |
if (!digests.length) { | |
return finish(); | |
} | |
var numDone = 0; | |
var err; | |
function checkDone() { | |
if (++numDone === digests.length) { | |
finish(err); | |
} | |
} | |
digests.forEach(function (digest) { | |
verifyAttachment(digest, function (attErr) { | |
if (attErr && !err) { | |
err = attErr; | |
} | |
checkDone(); | |
}); | |
}); | |
} | |
function writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted, | |
isUpdate, delta, resultsIdx, callback) { | |
docCountDelta += delta; | |
var doc = docInfo.data; | |
doc._id = docInfo.metadata.id; | |
doc._rev = docInfo.metadata.rev; | |
if (newRevIsDeleted) { | |
doc._deleted = true; | |
} | |
var hasAttachments = doc._attachments && | |
Object.keys(doc._attachments).length; | |
if (hasAttachments) { | |
return writeAttachments(docInfo, winningRev, winningRevIsDeleted, | |
isUpdate, resultsIdx, callback); | |
} | |
finishDoc(docInfo, winningRev, winningRevIsDeleted, | |
isUpdate, resultsIdx, callback); | |
} | |
function autoCompact(docInfo) { | |
var revsToDelete = utils.compactTree(docInfo.metadata); | |
compactRevs(revsToDelete, docInfo.metadata.id, txn); | |
} | |
function finishDoc(docInfo, winningRev, winningRevIsDeleted, | |
isUpdate, resultsIdx, callback) { | |
var doc = docInfo.data; | |
var metadata = docInfo.metadata; | |
doc._doc_id_rev = metadata.id + '::' + metadata.rev; | |
delete doc._id; | |
delete doc._rev; | |
function afterPutDoc(e) { | |
if (isUpdate && api.auto_compaction) { | |
autoCompact(docInfo); | |
} | |
metadata.seq = e.target.result; | |
// Current _rev is calculated from _rev_tree on read | |
delete metadata.rev; | |
var metadataToStore = encodeMetadata(metadata, winningRev, | |
winningRevIsDeleted); | |
var metaDataReq = docStore.put(metadataToStore); | |
metaDataReq.onsuccess = afterPutMetadata; | |
} | |
function afterPutDocError(e) { | |
// ConstraintError, need to update, not put (see #1638 for details) | |
e.preventDefault(); // avoid transaction abort | |
e.stopPropagation(); // avoid transaction onerror | |
var index = bySeqStore.index('_doc_id_rev'); | |
var getKeyReq = index.getKey(doc._doc_id_rev); | |
getKeyReq.onsuccess = function (e) { | |
var putReq = bySeqStore.put(doc, e.target.result); | |
putReq.onsuccess = afterPutDoc; | |
}; | |
} | |
function afterPutMetadata() { | |
results[resultsIdx] = { | |
ok: true, | |
id: metadata.id, | |
rev: winningRev | |
}; | |
fetchedDocs.set(docInfo.metadata.id, docInfo.metadata); | |
insertAttachmentMappings(docInfo, metadata.seq, callback); | |
} | |
var putReq = bySeqStore.put(doc); | |
putReq.onsuccess = afterPutDoc; | |
putReq.onerror = afterPutDocError; | |
} | |
function writeAttachments(docInfo, winningRev, winningRevIsDeleted, | |
isUpdate, resultsIdx, callback) { | |
var doc = docInfo.data; | |
var numDone = 0; | |
var attachments = Object.keys(doc._attachments); | |
function collectResults() { | |
if (numDone === attachments.length) { | |
finishDoc(docInfo, winningRev, winningRevIsDeleted, | |
isUpdate, resultsIdx, callback); | |
} | |
} | |
function attachmentSaved() { | |
numDone++; | |
collectResults(); | |
} | |
attachments.forEach(function (key) { | |
var att = docInfo.data._attachments[key]; | |
if (!att.stub) { | |
var data = att.data; | |
delete att.data; | |
var digest = att.digest; | |
saveAttachment(digest, data, attachmentSaved); | |
} else { | |
numDone++; | |
collectResults(); | |
} | |
}); | |
} | |
// map seqs to attachment digests, which | |
// we will need later during compaction | |
function insertAttachmentMappings(docInfo, seq, callback) { | |
var attsAdded = 0; | |
var attsToAdd = Object.keys(docInfo.data._attachments || {}); | |
if (!attsToAdd.length) { | |
return callback(); | |
} | |
function checkDone() { | |
if (++attsAdded === attsToAdd.length) { | |
callback(); | |
} | |
} | |
function add(att) { | |
var digest = docInfo.data._attachments[att].digest; | |
var req = attachAndSeqStore.put({ | |
seq: seq, | |
digestSeq: digest + '::' + seq | |
}); | |
req.onsuccess = checkDone; | |
req.onerror = function (e) { | |
// this callback is for a constaint error, which we ignore | |
// because this docid/rev has already been associated with | |
// the digest (e.g. when new_edits == false) | |
e.preventDefault(); // avoid transaction abort | |
e.stopPropagation(); // avoid transaction onerror | |
checkDone(); | |
}; | |
} | |
for (var i = 0; i < attsToAdd.length; i++) { | |
add(attsToAdd[i]); // do in parallel | |
} | |
} | |
function saveAttachment(digest, data, callback) { | |
var getKeyReq = attachStore.count(digest); | |
getKeyReq.onsuccess = function(e) { | |
var count = e.target.result; | |
if (count) { | |
return callback(); // already exists | |
} | |
var newAtt = { | |
digest: digest, | |
body: data | |
}; | |
var putReq = attachStore.put(newAtt); | |
putReq.onsuccess = callback; | |
}; | |
} | |
} | |
module.exports = idbBulkDocs; | |
},{"../../deps/errors":101,"../../utils":119,"./idb-constants":88,"./idb-utils":89}],88:[function(require,module,exports){ | |
'use strict'; | |
// IndexedDB requires a versioned database structure, so we use the | |
// version here to manage migrations. | |
exports.ADAPTER_VERSION = 5; | |
// The object stores created for each database | |
// DOC_STORE stores the document meta data, its revision history and state | |
// Keyed by document id | |
exports. DOC_STORE = 'document-store'; | |
// BY_SEQ_STORE stores a particular version of a document, keyed by its | |
// sequence id | |
exports.BY_SEQ_STORE = 'by-sequence'; | |
// Where we store attachments | |
exports.ATTACH_STORE = 'attach-store'; | |
// Where we store many-to-many relations | |
// between attachment digests and seqs | |
exports.ATTACH_AND_SEQ_STORE = 'attach-seq-store'; | |
// Where we store database-wide meta data in a single record | |
// keyed by id: META_STORE | |
exports.META_STORE = 'meta-store'; | |
// Where we store local documents | |
exports.LOCAL_STORE = 'local-store'; | |
// Where we detect blob support | |
exports.DETECT_BLOB_SUPPORT_STORE = 'detect-blob-support'; | |
},{}],89:[function(require,module,exports){ | |
(function (process){ | |
'use strict'; | |
var errors = require('../../deps/errors'); | |
var utils = require('../../utils'); | |
var constants = require('./idb-constants'); | |
function tryCode(fun, that, args) { | |
try { | |
fun.apply(that, args); | |
} catch (err) { // shouldn't happen | |
if (typeof PouchDB !== 'undefined') { | |
PouchDB.emit('error', err); | |
} | |
} | |
} | |
exports.taskQueue = { | |
running: false, | |
queue: [] | |
}; | |
exports.applyNext = function () { | |
if (exports.taskQueue.running || !exports.taskQueue.queue.length) { | |
return; | |
} | |
exports.taskQueue.running = true; | |
var item = exports.taskQueue.queue.shift(); | |
item.action(function (err, res) { | |
tryCode(item.callback, this, [err, res]); | |
exports.taskQueue.running = false; | |
process.nextTick(exports.applyNext); | |
}); | |
}; | |
exports.idbError = function (callback) { | |
return function (event) { | |
var message = (event.target && event.target.error && | |
event.target.error.name) || event.target; | |
callback(errors.error(errors.IDB_ERROR, message, event.type)); | |
}; | |
}; | |
// Unfortunately, the metadata has to be stringified | |
// when it is put into the database, because otherwise | |
// IndexedDB can throw errors for deeply-nested objects. | |
// Originally we just used JSON.parse/JSON.stringify; now | |
// we use this custom vuvuzela library that avoids recursion. | |
// If we could do it all over again, we'd probably use a | |
// format for the revision trees other than JSON. | |
exports.encodeMetadata = function (metadata, winningRev, deleted) { | |
return { | |
data: utils.safeJsonStringify(metadata), | |
winningRev: winningRev, | |
deletedOrLocal: deleted ? '1' : '0', | |
seq: metadata.seq, // highest seq for this doc | |
id: metadata.id | |
}; | |
}; | |
exports.decodeMetadata = function (storedObject) { | |
if (!storedObject) { | |
return null; | |
} | |
var metadata = utils.safeJsonParse(storedObject.data); | |
metadata.winningRev = storedObject.winningRev; | |
metadata.deleted = storedObject.deletedOrLocal === '1'; | |
metadata.seq = storedObject.seq; | |
return metadata; | |
}; | |
// read the doc back out from the database. we don't store the | |
// _id or _rev because we already have _doc_id_rev. | |
exports.decodeDoc = function (doc) { | |
if (!doc) { | |
return doc; | |
} | |
var idx = utils.lastIndexOf(doc._doc_id_rev, ':'); | |
doc._id = doc._doc_id_rev.substring(0, idx - 1); | |
doc._rev = doc._doc_id_rev.substring(idx + 1); | |
delete doc._doc_id_rev; | |
return doc; | |
}; | |
// Read a blob from the database, encoding as necessary | |
// and translating from base64 if the IDB doesn't support | |
// native Blobs | |
exports.readBlobData = function (body, type, encode, callback) { | |
if (encode) { | |
if (!body) { | |
callback(''); | |
} else if (typeof body !== 'string') { // we have blob support | |
utils.readAsBinaryString(body, function (binary) { | |
callback(utils.btoa(binary)); | |
}); | |
} else { // no blob support | |
callback(body); | |
} | |
} else { | |
if (!body) { | |
callback(utils.createBlob([''], {type: type})); | |
} else if (typeof body !== 'string') { // we have blob support | |
callback(body); | |
} else { // no blob support | |
body = utils.fixBinary(atob(body)); | |
callback(utils.createBlob([body], {type: type})); | |
} | |
} | |
}; | |
exports.fetchAttachmentsIfNecessary = function (doc, opts, txn, cb) { | |
var attachments = Object.keys(doc._attachments || {}); | |
if (!attachments.length) { | |
return cb && cb(); | |
} | |
var numDone = 0; | |
function checkDone() { | |
if (++numDone === attachments.length && cb) { | |
cb(); | |
} | |
} | |
function fetchAttachment(doc, att) { | |
var attObj = doc._attachments[att]; | |
var digest = attObj.digest; | |
var req = txn.objectStore(constants.ATTACH_STORE).get(digest); | |
req.onsuccess = function (e) { | |
attObj.body = e.target.result.body; | |
checkDone(); | |
}; | |
} | |
attachments.forEach(function (att) { | |
if (opts.attachments && opts.include_docs) { | |
fetchAttachment(doc, att); | |
} else { | |
doc._attachments[att].stub = true; | |
checkDone(); | |
} | |
}); | |
}; | |
// IDB-specific postprocessing necessary because | |
// we don't know whether we stored a true Blob or | |
// a base64-encoded string, and if it's a Blob it | |
// needs to be read outside of the transaction context | |
exports.postProcessAttachments = function (results) { | |
return utils.Promise.all(results.map(function (row) { | |
if (row.doc && row.doc._attachments) { | |
var attNames = Object.keys(row.doc._attachments); | |
return utils.Promise.all(attNames.map(function (att) { | |
var attObj = row.doc._attachments[att]; | |
if (!('body' in attObj)) { // already processed | |
return; | |
} | |
var body = attObj.body; | |
var type = attObj.content_type; | |
return new utils.Promise(function (resolve) { | |
exports.readBlobData(body, type, true, function (base64) { | |
row.doc._attachments[att] = utils.extend( | |
utils.pick(attObj, ['digest', 'content_type']), | |
{data: base64} | |
); | |
resolve(); | |
}); | |
}); | |
})); | |
} | |
})); | |
}; | |
exports.compactRevs = function (revs, docId, txn) { | |
var possiblyOrphanedDigests = []; | |
var seqStore = txn.objectStore(constants.BY_SEQ_STORE); | |
var attStore = txn.objectStore(constants.ATTACH_STORE); | |
var attAndSeqStore = txn.objectStore(constants.ATTACH_AND_SEQ_STORE); | |
var count = revs.length; | |
function checkDone() { | |
count--; | |
if (!count) { // done processing all revs | |
deleteOrphanedAttachments(); | |
} | |
} | |
function deleteOrphanedAttachments() { | |
if (!possiblyOrphanedDigests.length) { | |
return; | |
} | |
possiblyOrphanedDigests.forEach(function (digest) { | |
var countReq = attAndSeqStore.index('digestSeq').count( | |
IDBKeyRange.bound( | |
digest + '::', digest + '::\uffff', false, false)); | |
countReq.onsuccess = function (e) { | |
var count = e.target.result; | |
if (!count) { | |
// orphaned | |
attStore.delete(digest); | |
} | |
}; | |
}); | |
} | |
revs.forEach(function (rev) { | |
var index = seqStore.index('_doc_id_rev'); | |
var key = docId + "::" + rev; | |
index.getKey(key).onsuccess = function (e) { | |
var seq = e.target.result; | |
if (typeof seq !== 'number') { | |
return checkDone(); | |
} | |
seqStore.delete(seq); | |
var cursor = attAndSeqStore.index('seq') | |
.openCursor(IDBKeyRange.only(seq)); | |
cursor.onsuccess = function (event) { | |
var cursor = event.target.result; | |
if (cursor) { | |
var digest = cursor.value.digestSeq.split('::')[0]; | |
possiblyOrphanedDigests.push(digest); | |
attAndSeqStore.delete(cursor.primaryKey); | |
cursor.continue(); | |
} else { // done | |
checkDone(); | |
} | |
}; | |
}; | |
}); | |
}; | |
exports.openTransactionSafely = function (idb, stores, mode) { | |
try { | |
return { | |
txn: idb.transaction(stores, mode) | |
}; | |
} catch (err) { | |
return { | |
error: err | |
}; | |
} | |
}; | |
}).call(this,require('_process')) | |
},{"../../deps/errors":101,"../../utils":119,"./idb-constants":88,"_process":81}],90:[function(require,module,exports){ | |
(function (process){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var merge = require('../../merge'); | |
var errors = require('../../deps/errors'); | |
var idbUtils = require('./idb-utils'); | |
var idbConstants = require('./idb-constants'); | |
var idbBulkDocs = require('./idb-bulk-docs'); | |
var idbAllDocs = require('./idb-all-docs'); | |
var checkBlobSupport = require('./idb-blob-support'); | |
var ADAPTER_VERSION = idbConstants.ADAPTER_VERSION; | |
var ATTACH_AND_SEQ_STORE = idbConstants.ATTACH_AND_SEQ_STORE; | |
var ATTACH_STORE = idbConstants.ATTACH_STORE; | |
var BY_SEQ_STORE = idbConstants.BY_SEQ_STORE; | |
var DETECT_BLOB_SUPPORT_STORE = idbConstants.DETECT_BLOB_SUPPORT_STORE; | |
var DOC_STORE = idbConstants.DOC_STORE; | |
var LOCAL_STORE = idbConstants.LOCAL_STORE; | |
var META_STORE = idbConstants.META_STORE; | |
var applyNext = idbUtils.applyNext; | |
var compactRevs = idbUtils.compactRevs; | |
var decodeDoc = idbUtils.decodeDoc; | |
var decodeMetadata = idbUtils.decodeMetadata; | |
var encodeMetadata = idbUtils.encodeMetadata; | |
var fetchAttachmentsIfNecessary = idbUtils.fetchAttachmentsIfNecessary; | |
var idbError = idbUtils.idbError; | |
var postProcessAttachments = idbUtils.postProcessAttachments; | |
var readBlobData = idbUtils.readBlobData; | |
var taskQueue = idbUtils.taskQueue; | |
var openTransactionSafely = idbUtils.openTransactionSafely; | |
var cachedDBs = {}; | |
var blobSupportPromise; | |
function IdbPouch(opts, callback) { | |
var api = this; | |
taskQueue.queue.push({ | |
action: function (thisCallback) { | |
init(api, opts, thisCallback); | |
}, | |
callback: callback | |
}); | |
applyNext(); | |
} | |
function init(api, opts, callback) { | |
var dbName = opts.name; | |
var idb = null; | |
api._meta = null; | |
// called when creating a fresh new database | |
function createSchema(db) { | |
var docStore = db.createObjectStore(DOC_STORE, {keyPath : 'id'}); | |
db.createObjectStore(BY_SEQ_STORE, {autoIncrement: true}) | |
.createIndex('_doc_id_rev', '_doc_id_rev', {unique: true}); | |
db.createObjectStore(ATTACH_STORE, {keyPath: 'digest'}); | |
db.createObjectStore(META_STORE, {keyPath: 'id', autoIncrement: false}); | |
db.createObjectStore(DETECT_BLOB_SUPPORT_STORE); | |
// added in v2 | |
docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false}); | |
// added in v3 | |
db.createObjectStore(LOCAL_STORE, {keyPath: '_id'}); | |
// added in v4 | |
var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE, | |
{autoIncrement: true}); | |
attAndSeqStore.createIndex('seq', 'seq'); | |
attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true}); | |
} | |
// migration to version 2 | |
// unfortunately "deletedOrLocal" is a misnomer now that we no longer | |
// store local docs in the main doc-store, but whaddyagonnado | |
function addDeletedOrLocalIndex(txn, callback) { | |
var docStore = txn.objectStore(DOC_STORE); | |
docStore.createIndex('deletedOrLocal', 'deletedOrLocal', {unique : false}); | |
docStore.openCursor().onsuccess = function (event) { | |
var cursor = event.target.result; | |
if (cursor) { | |
var metadata = cursor.value; | |
var deleted = utils.isDeleted(metadata); | |
metadata.deletedOrLocal = deleted ? "1" : "0"; | |
docStore.put(metadata); | |
cursor.continue(); | |
} else { | |
callback(); | |
} | |
}; | |
} | |
// migration to version 3 (part 1) | |
function createLocalStoreSchema(db) { | |
db.createObjectStore(LOCAL_STORE, {keyPath: '_id'}) | |
.createIndex('_doc_id_rev', '_doc_id_rev', {unique: true}); | |
} | |
// migration to version 3 (part 2) | |
function migrateLocalStore(txn, cb) { | |
var localStore = txn.objectStore(LOCAL_STORE); | |
var docStore = txn.objectStore(DOC_STORE); | |
var seqStore = txn.objectStore(BY_SEQ_STORE); | |
var cursor = docStore.openCursor(); | |
cursor.onsuccess = function (event) { | |
var cursor = event.target.result; | |
if (cursor) { | |
var metadata = cursor.value; | |
var docId = metadata.id; | |
var local = utils.isLocalId(docId); | |
var rev = merge.winningRev(metadata); | |
if (local) { | |
var docIdRev = docId + "::" + rev; | |
// remove all seq entries | |
// associated with this docId | |
var start = docId + "::"; | |
var end = docId + "::~"; | |
var index = seqStore.index('_doc_id_rev'); | |
var range = IDBKeyRange.bound(start, end, false, false); | |
var seqCursor = index.openCursor(range); | |
seqCursor.onsuccess = function (e) { | |
seqCursor = e.target.result; | |
if (!seqCursor) { | |
// done | |
docStore.delete(cursor.primaryKey); | |
cursor.continue(); | |
} else { | |
var data = seqCursor.value; | |
if (data._doc_id_rev === docIdRev) { | |
localStore.put(data); | |
} | |
seqStore.delete(seqCursor.primaryKey); | |
seqCursor.continue(); | |
} | |
}; | |
} else { | |
cursor.continue(); | |
} | |
} else if (cb) { | |
cb(); | |
} | |
}; | |
} | |
// migration to version 4 (part 1) | |
function addAttachAndSeqStore(db) { | |
var attAndSeqStore = db.createObjectStore(ATTACH_AND_SEQ_STORE, | |
{autoIncrement: true}); | |
attAndSeqStore.createIndex('seq', 'seq'); | |
attAndSeqStore.createIndex('digestSeq', 'digestSeq', {unique: true}); | |
} | |
// migration to version 4 (part 2) | |
function migrateAttsAndSeqs(txn, callback) { | |
var seqStore = txn.objectStore(BY_SEQ_STORE); | |
var attStore = txn.objectStore(ATTACH_STORE); | |
var attAndSeqStore = txn.objectStore(ATTACH_AND_SEQ_STORE); | |
// need to actually populate the table. this is the expensive part, | |
// so as an optimization, check first that this database even | |
// contains attachments | |
var req = attStore.count(); | |
req.onsuccess = function (e) { | |
var count = e.target.result; | |
if (!count) { | |
return callback(); // done | |
} | |
seqStore.openCursor().onsuccess = function (e) { | |
var cursor = e.target.result; | |
if (!cursor) { | |
return callback(); // done | |
} | |
var doc = cursor.value; | |
var seq = cursor.primaryKey; | |
var atts = Object.keys(doc._attachments || {}); | |
var digestMap = {}; | |
for (var j = 0; j < atts.length; j++) { | |
var att = doc._attachments[atts[j]]; | |
digestMap[att.digest] = true; // uniq digests, just in case | |
} | |
var digests = Object.keys(digestMap); | |
for (j = 0; j < digests.length; j++) { | |
var digest = digests[j]; | |
attAndSeqStore.put({ | |
seq: seq, | |
digestSeq: digest + '::' + seq | |
}); | |
} | |
cursor.continue(); | |
}; | |
}; | |
} | |
// migration to version 5 | |
// Instead of relying on on-the-fly migration of metadata, | |
// this brings the doc-store to its modern form: | |
// - metadata.winningrev | |
// - metadata.seq | |
// - stringify the metadata when storing it | |
function migrateMetadata(txn) { | |
function decodeMetadataCompat(storedObject) { | |
if (!storedObject.data) { | |
// old format, when we didn't store it stringified | |
storedObject.deleted = storedObject.deletedOrLocal === '1'; | |
return storedObject; | |
} | |
return decodeMetadata(storedObject); | |
} | |
// ensure that every metadata has a winningRev and seq, | |
// which was previously created on-the-fly but better to migrate | |
var bySeqStore = txn.objectStore(BY_SEQ_STORE); | |
var docStore = txn.objectStore(DOC_STORE); | |
var cursor = docStore.openCursor(); | |
cursor.onsuccess = function (e) { | |
var cursor = e.target.result; | |
if (!cursor) { | |
return; // done | |
} | |
var metadata = decodeMetadataCompat(cursor.value); | |
metadata.winningRev = metadata.winningRev || merge.winningRev(metadata); | |
function fetchMetadataSeq() { | |
// metadata.seq was added post-3.2.0, so if it's missing, | |
// we need to fetch it manually | |
var start = metadata.id + '::'; | |
var end = metadata.id + '::\uffff'; | |
var req = bySeqStore.index('_doc_id_rev').openCursor( | |
IDBKeyRange.bound(start, end)); | |
var metadataSeq = 0; | |
req.onsuccess = function (e) { | |
var cursor = e.target.result; | |
if (!cursor) { | |
metadata.seq = metadataSeq; | |
return onGetMetadataSeq(); | |
} | |
var seq = cursor.primaryKey; | |
if (seq > metadataSeq) { | |
metadataSeq = seq; | |
} | |
cursor.continue(); | |
}; | |
} | |
function onGetMetadataSeq() { | |
var metadataToStore = encodeMetadata(metadata, | |
metadata.winningRev, metadata.deleted); | |
var req = docStore.put(metadataToStore); | |
req.onsuccess = function () { | |
cursor.continue(); | |
}; | |
} | |
if (metadata.seq) { | |
return onGetMetadataSeq(); | |
} | |
fetchMetadataSeq(); | |
}; | |
} | |
api.type = function () { | |
return 'idb'; | |
}; | |
api._id = utils.toPromise(function (callback) { | |
callback(null, api._meta.instanceId); | |
}); | |
api._bulkDocs = function idb_bulkDocs(req, opts, callback) { | |
idbBulkDocs(req, opts, api, idb, IdbPouch.Changes, callback); | |
}; | |
// First we look up the metadata in the ids database, then we fetch the | |
// current revision(s) from the by sequence store | |
api._get = function idb_get(id, opts, callback) { | |
var doc; | |
var metadata; | |
var err; | |
var txn; | |
opts = utils.clone(opts); | |
if (opts.ctx) { | |
txn = opts.ctx; | |
} else { | |
var txnResult = openTransactionSafely(idb, | |
[DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
txn = txnResult.txn; | |
} | |
function finish() { | |
callback(err, {doc: doc, metadata: metadata, ctx: txn}); | |
} | |
txn.objectStore(DOC_STORE).get(id).onsuccess = function (e) { | |
metadata = decodeMetadata(e.target.result); | |
// we can determine the result here if: | |
// 1. there is no such document | |
// 2. the document is deleted and we don't ask about specific rev | |
// When we ask with opts.rev we expect the answer to be either | |
// doc (possibly with _deleted=true) or missing error | |
if (!metadata) { | |
err = errors.error(errors.MISSING_DOC, 'missing'); | |
return finish(); | |
} | |
if (utils.isDeleted(metadata) && !opts.rev) { | |
err = errors.error(errors.MISSING_DOC, "deleted"); | |
return finish(); | |
} | |
var objectStore = txn.objectStore(BY_SEQ_STORE); | |
var rev = opts.rev || metadata.winningRev; | |
var key = metadata.id + '::' + rev; | |
objectStore.index('_doc_id_rev').get(key).onsuccess = function (e) { | |
doc = e.target.result; | |
if (doc) { | |
doc = decodeDoc(doc); | |
} | |
if (!doc) { | |
err = errors.error(errors.MISSING_DOC, 'missing'); | |
return finish(); | |
} | |
finish(); | |
}; | |
}; | |
}; | |
api._getAttachment = function (attachment, opts, callback) { | |
var txn; | |
opts = utils.clone(opts); | |
if (opts.ctx) { | |
txn = opts.ctx; | |
} else { | |
var txnResult = openTransactionSafely(idb, | |
[DOC_STORE, BY_SEQ_STORE, ATTACH_STORE], 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
txn = txnResult.txn; | |
} | |
var digest = attachment.digest; | |
var type = attachment.content_type; | |
txn.objectStore(ATTACH_STORE).get(digest).onsuccess = function (e) { | |
var body = e.target.result.body; | |
readBlobData(body, type, opts.encode, function (blobData) { | |
callback(null, blobData); | |
}); | |
}; | |
}; | |
api._info = function idb_info(callback) { | |
if (idb === null || !cachedDBs[dbName]) { | |
var error = new Error('db isn\'t open'); | |
error.id = 'idbNull'; | |
return callback(error); | |
} | |
var updateSeq; | |
var docCount; | |
var txnResult = openTransactionSafely(idb, [BY_SEQ_STORE], 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var txn = txnResult.txn; | |
var cursor = txn.objectStore(BY_SEQ_STORE).openCursor(null, 'prev'); | |
cursor.onsuccess = function (event) { | |
var cursor = event.target.result; | |
updateSeq = cursor ? cursor.key : 0; | |
// count within the same txn for consistency | |
docCount = api._meta.docCount; | |
}; | |
txn.oncomplete = function () { | |
callback(null, { | |
doc_count: docCount, | |
update_seq: updateSeq, | |
// for debugging | |
idb_attachment_format: (api._meta.blobSupport ? 'binary' : 'base64') | |
}); | |
}; | |
}; | |
api._allDocs = function idb_allDocs(opts, callback) { | |
idbAllDocs(opts, api, idb, callback); | |
}; | |
api._changes = function (opts) { | |
opts = utils.clone(opts); | |
if (opts.continuous) { | |
var id = dbName + ':' + utils.uuid(); | |
IdbPouch.Changes.addListener(dbName, id, api, opts); | |
IdbPouch.Changes.notify(dbName); | |
return { | |
cancel: function () { | |
IdbPouch.Changes.removeListener(dbName, id); | |
} | |
}; | |
} | |
var docIds = opts.doc_ids && new utils.Set(opts.doc_ids); | |
var descending = opts.descending ? 'prev' : null; | |
opts.since = opts.since || 0; | |
var lastSeq = opts.since; | |
var limit = 'limit' in opts ? opts.limit : -1; | |
if (limit === 0) { | |
limit = 1; // per CouchDB _changes spec | |
} | |
var returnDocs; | |
if ('returnDocs' in opts) { | |
returnDocs = opts.returnDocs; | |
} else { | |
returnDocs = true; | |
} | |
var results = []; | |
var numResults = 0; | |
var filter = utils.filterChange(opts); | |
var docIdsToMetadata = new utils.Map(); | |
var txn; | |
var bySeqStore; | |
var docStore; | |
function onGetCursor(cursor) { | |
var doc = decodeDoc(cursor.value); | |
var seq = cursor.key; | |
if (docIds && !docIds.has(doc._id)) { | |
return cursor.continue(); | |
} | |
var metadata; | |
function onGetMetadata() { | |
if (metadata.seq !== seq) { | |
// some other seq is later | |
return cursor.continue(); | |
} | |
lastSeq = seq; | |
if (metadata.winningRev === doc._rev) { | |
return onGetWinningDoc(doc); | |
} | |
fetchWinningDoc(); | |
} | |
function fetchWinningDoc() { | |
var docIdRev = doc._id + '::' + metadata.winningRev; | |
var req = bySeqStore.index('_doc_id_rev').openCursor( | |
IDBKeyRange.bound(docIdRev, docIdRev + '\uffff')); | |
req.onsuccess = function (e) { | |
onGetWinningDoc(decodeDoc(e.target.result.value)); | |
}; | |
} | |
function onGetWinningDoc(winningDoc) { | |
var change = opts.processChange(winningDoc, metadata, opts); | |
change.seq = metadata.seq; | |
if (filter(change)) { | |
numResults++; | |
if (returnDocs) { | |
results.push(change); | |
} | |
// process the attachment immediately | |
// for the benefit of live listeners | |
if (opts.attachments && opts.include_docs) { | |
fetchAttachmentsIfNecessary(winningDoc, opts, txn, function () { | |
postProcessAttachments([change]).then(function () { | |
opts.onChange(change); | |
}); | |
}); | |
} else { | |
opts.onChange(change); | |
} | |
} | |
if (numResults !== limit) { | |
cursor.continue(); | |
} | |
} | |
metadata = docIdsToMetadata.get(doc._id); | |
if (metadata) { // cached | |
return onGetMetadata(); | |
} | |
// metadata not cached, have to go fetch it | |
docStore.get(doc._id).onsuccess = function (event) { | |
metadata = decodeMetadata(event.target.result); | |
docIdsToMetadata.set(doc._id, metadata); | |
onGetMetadata(); | |
}; | |
} | |
function onsuccess(event) { | |
var cursor = event.target.result; | |
if (!cursor) { | |
return; | |
} | |
onGetCursor(cursor); | |
} | |
function fetchChanges() { | |
var objectStores = [DOC_STORE, BY_SEQ_STORE]; | |
if (opts.attachments) { | |
objectStores.push(ATTACH_STORE); | |
} | |
var txnResult = openTransactionSafely(idb, objectStores, 'readonly'); | |
if (txnResult.error) { | |
return opts.complete(txnResult.error); | |
} | |
txn = txnResult.txn; | |
txn.onerror = idbError(opts.complete); | |
txn.oncomplete = onTxnComplete; | |
bySeqStore = txn.objectStore(BY_SEQ_STORE); | |
docStore = txn.objectStore(DOC_STORE); | |
var req; | |
if (descending) { | |
req = bySeqStore.openCursor( | |
null, descending); | |
} else { | |
req = bySeqStore.openCursor( | |
IDBKeyRange.lowerBound(opts.since, true)); | |
} | |
req.onsuccess = onsuccess; | |
} | |
fetchChanges(); | |
function onTxnComplete() { | |
function finish() { | |
opts.complete(null, { | |
results: results, | |
last_seq: lastSeq | |
}); | |
} | |
if (!opts.continuous && opts.attachments) { | |
// cannot guarantee that postProcessing was already done, | |
// so do it again | |
postProcessAttachments(results).then(finish); | |
} else { | |
finish(); | |
} | |
} | |
}; | |
api._close = function (callback) { | |
if (idb === null) { | |
return callback(errors.error(errors.NOT_OPEN)); | |
} | |
// https://developer.mozilla.org/en-US/docs/IndexedDB/IDBDatabase#close | |
// "Returns immediately and closes the connection in a separate thread..." | |
idb.close(); | |
delete cachedDBs[dbName]; | |
idb = null; | |
callback(); | |
}; | |
api._getRevisionTree = function (docId, callback) { | |
var txnResult = openTransactionSafely(idb, [DOC_STORE], 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var txn = txnResult.txn; | |
var req = txn.objectStore(DOC_STORE).get(docId); | |
req.onsuccess = function (event) { | |
var doc = decodeMetadata(event.target.result); | |
if (!doc) { | |
callback(errors.error(errors.MISSING_DOC)); | |
} else { | |
callback(null, doc.rev_tree); | |
} | |
}; | |
}; | |
// This function removes revisions of document docId | |
// which are listed in revs and sets this document | |
// revision to to rev_tree | |
api._doCompaction = function (docId, revs, callback) { | |
var stores = [ | |
DOC_STORE, | |
BY_SEQ_STORE, | |
ATTACH_STORE, | |
ATTACH_AND_SEQ_STORE | |
]; | |
var txnResult = openTransactionSafely(idb, stores, 'readwrite'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var txn = txnResult.txn; | |
var docStore = txn.objectStore(DOC_STORE); | |
docStore.get(docId).onsuccess = function (event) { | |
var metadata = decodeMetadata(event.target.result); | |
merge.traverseRevTree(metadata.rev_tree, function (isLeaf, pos, | |
revHash, ctx, opts) { | |
var rev = pos + '-' + revHash; | |
if (revs.indexOf(rev) !== -1) { | |
opts.status = 'missing'; | |
} | |
}); | |
compactRevs(revs, docId, txn); | |
var winningRev = metadata.winningRev; | |
var deleted = metadata.deleted; | |
txn.objectStore(DOC_STORE).put( | |
encodeMetadata(metadata, winningRev, deleted)); | |
}; | |
txn.onerror = idbError(callback); | |
txn.oncomplete = function () { | |
utils.call(callback); | |
}; | |
}; | |
api._getLocal = function (id, callback) { | |
var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readonly'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var tx = txnResult.txn; | |
var req = tx.objectStore(LOCAL_STORE).get(id); | |
req.onerror = idbError(callback); | |
req.onsuccess = function (e) { | |
var doc = e.target.result; | |
if (!doc) { | |
callback(errors.error(errors.MISSING_DOC)); | |
} else { | |
delete doc['_doc_id_rev']; // for backwards compat | |
callback(null, doc); | |
} | |
}; | |
}; | |
api._putLocal = function (doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
delete doc._revisions; // ignore this, trust the rev | |
var oldRev = doc._rev; | |
var id = doc._id; | |
if (!oldRev) { | |
doc._rev = '0-1'; | |
} else { | |
doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1); | |
} | |
var tx = opts.ctx; | |
var ret; | |
if (!tx) { | |
var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
tx = txnResult.txn; | |
tx.onerror = idbError(callback); | |
tx.oncomplete = function () { | |
if (ret) { | |
callback(null, ret); | |
} | |
}; | |
} | |
var oStore = tx.objectStore(LOCAL_STORE); | |
var req; | |
if (oldRev) { | |
req = oStore.get(id); | |
req.onsuccess = function (e) { | |
var oldDoc = e.target.result; | |
if (!oldDoc || oldDoc._rev !== oldRev) { | |
callback(errors.error(errors.REV_CONFLICT)); | |
} else { // update | |
var req = oStore.put(doc); | |
req.onsuccess = function () { | |
ret = {ok: true, id: doc._id, rev: doc._rev}; | |
if (opts.ctx) { // return immediately | |
callback(null, ret); | |
} | |
}; | |
} | |
}; | |
} else { // new doc | |
req = oStore.add(doc); | |
req.onerror = function (e) { | |
// constraint error, already exists | |
callback(errors.error(errors.REV_CONFLICT)); | |
e.preventDefault(); // avoid transaction abort | |
e.stopPropagation(); // avoid transaction onerror | |
}; | |
req.onsuccess = function () { | |
ret = {ok: true, id: doc._id, rev: doc._rev}; | |
if (opts.ctx) { // return immediately | |
callback(null, ret); | |
} | |
}; | |
} | |
}; | |
api._removeLocal = function (doc, callback) { | |
var txnResult = openTransactionSafely(idb, [LOCAL_STORE], 'readwrite'); | |
if (txnResult.error) { | |
return callback(txnResult.error); | |
} | |
var tx = txnResult.txn; | |
var ret; | |
tx.oncomplete = function () { | |
if (ret) { | |
callback(null, ret); | |
} | |
}; | |
var id = doc._id; | |
var oStore = tx.objectStore(LOCAL_STORE); | |
var req = oStore.get(id); | |
req.onerror = idbError(callback); | |
req.onsuccess = function (e) { | |
var oldDoc = e.target.result; | |
if (!oldDoc || oldDoc._rev !== doc._rev) { | |
callback(errors.error(errors.MISSING_DOC)); | |
} else { | |
oStore.delete(id); | |
ret = {ok: true, id: id, rev: '0-0'}; | |
} | |
}; | |
}; | |
api._destroy = function (callback) { | |
IdbPouch.Changes.removeAllListeners(dbName); | |
//Close open request for "dbName" database to fix ie delay. | |
if (IdbPouch.openReqList[dbName] && IdbPouch.openReqList[dbName].result) { | |
IdbPouch.openReqList[dbName].result.close(); | |
delete cachedDBs[dbName]; | |
} | |
var req = indexedDB.deleteDatabase(dbName); | |
req.onsuccess = function () { | |
//Remove open request from the list. | |
if (IdbPouch.openReqList[dbName]) { | |
IdbPouch.openReqList[dbName] = null; | |
} | |
if (utils.hasLocalStorage() && (dbName in localStorage)) { | |
delete localStorage[dbName]; | |
} | |
callback(null, { 'ok': true }); | |
}; | |
req.onerror = idbError(callback); | |
}; | |
var cached = cachedDBs[dbName]; | |
if (cached) { | |
idb = cached.idb; | |
api._meta = cached.global; | |
process.nextTick(function () { | |
callback(null, api); | |
}); | |
return; | |
} | |
var req = indexedDB.open(dbName, ADAPTER_VERSION); | |
if (!('openReqList' in IdbPouch)) { | |
IdbPouch.openReqList = {}; | |
} | |
IdbPouch.openReqList[dbName] = req; | |
req.onupgradeneeded = function (e) { | |
var db = e.target.result; | |
if (e.oldVersion < 1) { | |
return createSchema(db); // new db, initial schema | |
} | |
// do migrations | |
var txn = e.currentTarget.transaction; | |
// these migrations have to be done in this function, before | |
// control is returned to the event loop, because IndexedDB | |
if (e.oldVersion < 3) { | |
createLocalStoreSchema(db); // v2 -> v3 | |
} | |
if (e.oldVersion < 4) { | |
addAttachAndSeqStore(db); // v3 -> v4 | |
} | |
var migrations = [ | |
addDeletedOrLocalIndex, // v1 -> v2 | |
migrateLocalStore, // v2 -> v3 | |
migrateAttsAndSeqs, // v3 -> v4 | |
migrateMetadata // v4 -> v5 | |
]; | |
var i = e.oldVersion; | |
function next() { | |
var migration = migrations[i - 1]; | |
i++; | |
if (migration) { | |
migration(txn, next); | |
} | |
} | |
next(); | |
}; | |
req.onsuccess = function (e) { | |
idb = e.target.result; | |
idb.onversionchange = function () { | |
idb.close(); | |
delete cachedDBs[dbName]; | |
}; | |
idb.onabort = function () { | |
idb.close(); | |
delete cachedDBs[dbName]; | |
}; | |
var txn = idb.transaction([ | |
META_STORE, | |
DETECT_BLOB_SUPPORT_STORE, | |
DOC_STORE | |
], 'readwrite'); | |
var req = txn.objectStore(META_STORE).get(META_STORE); | |
var blobSupport = null; | |
var docCount = null; | |
var instanceId = null; | |
req.onsuccess = function (e) { | |
var checkSetupComplete = function () { | |
if (blobSupport === null || docCount === null || | |
instanceId === null) { | |
return; | |
} else { | |
api._meta = { | |
name: dbName, | |
instanceId: instanceId, | |
blobSupport: blobSupport, | |
docCount: docCount | |
}; | |
cachedDBs[dbName] = { | |
idb: idb, | |
global: api._meta | |
}; | |
callback(null, api); | |
} | |
}; | |
// | |
// fetch/store the id | |
// | |
var meta = e.target.result || {id: META_STORE}; | |
if (dbName + '_id' in meta) { | |
instanceId = meta[dbName + '_id']; | |
checkSetupComplete(); | |
} else { | |
instanceId = utils.uuid(); | |
meta[dbName + '_id'] = instanceId; | |
txn.objectStore(META_STORE).put(meta).onsuccess = function () { | |
checkSetupComplete(); | |
}; | |
} | |
// | |
// check blob support | |
// | |
if (!blobSupportPromise) { | |
// make sure blob support is only checked once | |
blobSupportPromise = checkBlobSupport(txn, idb); | |
} | |
blobSupportPromise.then(function (val) { | |
blobSupport = val; | |
checkSetupComplete(); | |
}); | |
// | |
// count docs | |
// | |
var index = txn.objectStore(DOC_STORE).index('deletedOrLocal'); | |
index.count(IDBKeyRange.only('0')).onsuccess = function (e) { | |
docCount = e.target.result; | |
checkSetupComplete(); | |
}; | |
}; | |
}; | |
req.onerror = idbError(callback); | |
} | |
IdbPouch.valid = function () { | |
// Issue #2533, we finally gave up on doing bug | |
// detection instead of browser sniffing. Safari brought us | |
// to our knees. | |
var isSafari = typeof openDatabase !== 'undefined' && | |
/(Safari|iPhone|iPad|iPod)/.test(navigator.userAgent) && | |
!/Chrome/.test(navigator.userAgent) && | |
!/BlackBerry/.test(navigator.platform); | |
// some outdated implementations of IDB that appear on Samsung | |
// and HTC Android devices <4.4 are missing IDBKeyRange | |
return !isSafari && typeof indexedDB !== 'undefined' && | |
typeof IDBKeyRange !== 'undefined'; | |
}; | |
IdbPouch.Changes = new utils.Changes(); | |
module.exports = IdbPouch; | |
}).call(this,require('_process')) | |
},{"../../deps/errors":101,"../../merge":114,"../../utils":119,"./idb-all-docs":85,"./idb-blob-support":86,"./idb-bulk-docs":87,"./idb-constants":88,"./idb-utils":89,"_process":81}],91:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var errors = require('../../deps/errors'); | |
var websqlUtils = require('./websql-utils'); | |
var websqlConstants = require('./websql-constants'); | |
var DOC_STORE = websqlConstants.DOC_STORE; | |
var BY_SEQ_STORE = websqlConstants.BY_SEQ_STORE; | |
var ATTACH_STORE = websqlConstants.ATTACH_STORE; | |
var ATTACH_AND_SEQ_STORE = websqlConstants.ATTACH_AND_SEQ_STORE; | |
var select = websqlUtils.select; | |
var stringifyDoc = websqlUtils.stringifyDoc; | |
var compactRevs = websqlUtils.compactRevs; | |
var unknownError = websqlUtils.unknownError; | |
function websqlBulkDocs(req, opts, api, db, Changes, callback) { | |
var newEdits = opts.new_edits; | |
var userDocs = req.docs; | |
// Parse the docs, give them a sequence number for the result | |
var docInfos = userDocs.map(function (doc) { | |
if (doc._id && utils.isLocalId(doc._id)) { | |
return doc; | |
} | |
var newDoc = utils.parseDoc(doc, newEdits); | |
return newDoc; | |
}); | |
var docInfoErrors = docInfos.filter(function (docInfo) { | |
return docInfo.error; | |
}); | |
if (docInfoErrors.length) { | |
return callback(docInfoErrors[0]); | |
} | |
var tx; | |
var results = new Array(docInfos.length); | |
var fetchedDocs = new utils.Map(); | |
var preconditionErrored; | |
function complete() { | |
if (preconditionErrored) { | |
return callback(preconditionErrored); | |
} | |
Changes.notify(api._name); | |
api._docCount = -1; // invalidate | |
callback(null, results); | |
} | |
function verifyAttachment(digest, callback) { | |
var sql = 'SELECT count(*) as cnt FROM ' + ATTACH_STORE + | |
' WHERE digest=?'; | |
tx.executeSql(sql, [digest], function (tx, result) { | |
if (result.rows.item(0).cnt === 0) { | |
var err = errors.error(errors.MISSING_STUB, | |
'unknown stub attachment with digest ' + | |
digest); | |
callback(err); | |
} else { | |
callback(); | |
} | |
}); | |
} | |
function verifyAttachments(finish) { | |
var digests = []; | |
docInfos.forEach(function (docInfo) { | |
if (docInfo.data && docInfo.data._attachments) { | |
Object.keys(docInfo.data._attachments).forEach(function (filename) { | |
var att = docInfo.data._attachments[filename]; | |
if (att.stub) { | |
digests.push(att.digest); | |
} | |
}); | |
} | |
}); | |
if (!digests.length) { | |
return finish(); | |
} | |
var numDone = 0; | |
var err; | |
function checkDone() { | |
if (++numDone === digests.length) { | |
finish(err); | |
} | |
} | |
digests.forEach(function (digest) { | |
verifyAttachment(digest, function (attErr) { | |
if (attErr && !err) { | |
err = attErr; | |
} | |
checkDone(); | |
}); | |
}); | |
} | |
function writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted, | |
isUpdate, delta, resultsIdx, callback) { | |
function finish() { | |
var data = docInfo.data; | |
var deletedInt = newRevIsDeleted ? 1 : 0; | |
var id = data._id; | |
var rev = data._rev; | |
var json = stringifyDoc(data); | |
var sql = 'INSERT INTO ' + BY_SEQ_STORE + | |
' (doc_id, rev, json, deleted) VALUES (?, ?, ?, ?);'; | |
var sqlArgs = [id, rev, json, deletedInt]; | |
// map seqs to attachment digests, which | |
// we will need later during compaction | |
function insertAttachmentMappings(seq, callback) { | |
var attsAdded = 0; | |
var attsToAdd = Object.keys(data._attachments || {}); | |
if (!attsToAdd.length) { | |
return callback(); | |
} | |
function checkDone() { | |
if (++attsAdded === attsToAdd.length) { | |
callback(); | |
} | |
return false; // ack handling a constraint error | |
} | |
function add(att) { | |
var sql = 'INSERT INTO ' + ATTACH_AND_SEQ_STORE + | |
' (digest, seq) VALUES (?,?)'; | |
var sqlArgs = [data._attachments[att].digest, seq]; | |
tx.executeSql(sql, sqlArgs, checkDone, checkDone); | |
// second callback is for a constaint error, which we ignore | |
// because this docid/rev has already been associated with | |
// the digest (e.g. when new_edits == false) | |
} | |
for (var i = 0; i < attsToAdd.length; i++) { | |
add(attsToAdd[i]); // do in parallel | |
} | |
} | |
tx.executeSql(sql, sqlArgs, function (tx, result) { | |
var seq = result.insertId; | |
insertAttachmentMappings(seq, function () { | |
dataWritten(tx, seq); | |
}); | |
}, function () { | |
// constraint error, recover by updating instead (see #1638) | |
var fetchSql = select('seq', BY_SEQ_STORE, null, | |
'doc_id=? AND rev=?'); | |
tx.executeSql(fetchSql, [id, rev], function (tx, res) { | |
var seq = res.rows.item(0).seq; | |
var sql = 'UPDATE ' + BY_SEQ_STORE + | |
' SET json=?, deleted=? WHERE doc_id=? AND rev=?;'; | |
var sqlArgs = [json, deletedInt, id, rev]; | |
tx.executeSql(sql, sqlArgs, function (tx) { | |
insertAttachmentMappings(seq, function () { | |
dataWritten(tx, seq); | |
}); | |
}); | |
}); | |
return false; // ack that we've handled the error | |
}); | |
} | |
function collectResults(attachmentErr) { | |
if (!err) { | |
if (attachmentErr) { | |
err = attachmentErr; | |
callback(err); | |
} else if (recv === attachments.length) { | |
finish(); | |
} | |
} | |
} | |
var err = null; | |
var recv = 0; | |
docInfo.data._id = docInfo.metadata.id; | |
docInfo.data._rev = docInfo.metadata.rev; | |
var attachments = Object.keys(docInfo.data._attachments || {}); | |
if (newRevIsDeleted) { | |
docInfo.data._deleted = true; | |
} | |
function attachmentSaved(err) { | |
recv++; | |
collectResults(err); | |
} | |
attachments.forEach(function (key) { | |
var att = docInfo.data._attachments[key]; | |
if (!att.stub) { | |
var data = att.data; | |
delete att.data; | |
var digest = att.digest; | |
saveAttachment(digest, data, attachmentSaved); | |
} else { | |
recv++; | |
collectResults(); | |
} | |
}); | |
if (!attachments.length) { | |
finish(); | |
} | |
function autoCompact() { | |
if (!isUpdate || !api.auto_compaction) { | |
return; // nothing to do | |
} | |
var id = docInfo.metadata.id; | |
var revsToDelete = utils.compactTree(docInfo.metadata); | |
compactRevs(revsToDelete, id, tx); | |
} | |
function dataWritten(tx, seq) { | |
autoCompact(); | |
docInfo.metadata.seq = seq; | |
delete docInfo.metadata.rev; | |
var sql = isUpdate ? | |
'UPDATE ' + DOC_STORE + | |
' SET json=?, max_seq=?, winningseq=' + | |
'(SELECT seq FROM ' + BY_SEQ_STORE + | |
' WHERE doc_id=' + DOC_STORE + '.id AND rev=?) WHERE id=?' | |
: 'INSERT INTO ' + DOC_STORE + | |
' (id, winningseq, max_seq, json) VALUES (?,?,?,?);'; | |
var metadataStr = utils.safeJsonStringify(docInfo.metadata); | |
var id = docInfo.metadata.id; | |
var params = isUpdate ? | |
[metadataStr, seq, winningRev, id] : | |
[id, seq, seq, metadataStr]; | |
tx.executeSql(sql, params, function () { | |
results[resultsIdx] = { | |
ok: true, | |
id: docInfo.metadata.id, | |
rev: winningRev | |
}; | |
fetchedDocs.set(id, docInfo.metadata); | |
callback(); | |
}); | |
} | |
} | |
function processDocs() { | |
utils.processDocs(docInfos, api, fetchedDocs, | |
tx, results, writeDoc, opts); | |
} | |
function fetchExistingDocs(callback) { | |
if (!docInfos.length) { | |
return callback(); | |
} | |
var numFetched = 0; | |
function checkDone() { | |
if (++numFetched === docInfos.length) { | |
callback(); | |
} | |
} | |
docInfos.forEach(function (docInfo) { | |
if (docInfo._id && utils.isLocalId(docInfo._id)) { | |
return checkDone(); // skip local docs | |
} | |
var id = docInfo.metadata.id; | |
tx.executeSql('SELECT json FROM ' + DOC_STORE + | |
' WHERE id = ?', [id], function (tx, result) { | |
if (result.rows.length) { | |
var metadata = utils.safeJsonParse(result.rows.item(0).json); | |
fetchedDocs.set(id, metadata); | |
} | |
checkDone(); | |
}); | |
}); | |
} | |
function saveAttachment(digest, data, callback) { | |
var sql = 'SELECT digest FROM ' + ATTACH_STORE + ' WHERE digest=?'; | |
tx.executeSql(sql, [digest], function (tx, result) { | |
if (result.rows.length) { // attachment already exists | |
return callback(); | |
} | |
// we could just insert before selecting and catch the error, | |
// but my hunch is that it's cheaper not to serialize the blob | |
// from JS to C if we don't have to (TODO: confirm this) | |
sql = 'INSERT INTO ' + ATTACH_STORE + | |
' (digest, body, escaped) VALUES (?,?,1)'; | |
tx.executeSql(sql, [digest, websqlUtils.escapeBlob(data)], function () { | |
callback(); | |
}, function () { | |
// ignore constaint errors, means it already exists | |
callback(); | |
return false; // ack we handled the error | |
}); | |
}); | |
} | |
utils.preprocessAttachments(docInfos, 'binary', function (err) { | |
if (err) { | |
return callback(err); | |
} | |
db.transaction(function (txn) { | |
tx = txn; | |
verifyAttachments(function (err) { | |
if (err) { | |
preconditionErrored = err; | |
} else { | |
fetchExistingDocs(processDocs); | |
} | |
}); | |
}, unknownError(callback), complete); | |
}); | |
} | |
module.exports = websqlBulkDocs; | |
},{"../../deps/errors":101,"../../utils":119,"./websql-constants":92,"./websql-utils":93}],92:[function(require,module,exports){ | |
'use strict'; | |
function quote(str) { | |
return "'" + str + "'"; | |
} | |
exports.ADAPTER_VERSION = 7; // used to manage migrations | |
// The object stores created for each database | |
// DOC_STORE stores the document meta data, its revision history and state | |
exports.DOC_STORE = quote('document-store'); | |
// BY_SEQ_STORE stores a particular version of a document, keyed by its | |
// sequence id | |
exports.BY_SEQ_STORE = quote('by-sequence'); | |
// Where we store attachments | |
exports.ATTACH_STORE = quote('attach-store'); | |
exports.LOCAL_STORE = quote('local-store'); | |
exports.META_STORE = quote('metadata-store'); | |
// where we store many-to-many relations between attachment | |
// digests and seqs | |
exports.ATTACH_AND_SEQ_STORE = quote('attach-seq-store'); | |
},{}],93:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var errors = require('../../deps/errors'); | |
var websqlConstants = require('./websql-constants'); | |
var BY_SEQ_STORE = websqlConstants.BY_SEQ_STORE; | |
var ATTACH_STORE = websqlConstants.ATTACH_STORE; | |
var ATTACH_AND_SEQ_STORE = websqlConstants.ATTACH_AND_SEQ_STORE; | |
// escapeBlob and unescapeBlob are workarounds for a websql bug: | |
// https://code.google.com/p/chromium/issues/detail?id=422690 | |
// https://bugs.webkit.org/show_bug.cgi?id=137637 | |
// The goal is to never actually insert the \u0000 character | |
// in the database. | |
function escapeBlob(str) { | |
return str | |
.replace(/\u0002/g, '\u0002\u0002') | |
.replace(/\u0001/g, '\u0001\u0002') | |
.replace(/\u0000/g, '\u0001\u0001'); | |
} | |
function unescapeBlob(str) { | |
return str | |
.replace(/\u0001\u0001/g, '\u0000') | |
.replace(/\u0001\u0002/g, '\u0001') | |
.replace(/\u0002\u0002/g, '\u0002'); | |
} | |
function stringifyDoc(doc) { | |
// don't bother storing the id/rev. it uses lots of space, | |
// in persistent map/reduce especially | |
delete doc._id; | |
delete doc._rev; | |
return JSON.stringify(doc); | |
} | |
function unstringifyDoc(doc, id, rev) { | |
doc = JSON.parse(doc); | |
doc._id = id; | |
doc._rev = rev; | |
return doc; | |
} | |
// question mark groups IN queries, e.g. 3 -> '(?,?,?)' | |
function qMarks(num) { | |
var s = '('; | |
while (num--) { | |
s += '?'; | |
if (num) { | |
s += ','; | |
} | |
} | |
return s + ')'; | |
} | |
function select(selector, table, joiner, where, orderBy) { | |
return 'SELECT ' + selector + ' FROM ' + | |
(typeof table === 'string' ? table : table.join(' JOIN ')) + | |
(joiner ? (' ON ' + joiner) : '') + | |
(where ? (' WHERE ' + | |
(typeof where === 'string' ? where : where.join(' AND '))) : '') + | |
(orderBy ? (' ORDER BY ' + orderBy) : ''); | |
} | |
function compactRevs(revs, docId, tx) { | |
if (!revs.length) { | |
return; | |
} | |
var numDone = 0; | |
var seqs = []; | |
function checkDone() { | |
if (++numDone === revs.length) { // done | |
deleteOrphans(); | |
} | |
} | |
function deleteOrphans() { | |
// find orphaned attachment digests | |
if (!seqs.length) { | |
return; | |
} | |
var sql = 'SELECT DISTINCT digest AS digest FROM ' + | |
ATTACH_AND_SEQ_STORE + ' WHERE seq IN ' + qMarks(seqs.length); | |
tx.executeSql(sql, seqs, function (tx, res) { | |
var digestsToCheck = []; | |
for (var i = 0; i < res.rows.length; i++) { | |
digestsToCheck.push(res.rows.item(i).digest); | |
} | |
if (!digestsToCheck.length) { | |
return; | |
} | |
var sql = 'DELETE FROM ' + ATTACH_AND_SEQ_STORE + | |
' WHERE seq IN (' + | |
seqs.map(function () { return '?'; }).join(',') + | |
')'; | |
tx.executeSql(sql, seqs, function (tx) { | |
var sql = 'SELECT digest FROM ' + ATTACH_AND_SEQ_STORE + | |
' WHERE digest IN (' + | |
digestsToCheck.map(function () { return '?'; }).join(',') + | |
')'; | |
tx.executeSql(sql, digestsToCheck, function (tx, res) { | |
var nonOrphanedDigests = new utils.Set(); | |
for (var i = 0; i < res.rows.length; i++) { | |
nonOrphanedDigests.add(res.rows.item(i).digest); | |
} | |
digestsToCheck.forEach(function (digest) { | |
if (nonOrphanedDigests.has(digest)) { | |
return; | |
} | |
tx.executeSql( | |
'DELETE FROM ' + ATTACH_AND_SEQ_STORE + ' WHERE digest=?', | |
[digest]); | |
tx.executeSql( | |
'DELETE FROM ' + ATTACH_STORE + ' WHERE digest=?', [digest]); | |
}); | |
}); | |
}); | |
}); | |
} | |
// update by-seq and attach stores in parallel | |
revs.forEach(function (rev) { | |
var sql = 'SELECT seq FROM ' + BY_SEQ_STORE + | |
' WHERE doc_id=? AND rev=?'; | |
tx.executeSql(sql, [docId, rev], function (tx, res) { | |
if (!res.rows.length) { // already deleted | |
return checkDone(); | |
} | |
var seq = res.rows.item(0).seq; | |
seqs.push(seq); | |
tx.executeSql( | |
'DELETE FROM ' + BY_SEQ_STORE + ' WHERE seq=?', [seq], checkDone); | |
}); | |
}); | |
} | |
function unknownError(callback) { | |
return function (event) { | |
// event may actually be a SQLError object, so report is as such | |
var errorNameMatch = event && event.constructor.toString() | |
.match(/function ([^\(]+)/); | |
var errorName = (errorNameMatch && errorNameMatch[1]) || event.type; | |
var errorReason = event.target || event.message; | |
callback(errors.error(errors.WSQ_ERROR, errorReason, errorName)); | |
}; | |
} | |
function getSize(opts) { | |
if ('size' in opts) { | |
// triggers immediate popup in iOS, fixes #2347 | |
// e.g. 5000001 asks for 5 MB, 10000001 asks for 10 MB, | |
return opts.size * 1000000; | |
} | |
// In iOS, doesn't matter as long as it's <= 5000000. | |
// Except that if you request too much, our tests fail | |
// because of the native "do you accept?" popup. | |
// In Android <=4.3, this value is actually used as an | |
// honest-to-god ceiling for data, so we need to | |
// set it to a decently high number. | |
var isAndroid = /Android/.test(window.navigator.userAgent); | |
return isAndroid ? 5000000 : 1; // in PhantomJS, if you use 0 it will crash | |
} | |
function createOpenDBFunction() { | |
if (typeof sqlitePlugin !== 'undefined') { | |
// The SQLite Plugin started deviating pretty heavily from the | |
// standard openDatabase() function, as they started adding more features. | |
// It's better to just use their "new" format and pass in a big ol' | |
// options object. | |
return sqlitePlugin.openDatabase.bind(sqlitePlugin); | |
} | |
if (typeof openDatabase !== 'undefined') { | |
return function openDB(opts) { | |
// Traditional WebSQL API | |
return openDatabase(opts.name, opts.version, opts.description, opts.size); | |
}; | |
} | |
} | |
var cachedDatabases = {}; | |
function openDB(opts) { | |
var openDBFunction = createOpenDBFunction(); | |
var db = cachedDatabases[opts.name]; | |
if (!db) { | |
db = cachedDatabases[opts.name] = openDBFunction(opts); | |
db._sqlitePlugin = typeof sqlitePlugin !== 'undefined'; | |
} | |
return db; | |
} | |
function valid() { | |
// SQLitePlugin leaks this global object, which we can use | |
// to detect if it's installed or not. The benefit is that it's | |
// declared immediately, before the 'deviceready' event has fired. | |
return typeof openDatabase !== 'undefined' || | |
typeof SQLitePlugin !== 'undefined'; | |
} | |
module.exports = { | |
escapeBlob: escapeBlob, | |
unescapeBlob: unescapeBlob, | |
stringifyDoc: stringifyDoc, | |
unstringifyDoc: unstringifyDoc, | |
qMarks: qMarks, | |
select: select, | |
compactRevs: compactRevs, | |
unknownError: unknownError, | |
getSize: getSize, | |
openDB: openDB, | |
valid: valid | |
}; | |
},{"../../deps/errors":101,"../../utils":119,"./websql-constants":92}],94:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('../../utils'); | |
var merge = require('../../merge'); | |
var errors = require('../../deps/errors'); | |
var parseHexString = require('../../deps/parse-hex'); | |
var websqlConstants = require('./websql-constants'); | |
var websqlUtils = require('./websql-utils'); | |
var websqlBulkDocs = require('./websql-bulk-docs'); | |
var ADAPTER_VERSION = websqlConstants.ADAPTER_VERSION; | |
var DOC_STORE = websqlConstants.DOC_STORE; | |
var BY_SEQ_STORE = websqlConstants.BY_SEQ_STORE; | |
var ATTACH_STORE = websqlConstants.ATTACH_STORE; | |
var LOCAL_STORE = websqlConstants.LOCAL_STORE; | |
var META_STORE = websqlConstants.META_STORE; | |
var ATTACH_AND_SEQ_STORE = websqlConstants.ATTACH_AND_SEQ_STORE; | |
var qMarks = websqlUtils.qMarks; | |
var stringifyDoc = websqlUtils.stringifyDoc; | |
var unstringifyDoc = websqlUtils.unstringifyDoc; | |
var select = websqlUtils.select; | |
var compactRevs = websqlUtils.compactRevs; | |
var unknownError = websqlUtils.unknownError; | |
var getSize = websqlUtils.getSize; | |
var openDB = websqlUtils.openDB; | |
function fetchAttachmentsIfNecessary(doc, opts, api, txn, cb) { | |
var attachments = Object.keys(doc._attachments || {}); | |
if (!attachments.length) { | |
return cb && cb(); | |
} | |
var numDone = 0; | |
function checkDone() { | |
if (++numDone === attachments.length && cb) { | |
cb(); | |
} | |
} | |
function fetchAttachment(doc, att) { | |
var attObj = doc._attachments[att]; | |
var attOpts = {encode: true, ctx: txn}; | |
api._getAttachment(attObj, attOpts, function (_, base64) { | |
doc._attachments[att] = utils.extend( | |
utils.pick(attObj, ['digest', 'content_type']), | |
{ data: base64 } | |
); | |
checkDone(); | |
}); | |
} | |
attachments.forEach(function (att) { | |
if (opts.attachments && opts.include_docs) { | |
fetchAttachment(doc, att); | |
} else { | |
doc._attachments[att].stub = true; | |
checkDone(); | |
} | |
}); | |
} | |
var POUCH_VERSION = 1; | |
// these indexes cover the ground for most allDocs queries | |
var BY_SEQ_STORE_DELETED_INDEX_SQL = | |
'CREATE INDEX IF NOT EXISTS \'by-seq-deleted-idx\' ON ' + | |
BY_SEQ_STORE + ' (seq, deleted)'; | |
var BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL = | |
'CREATE UNIQUE INDEX IF NOT EXISTS \'by-seq-doc-id-rev\' ON ' + | |
BY_SEQ_STORE + ' (doc_id, rev)'; | |
var DOC_STORE_WINNINGSEQ_INDEX_SQL = | |
'CREATE INDEX IF NOT EXISTS \'doc-winningseq-idx\' ON ' + | |
DOC_STORE + ' (winningseq)'; | |
var ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL = | |
'CREATE INDEX IF NOT EXISTS \'attach-seq-seq-idx\' ON ' + | |
ATTACH_AND_SEQ_STORE + ' (seq)'; | |
var ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL = | |
'CREATE UNIQUE INDEX IF NOT EXISTS \'attach-seq-digest-idx\' ON ' + | |
ATTACH_AND_SEQ_STORE + ' (digest, seq)'; | |
var DOC_STORE_AND_BY_SEQ_JOINER = BY_SEQ_STORE + | |
'.seq = ' + DOC_STORE + '.winningseq'; | |
var SELECT_DOCS = BY_SEQ_STORE + '.seq AS seq, ' + | |
BY_SEQ_STORE + '.deleted AS deleted, ' + | |
BY_SEQ_STORE + '.json AS data, ' + | |
BY_SEQ_STORE + '.rev AS rev, ' + | |
DOC_STORE + '.json AS metadata'; | |
function WebSqlPouch(opts, callback) { | |
var api = this; | |
var instanceId = null; | |
var size = getSize(opts); | |
var idRequests = []; | |
var encoding; | |
api._docCount = -1; // cache sqlite count(*) for performance | |
api._name = opts.name; | |
var db = openDB({ | |
name: api._name, | |
version: POUCH_VERSION, | |
description: api._name, | |
size: size, | |
location: opts.location, | |
createFromLocation: opts.createFromLocation | |
}); | |
if (!db) { | |
return callback(errors.error(errors.UNKNOWN_ERROR)); | |
} else if (typeof db.readTransaction !== 'function') { | |
// doesn't exist in sqlite plugin | |
db.readTransaction = db.transaction; | |
} | |
function dbCreated() { | |
// note the db name in case the browser upgrades to idb | |
if (utils.hasLocalStorage()) { | |
window.localStorage['_pouch__websqldb_' + api._name] = true; | |
} | |
callback(null, api); | |
} | |
// In this migration, we added the 'deleted' and 'local' columns to the | |
// by-seq and doc store tables. | |
// To preserve existing user data, we re-process all the existing JSON | |
// and add these values. | |
// Called migration2 because it corresponds to adapter version (db_version) #2 | |
function runMigration2(tx, callback) { | |
// index used for the join in the allDocs query | |
tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL); | |
tx.executeSql('ALTER TABLE ' + BY_SEQ_STORE + | |
' ADD COLUMN deleted TINYINT(1) DEFAULT 0', [], function () { | |
tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL); | |
tx.executeSql('ALTER TABLE ' + DOC_STORE + | |
' ADD COLUMN local TINYINT(1) DEFAULT 0', [], function () { | |
tx.executeSql('CREATE INDEX IF NOT EXISTS \'doc-store-local-idx\' ON ' + | |
DOC_STORE + ' (local, id)'); | |
var sql = 'SELECT ' + DOC_STORE + '.winningseq AS seq, ' + DOC_STORE + | |
'.json AS metadata FROM ' + BY_SEQ_STORE + ' JOIN ' + DOC_STORE + | |
' ON ' + BY_SEQ_STORE + '.seq = ' + DOC_STORE + '.winningseq'; | |
tx.executeSql(sql, [], function (tx, result) { | |
var deleted = []; | |
var local = []; | |
for (var i = 0; i < result.rows.length; i++) { | |
var item = result.rows.item(i); | |
var seq = item.seq; | |
var metadata = JSON.parse(item.metadata); | |
if (utils.isDeleted(metadata)) { | |
deleted.push(seq); | |
} | |
if (utils.isLocalId(metadata.id)) { | |
local.push(metadata.id); | |
} | |
} | |
tx.executeSql('UPDATE ' + DOC_STORE + 'SET local = 1 WHERE id IN ' + | |
qMarks(local.length), local, function () { | |
tx.executeSql('UPDATE ' + BY_SEQ_STORE + | |
' SET deleted = 1 WHERE seq IN ' + | |
qMarks(deleted.length), deleted, callback); | |
}); | |
}); | |
}); | |
}); | |
} | |
// in this migration, we make all the local docs unversioned | |
function runMigration3(tx, callback) { | |
var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE + | |
' (id UNIQUE, rev, json)'; | |
tx.executeSql(local, [], function () { | |
var sql = 'SELECT ' + DOC_STORE + '.id AS id, ' + | |
BY_SEQ_STORE + '.json AS data ' + | |
'FROM ' + BY_SEQ_STORE + ' JOIN ' + | |
DOC_STORE + ' ON ' + BY_SEQ_STORE + '.seq = ' + | |
DOC_STORE + '.winningseq WHERE local = 1'; | |
tx.executeSql(sql, [], function (tx, res) { | |
var rows = []; | |
for (var i = 0; i < res.rows.length; i++) { | |
rows.push(res.rows.item(i)); | |
} | |
function doNext() { | |
if (!rows.length) { | |
return callback(tx); | |
} | |
var row = rows.shift(); | |
var rev = JSON.parse(row.data)._rev; | |
tx.executeSql('INSERT INTO ' + LOCAL_STORE + | |
' (id, rev, json) VALUES (?,?,?)', | |
[row.id, rev, row.data], function (tx) { | |
tx.executeSql('DELETE FROM ' + DOC_STORE + ' WHERE id=?', | |
[row.id], function (tx) { | |
tx.executeSql('DELETE FROM ' + BY_SEQ_STORE + ' WHERE seq=?', | |
[row.seq], function () { | |
doNext(); | |
}); | |
}); | |
}); | |
} | |
doNext(); | |
}); | |
}); | |
} | |
// in this migration, we remove doc_id_rev and just use rev | |
function runMigration4(tx, callback) { | |
function updateRows(rows) { | |
function doNext() { | |
if (!rows.length) { | |
return callback(tx); | |
} | |
var row = rows.shift(); | |
var doc_id_rev = parseHexString(row.hex, encoding); | |
var idx = doc_id_rev.lastIndexOf('::'); | |
var doc_id = doc_id_rev.substring(0, idx); | |
var rev = doc_id_rev.substring(idx + 2); | |
var sql = 'UPDATE ' + BY_SEQ_STORE + | |
' SET doc_id=?, rev=? WHERE doc_id_rev=?'; | |
tx.executeSql(sql, [doc_id, rev, doc_id_rev], function () { | |
doNext(); | |
}); | |
} | |
doNext(); | |
} | |
var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN doc_id'; | |
tx.executeSql(sql, [], function (tx) { | |
var sql = 'ALTER TABLE ' + BY_SEQ_STORE + ' ADD COLUMN rev'; | |
tx.executeSql(sql, [], function (tx) { | |
tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL, [], function (tx) { | |
var sql = 'SELECT hex(doc_id_rev) as hex FROM ' + BY_SEQ_STORE; | |
tx.executeSql(sql, [], function (tx, res) { | |
var rows = []; | |
for (var i = 0; i < res.rows.length; i++) { | |
rows.push(res.rows.item(i)); | |
} | |
updateRows(rows); | |
}); | |
}); | |
}); | |
}); | |
} | |
// in this migration, we add the attach_and_seq table | |
// for issue #2818 | |
function runMigration5(tx, callback) { | |
function migrateAttsAndSeqs(tx) { | |
// need to actually populate the table. this is the expensive part, | |
// so as an optimization, check first that this database even | |
// contains attachments | |
var sql = 'SELECT COUNT(*) AS cnt FROM ' + ATTACH_STORE; | |
tx.executeSql(sql, [], function (tx, res) { | |
var count = res.rows.item(0).cnt; | |
if (!count) { | |
return callback(tx); | |
} | |
var offset = 0; | |
var pageSize = 10; | |
function nextPage() { | |
var sql = select( | |
SELECT_DOCS + ', ' + DOC_STORE + '.id AS id', | |
[DOC_STORE, BY_SEQ_STORE], | |
DOC_STORE_AND_BY_SEQ_JOINER, | |
null, | |
DOC_STORE + '.id ' | |
); | |
sql += ' LIMIT ' + pageSize + ' OFFSET ' + offset; | |
offset += pageSize; | |
tx.executeSql(sql, [], function (tx, res) { | |
if (!res.rows.length) { | |
return callback(tx); | |
} | |
var digestSeqs = {}; | |
function addDigestSeq(digest, seq) { | |
// uniq digest/seq pairs, just in case there are dups | |
var seqs = digestSeqs[digest] = (digestSeqs[digest] || []); | |
if (seqs.indexOf(seq) === -1) { | |
seqs.push(seq); | |
} | |
} | |
for (var i = 0; i < res.rows.length; i++) { | |
var row = res.rows.item(i); | |
var doc = unstringifyDoc(row.data, row.id, row.rev); | |
var atts = Object.keys(doc._attachments || {}); | |
for (var j = 0; j < atts.length; j++) { | |
var att = doc._attachments[atts[j]]; | |
addDigestSeq(att.digest, row.seq); | |
} | |
} | |
var digestSeqPairs = []; | |
Object.keys(digestSeqs).forEach(function (digest) { | |
var seqs = digestSeqs[digest]; | |
seqs.forEach(function (seq) { | |
digestSeqPairs.push([digest, seq]); | |
}); | |
}); | |
if (!digestSeqPairs.length) { | |
return nextPage(); | |
} | |
var numDone = 0; | |
digestSeqPairs.forEach(function (pair) { | |
var sql = 'INSERT INTO ' + ATTACH_AND_SEQ_STORE + | |
' (digest, seq) VALUES (?,?)'; | |
tx.executeSql(sql, pair, function () { | |
if (++numDone === digestSeqPairs.length) { | |
nextPage(); | |
} | |
}); | |
}); | |
}); | |
} | |
nextPage(); | |
}); | |
} | |
var attachAndRev = 'CREATE TABLE IF NOT EXISTS ' + | |
ATTACH_AND_SEQ_STORE + ' (digest, seq INTEGER)'; | |
tx.executeSql(attachAndRev, [], function (tx) { | |
tx.executeSql( | |
ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL, [], function (tx) { | |
tx.executeSql( | |
ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL, [], | |
migrateAttsAndSeqs); | |
}); | |
}); | |
} | |
// in this migration, we use escapeBlob() and unescapeBlob() | |
// instead of reading out the binary as HEX, which is slow | |
function runMigration6(tx, callback) { | |
var sql = 'ALTER TABLE ' + ATTACH_STORE + | |
' ADD COLUMN escaped TINYINT(1) DEFAULT 0'; | |
tx.executeSql(sql, [], callback); | |
} | |
// issue #3136, in this migration we need a "latest seq" as well | |
// as the "winning seq" in the doc store | |
function runMigration7(tx, callback) { | |
var sql = 'ALTER TABLE ' + DOC_STORE + | |
' ADD COLUMN max_seq INTEGER'; | |
tx.executeSql(sql, [], function (tx) { | |
var sql = 'UPDATE ' + DOC_STORE + ' SET max_seq=(SELECT MAX(seq) FROM ' + | |
BY_SEQ_STORE + ' WHERE doc_id=id)'; | |
tx.executeSql(sql, [], function (tx) { | |
// add unique index after filling, else we'll get a constraint | |
// error when we do the ALTER TABLE | |
var sql = | |
'CREATE UNIQUE INDEX IF NOT EXISTS \'doc-max-seq-idx\' ON ' + | |
DOC_STORE + ' (max_seq)'; | |
tx.executeSql(sql, [], callback); | |
}); | |
}); | |
} | |
function checkEncoding(tx, cb) { | |
// UTF-8 on chrome/android, UTF-16 on safari < 7.1 | |
tx.executeSql('SELECT HEX("a") AS hex', [], function (tx, res) { | |
var hex = res.rows.item(0).hex; | |
encoding = hex.length === 2 ? 'UTF-8' : 'UTF-16'; | |
cb(); | |
} | |
); | |
} | |
function onGetInstanceId() { | |
while (idRequests.length > 0) { | |
var idCallback = idRequests.pop(); | |
idCallback(null, instanceId); | |
} | |
} | |
function onGetVersion(tx, dbVersion) { | |
if (dbVersion === 0) { | |
// initial schema | |
var meta = 'CREATE TABLE IF NOT EXISTS ' + META_STORE + | |
' (dbid, db_version INTEGER)'; | |
var attach = 'CREATE TABLE IF NOT EXISTS ' + ATTACH_STORE + | |
' (digest UNIQUE, escaped TINYINT(1), body BLOB)'; | |
var attachAndRev = 'CREATE TABLE IF NOT EXISTS ' + | |
ATTACH_AND_SEQ_STORE + ' (digest, seq INTEGER)'; | |
// TODO: migrate winningseq to INTEGER | |
var doc = 'CREATE TABLE IF NOT EXISTS ' + DOC_STORE + | |
' (id unique, json, winningseq, max_seq INTEGER UNIQUE)'; | |
var seq = 'CREATE TABLE IF NOT EXISTS ' + BY_SEQ_STORE + | |
' (seq INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, ' + | |
'json, deleted TINYINT(1), doc_id, rev)'; | |
var local = 'CREATE TABLE IF NOT EXISTS ' + LOCAL_STORE + | |
' (id UNIQUE, rev, json)'; | |
// creates | |
tx.executeSql(attach); | |
tx.executeSql(local); | |
tx.executeSql(attachAndRev, [], function () { | |
tx.executeSql(ATTACH_AND_SEQ_STORE_SEQ_INDEX_SQL); | |
tx.executeSql(ATTACH_AND_SEQ_STORE_ATTACH_INDEX_SQL); | |
}); | |
tx.executeSql(doc, [], function () { | |
tx.executeSql(DOC_STORE_WINNINGSEQ_INDEX_SQL); | |
tx.executeSql(seq, [], function () { | |
tx.executeSql(BY_SEQ_STORE_DELETED_INDEX_SQL); | |
tx.executeSql(BY_SEQ_STORE_DOC_ID_REV_INDEX_SQL); | |
tx.executeSql(meta, [], function () { | |
// mark the db version, and new dbid | |
var initSeq = 'INSERT INTO ' + META_STORE + | |
' (db_version, dbid) VALUES (?,?)'; | |
instanceId = utils.uuid(); | |
var initSeqArgs = [ADAPTER_VERSION, instanceId]; | |
tx.executeSql(initSeq, initSeqArgs, function () { | |
onGetInstanceId(); | |
}); | |
}); | |
}); | |
}); | |
} else { // version > 0 | |
var setupDone = function () { | |
var migrated = dbVersion < ADAPTER_VERSION; | |
if (migrated) { | |
// update the db version within this transaction | |
tx.executeSql('UPDATE ' + META_STORE + ' SET db_version = ' + | |
ADAPTER_VERSION); | |
} | |
// notify db.id() callers | |
var sql = 'SELECT dbid FROM ' + META_STORE; | |
tx.executeSql(sql, [], function (tx, result) { | |
instanceId = result.rows.item(0).dbid; | |
onGetInstanceId(); | |
}); | |
}; | |
// would love to use promises here, but then websql | |
// ends the transaction early | |
var tasks = [ | |
runMigration2, | |
runMigration3, | |
runMigration4, | |
runMigration5, | |
runMigration6, | |
runMigration7, | |
setupDone | |
]; | |
// run each migration sequentially | |
var i = dbVersion; | |
var nextMigration = function (tx) { | |
tasks[i - 1](tx, nextMigration); | |
i++; | |
}; | |
nextMigration(tx); | |
} | |
} | |
function setup() { | |
db.transaction(function (tx) { | |
// first check the encoding | |
checkEncoding(tx, function () { | |
// then get the version | |
fetchVersion(tx); | |
}); | |
}, unknownError(callback), dbCreated); | |
} | |
function fetchVersion(tx) { | |
var sql = 'SELECT sql FROM sqlite_master WHERE tbl_name = ' + META_STORE; | |
tx.executeSql(sql, [], function (tx, result) { | |
if (!result.rows.length) { | |
// database hasn't even been created yet (version 0) | |
onGetVersion(tx, 0); | |
} else if (!/db_version/.test(result.rows.item(0).sql)) { | |
// table was created, but without the new db_version column, | |
// so add it. | |
tx.executeSql('ALTER TABLE ' + META_STORE + | |
' ADD COLUMN db_version INTEGER', [], function () { | |
// before version 2, this column didn't even exist | |
onGetVersion(tx, 1); | |
}); | |
} else { // column exists, we can safely get it | |
tx.executeSql('SELECT db_version FROM ' + META_STORE, | |
[], function (tx, result) { | |
var dbVersion = result.rows.item(0).db_version; | |
onGetVersion(tx, dbVersion); | |
}); | |
} | |
}); | |
} | |
if (utils.isCordova()) { | |
//to wait until custom api is made in pouch.adapters before doing setup | |
window.addEventListener(api._name + '_pouch', function cordova_init() { | |
window.removeEventListener(api._name + '_pouch', cordova_init, false); | |
setup(); | |
}, false); | |
} else { | |
setup(); | |
} | |
api.type = function () { | |
return 'websql'; | |
}; | |
api._id = utils.toPromise(function (callback) { | |
callback(null, instanceId); | |
}); | |
api._info = function (callback) { | |
db.readTransaction(function (tx) { | |
countDocs(tx, function (docCount) { | |
var sql = 'SELECT MAX(seq) AS seq FROM ' + BY_SEQ_STORE; | |
tx.executeSql(sql, [], function (tx, res) { | |
var updateSeq = res.rows.item(0).seq || 0; | |
callback(null, { | |
doc_count: docCount, | |
update_seq: updateSeq, | |
// for debugging | |
sqlite_plugin: db._sqlitePlugin, | |
websql_encoding: encoding | |
}); | |
}); | |
}); | |
}, unknownError(callback)); | |
}; | |
api._bulkDocs = function (req, opts, callback) { | |
websqlBulkDocs(req, opts, api, db, WebSqlPouch.Changes, callback); | |
}; | |
api._get = function (id, opts, callback) { | |
opts = utils.clone(opts); | |
var doc; | |
var metadata; | |
var err; | |
if (!opts.ctx) { | |
db.readTransaction(function (txn) { | |
opts.ctx = txn; | |
api._get(id, opts, callback); | |
}); | |
return; | |
} | |
var tx = opts.ctx; | |
function finish() { | |
callback(err, {doc: doc, metadata: metadata, ctx: tx}); | |
} | |
var sql; | |
var sqlArgs; | |
if (opts.rev) { | |
sql = select( | |
SELECT_DOCS, | |
[DOC_STORE, BY_SEQ_STORE], | |
DOC_STORE + '.id=' + BY_SEQ_STORE + '.doc_id', | |
[BY_SEQ_STORE + '.doc_id=?', BY_SEQ_STORE + '.rev=?']); | |
sqlArgs = [id, opts.rev]; | |
} else { | |
sql = select( | |
SELECT_DOCS, | |
[DOC_STORE, BY_SEQ_STORE], | |
DOC_STORE_AND_BY_SEQ_JOINER, | |
DOC_STORE + '.id=?'); | |
sqlArgs = [id]; | |
} | |
tx.executeSql(sql, sqlArgs, function (a, results) { | |
if (!results.rows.length) { | |
err = errors.error(errors.MISSING_DOC, 'missing'); | |
return finish(); | |
} | |
var item = results.rows.item(0); | |
metadata = utils.safeJsonParse(item.metadata); | |
if (item.deleted && !opts.rev) { | |
err = errors.error(errors.MISSING_DOC, 'deleted'); | |
return finish(); | |
} | |
doc = unstringifyDoc(item.data, metadata.id, item.rev); | |
finish(); | |
}); | |
}; | |
function countDocs(tx, callback) { | |
if (api._docCount !== -1) { | |
return callback(api._docCount); | |
} | |
// count the total rows | |
var sql = select( | |
'COUNT(' + DOC_STORE + '.id) AS \'num\'', | |
[DOC_STORE, BY_SEQ_STORE], | |
DOC_STORE_AND_BY_SEQ_JOINER, | |
BY_SEQ_STORE + '.deleted=0'); | |
tx.executeSql(sql, [], function (tx, result) { | |
api._docCount = result.rows.item(0).num; | |
callback(api._docCount); | |
}); | |
} | |
api._allDocs = function (opts, callback) { | |
var results = []; | |
var totalRows; | |
var start = 'startkey' in opts ? opts.startkey : false; | |
var end = 'endkey' in opts ? opts.endkey : false; | |
var key = 'key' in opts ? opts.key : false; | |
var descending = 'descending' in opts ? opts.descending : false; | |
var limit = 'limit' in opts ? opts.limit : -1; | |
var offset = 'skip' in opts ? opts.skip : 0; | |
var inclusiveEnd = opts.inclusive_end !== false; | |
var sqlArgs = []; | |
var criteria = []; | |
if (key !== false) { | |
criteria.push(DOC_STORE + '.id = ?'); | |
sqlArgs.push(key); | |
} else if (start !== false || end !== false) { | |
if (start !== false) { | |
criteria.push(DOC_STORE + '.id ' + (descending ? '<=' : '>=') + ' ?'); | |
sqlArgs.push(start); | |
} | |
if (end !== false) { | |
var comparator = descending ? '>' : '<'; | |
if (inclusiveEnd) { | |
comparator += '='; | |
} | |
criteria.push(DOC_STORE + '.id ' + comparator + ' ?'); | |
sqlArgs.push(end); | |
} | |
if (key !== false) { | |
criteria.push(DOC_STORE + '.id = ?'); | |
sqlArgs.push(key); | |
} | |
} | |
if (opts.deleted !== 'ok') { | |
// report deleted if keys are specified | |
criteria.push(BY_SEQ_STORE + '.deleted = 0'); | |
} | |
db.readTransaction(function (tx) { | |
// first count up the total rows | |
countDocs(tx, function (count) { | |
totalRows = count; | |
if (limit === 0) { | |
return; | |
} | |
// then actually fetch the documents | |
var sql = select( | |
SELECT_DOCS, | |
[DOC_STORE, BY_SEQ_STORE], | |
DOC_STORE_AND_BY_SEQ_JOINER, | |
criteria, | |
DOC_STORE + '.id ' + (descending ? 'DESC' : 'ASC') | |
); | |
sql += ' LIMIT ' + limit + ' OFFSET ' + offset; | |
tx.executeSql(sql, sqlArgs, function (tx, result) { | |
for (var i = 0, l = result.rows.length; i < l; i++) { | |
var item = result.rows.item(i); | |
var metadata = utils.safeJsonParse(item.metadata); | |
var id = metadata.id; | |
var data = unstringifyDoc(item.data, id, item.rev); | |
var winningRev = data._rev; | |
var doc = { | |
id: id, | |
key: id, | |
value: {rev: winningRev} | |
}; | |
if (opts.include_docs) { | |
doc.doc = data; | |
doc.doc._rev = winningRev; | |
if (opts.conflicts) { | |
doc.doc._conflicts = merge.collectConflicts(metadata); | |
} | |
fetchAttachmentsIfNecessary(doc.doc, opts, api, tx); | |
} | |
if (item.deleted) { | |
if (opts.deleted === 'ok') { | |
doc.value.deleted = true; | |
doc.doc = null; | |
} else { | |
continue; | |
} | |
} | |
results.push(doc); | |
} | |
}); | |
}); | |
}, unknownError(callback), function () { | |
callback(null, { | |
total_rows: totalRows, | |
offset: opts.skip, | |
rows: results | |
}); | |
}); | |
}; | |
api._changes = function (opts) { | |
opts = utils.clone(opts); | |
if (opts.continuous) { | |
var id = api._name + ':' + utils.uuid(); | |
WebSqlPouch.Changes.addListener(api._name, id, api, opts); | |
WebSqlPouch.Changes.notify(api._name); | |
return { | |
cancel: function () { | |
WebSqlPouch.Changes.removeListener(api._name, id); | |
} | |
}; | |
} | |
var descending = opts.descending; | |
// Ignore the `since` parameter when `descending` is true | |
opts.since = opts.since && !descending ? opts.since : 0; | |
var limit = 'limit' in opts ? opts.limit : -1; | |
if (limit === 0) { | |
limit = 1; // per CouchDB _changes spec | |
} | |
var returnDocs; | |
if ('returnDocs' in opts) { | |
returnDocs = opts.returnDocs; | |
} else { | |
returnDocs = true; | |
} | |
var results = []; | |
var numResults = 0; | |
function fetchChanges() { | |
var selectStmt = | |
DOC_STORE + '.json AS metadata, ' + | |
DOC_STORE + '.max_seq AS maxSeq, ' + | |
BY_SEQ_STORE + '.json AS winningDoc, ' + | |
BY_SEQ_STORE + '.rev AS winningRev '; | |
var from = DOC_STORE + ' JOIN ' + BY_SEQ_STORE; | |
var joiner = DOC_STORE + '.id=' + BY_SEQ_STORE + '.doc_id' + | |
' AND ' + DOC_STORE + '.winningseq=' + BY_SEQ_STORE + '.seq'; | |
var criteria = ['maxSeq > ?']; | |
var sqlArgs = [opts.since]; | |
if (opts.doc_ids) { | |
criteria.push(DOC_STORE + '.id IN ' + qMarks(opts.doc_ids.length)); | |
sqlArgs = sqlArgs.concat(opts.doc_ids); | |
} | |
var orderBy = 'maxSeq ' + (descending ? 'DESC' : 'ASC'); | |
var sql = select(selectStmt, from, joiner, criteria, orderBy); | |
var filter = utils.filterChange(opts); | |
if (!opts.view && !opts.filter) { | |
// we can just limit in the query | |
sql += ' LIMIT ' + limit; | |
} | |
var lastSeq = opts.since || 0; | |
db.readTransaction(function (tx) { | |
tx.executeSql(sql, sqlArgs, function (tx, result) { | |
function reportChange(change) { | |
return function () { | |
opts.onChange(change); | |
}; | |
} | |
for (var i = 0, l = result.rows.length; i < l; i++) { | |
var item = result.rows.item(i); | |
var metadata = utils.safeJsonParse(item.metadata); | |
lastSeq = item.maxSeq; | |
var doc = unstringifyDoc(item.winningDoc, metadata.id, | |
item.winningRev); | |
var change = opts.processChange(doc, metadata, opts); | |
change.seq = item.maxSeq; | |
if (filter(change)) { | |
numResults++; | |
if (returnDocs) { | |
results.push(change); | |
} | |
// process the attachment immediately | |
// for the benefit of live listeners | |
if (opts.attachments && opts.include_docs) { | |
fetchAttachmentsIfNecessary(doc, opts, api, tx, | |
reportChange(change)); | |
} else { | |
reportChange(change)(); | |
} | |
} | |
if (numResults === limit) { | |
break; | |
} | |
} | |
}); | |
}, unknownError(opts.complete), function () { | |
if (!opts.continuous) { | |
opts.complete(null, { | |
results: results, | |
last_seq: lastSeq | |
}); | |
} | |
}); | |
} | |
fetchChanges(); | |
}; | |
api._close = function (callback) { | |
//WebSQL databases do not need to be closed | |
callback(); | |
}; | |
api._getAttachment = function (attachment, opts, callback) { | |
var res; | |
var tx = opts.ctx; | |
var digest = attachment.digest; | |
var type = attachment.content_type; | |
var sql = 'SELECT escaped, ' + | |
'CASE WHEN escaped = 1 THEN body ELSE HEX(body) END AS body FROM ' + | |
ATTACH_STORE + ' WHERE digest=?'; | |
tx.executeSql(sql, [digest], function (tx, result) { | |
// websql has a bug where \u0000 causes early truncation in strings | |
// and blobs. to work around this, we used to use the hex() function, | |
// but that's not performant. after migration 6, we remove \u0000 | |
// and add it back in afterwards | |
var item = result.rows.item(0); | |
var data = item.escaped ? websqlUtils.unescapeBlob(item.body) : | |
parseHexString(item.body, encoding); | |
if (opts.encode) { | |
res = btoa(data); | |
} else { | |
data = utils.fixBinary(data); | |
res = utils.createBlob([data], {type: type}); | |
} | |
callback(null, res); | |
}); | |
}; | |
api._getRevisionTree = function (docId, callback) { | |
db.readTransaction(function (tx) { | |
var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?'; | |
tx.executeSql(sql, [docId], function (tx, result) { | |
if (!result.rows.length) { | |
callback(errors.error(errors.MISSING_DOC)); | |
} else { | |
var data = utils.safeJsonParse(result.rows.item(0).metadata); | |
callback(null, data.rev_tree); | |
} | |
}); | |
}); | |
}; | |
api._doCompaction = function (docId, revs, callback) { | |
if (!revs.length) { | |
return callback(); | |
} | |
db.transaction(function (tx) { | |
// update doc store | |
var sql = 'SELECT json AS metadata FROM ' + DOC_STORE + ' WHERE id = ?'; | |
tx.executeSql(sql, [docId], function (tx, result) { | |
var metadata = utils.safeJsonParse(result.rows.item(0).metadata); | |
merge.traverseRevTree(metadata.rev_tree, function (isLeaf, pos, | |
revHash, ctx, opts) { | |
var rev = pos + '-' + revHash; | |
if (revs.indexOf(rev) !== -1) { | |
opts.status = 'missing'; | |
} | |
}); | |
var sql = 'UPDATE ' + DOC_STORE + ' SET json = ? WHERE id = ?'; | |
tx.executeSql(sql, [utils.safeJsonStringify(metadata), docId]); | |
}); | |
compactRevs(revs, docId, tx); | |
}, unknownError(callback), function () { | |
callback(); | |
}); | |
}; | |
api._getLocal = function (id, callback) { | |
db.readTransaction(function (tx) { | |
var sql = 'SELECT json, rev FROM ' + LOCAL_STORE + ' WHERE id=?'; | |
tx.executeSql(sql, [id], function (tx, res) { | |
if (res.rows.length) { | |
var item = res.rows.item(0); | |
var doc = unstringifyDoc(item.json, id, item.rev); | |
callback(null, doc); | |
} else { | |
callback(errors.error(errors.MISSING_DOC)); | |
} | |
}); | |
}); | |
}; | |
api._putLocal = function (doc, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
delete doc._revisions; // ignore this, trust the rev | |
var oldRev = doc._rev; | |
var id = doc._id; | |
var newRev; | |
if (!oldRev) { | |
newRev = doc._rev = '0-1'; | |
} else { | |
newRev = doc._rev = '0-' + (parseInt(oldRev.split('-')[1], 10) + 1); | |
} | |
var json = stringifyDoc(doc); | |
var ret; | |
function putLocal(tx) { | |
var sql; | |
var values; | |
if (oldRev) { | |
sql = 'UPDATE ' + LOCAL_STORE + ' SET rev=?, json=? ' + | |
'WHERE id=? AND rev=?'; | |
values = [newRev, json, id, oldRev]; | |
} else { | |
sql = 'INSERT INTO ' + LOCAL_STORE + ' (id, rev, json) VALUES (?,?,?)'; | |
values = [id, newRev, json]; | |
} | |
tx.executeSql(sql, values, function (tx, res) { | |
if (res.rowsAffected) { | |
ret = {ok: true, id: id, rev: newRev}; | |
if (opts.ctx) { // return immediately | |
callback(null, ret); | |
} | |
} else { | |
callback(errors.error(errors.REV_CONFLICT)); | |
} | |
}, function () { | |
callback(errors.error(errors.REV_CONFLICT)); | |
return false; // ack that we handled the error | |
}); | |
} | |
if (opts.ctx) { | |
putLocal(opts.ctx); | |
} else { | |
db.transaction(function (tx) { | |
putLocal(tx); | |
}, unknownError(callback), function () { | |
if (ret) { | |
callback(null, ret); | |
} | |
}); | |
} | |
}; | |
api._removeLocal = function (doc, callback) { | |
var ret; | |
db.transaction(function (tx) { | |
var sql = 'DELETE FROM ' + LOCAL_STORE + ' WHERE id=? AND rev=?'; | |
var params = [doc._id, doc._rev]; | |
tx.executeSql(sql, params, function (tx, res) { | |
if (!res.rowsAffected) { | |
return callback(errors.error(errors.MISSING_DOC)); | |
} | |
ret = {ok: true, id: doc._id, rev: '0-0'}; | |
}); | |
}, unknownError(callback), function () { | |
if (ret) { | |
callback(null, ret); | |
} | |
}); | |
}; | |
api._destroy = function (callback) { | |
WebSqlPouch.Changes.removeAllListeners(api._name); | |
db.transaction(function (tx) { | |
var stores = [DOC_STORE, BY_SEQ_STORE, ATTACH_STORE, META_STORE, | |
LOCAL_STORE, ATTACH_AND_SEQ_STORE]; | |
stores.forEach(function (store) { | |
tx.executeSql('DROP TABLE IF EXISTS ' + store, []); | |
}); | |
}, unknownError(callback), function () { | |
if (utils.hasLocalStorage()) { | |
delete window.localStorage['_pouch__websqldb_' + api._name]; | |
delete window.localStorage[api._name]; | |
} | |
callback(null, {'ok': true}); | |
}); | |
}; | |
} | |
WebSqlPouch.valid = websqlUtils.valid; | |
WebSqlPouch.Changes = new utils.Changes(); | |
module.exports = WebSqlPouch; | |
},{"../../deps/errors":101,"../../deps/parse-hex":105,"../../merge":114,"../../utils":119,"./websql-bulk-docs":91,"./websql-constants":92,"./websql-utils":93}],95:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('./utils'); | |
var merge = require('./merge'); | |
var errors = require('./deps/errors'); | |
var EE = require('events').EventEmitter; | |
var evalFilter = require('./evalFilter'); | |
var evalView = require('./evalView'); | |
module.exports = Changes; | |
utils.inherits(Changes, EE); | |
function Changes(db, opts, callback) { | |
EE.call(this); | |
var self = this; | |
this.db = db; | |
opts = opts ? utils.clone(opts) : {}; | |
var oldComplete = callback || opts.complete || function () {}; | |
var complete = opts.complete = utils.once(function (err, resp) { | |
if (err) { | |
self.emit('error', err); | |
} else { | |
self.emit('complete', resp); | |
} | |
self.removeAllListeners(); | |
db.removeListener('destroyed', onDestroy); | |
}); | |
if (oldComplete) { | |
self.on('complete', function (resp) { | |
oldComplete(null, resp); | |
}); | |
self.on('error', function (err) { | |
oldComplete(err); | |
}); | |
} | |
var oldOnChange = opts.onChange; | |
if (oldOnChange) { | |
self.on('change', oldOnChange); | |
} | |
function onDestroy() { | |
self.cancel(); | |
} | |
db.once('destroyed', onDestroy); | |
opts.onChange = function (change) { | |
if (opts.isCancelled) { | |
return; | |
} | |
self.emit('change', change); | |
if (self.startSeq && self.startSeq <= change.seq) { | |
self.emit('uptodate'); | |
self.startSeq = false; | |
} | |
if (change.deleted) { | |
self.emit('delete', change); | |
} else if (change.changes.length === 1 && | |
change.changes[0].rev.slice(0, 2) === '1-') { | |
self.emit('create', change); | |
} else { | |
self.emit('update', change); | |
} | |
}; | |
var promise = new utils.Promise(function (fulfill, reject) { | |
opts.complete = function (err, res) { | |
if (err) { | |
reject(err); | |
} else { | |
fulfill(res); | |
} | |
}; | |
}); | |
self.once('cancel', function () { | |
if (oldOnChange) { | |
self.removeListener('change', oldOnChange); | |
} | |
opts.complete(null, {status: 'cancelled'}); | |
}); | |
this.then = promise.then.bind(promise); | |
this['catch'] = promise['catch'].bind(promise); | |
this.then(function (result) { | |
complete(null, result); | |
}, complete); | |
if (!db.taskqueue.isReady) { | |
db.taskqueue.addTask(function () { | |
if (self.isCancelled) { | |
self.emit('cancel'); | |
} else { | |
self.doChanges(opts); | |
} | |
}); | |
} else { | |
self.doChanges(opts); | |
} | |
} | |
Changes.prototype.cancel = function () { | |
this.isCancelled = true; | |
if (this.db.taskqueue.isReady) { | |
this.emit('cancel'); | |
} | |
}; | |
function processChange(doc, metadata, opts) { | |
var changeList = [{rev: doc._rev}]; | |
if (opts.style === 'all_docs') { | |
changeList = merge.collectLeaves(metadata.rev_tree) | |
.map(function (x) { return {rev: x.rev}; }); | |
} | |
var change = { | |
id: metadata.id, | |
changes: changeList, | |
doc: doc | |
}; | |
if (utils.isDeleted(metadata, doc._rev)) { | |
change.deleted = true; | |
} | |
if (opts.conflicts) { | |
change.doc._conflicts = merge.collectConflicts(metadata); | |
if (!change.doc._conflicts.length) { | |
delete change.doc._conflicts; | |
} | |
} | |
return change; | |
} | |
Changes.prototype.doChanges = function (opts) { | |
var self = this; | |
var callback = opts.complete; | |
opts = utils.clone(opts); | |
if ('live' in opts && !('continuous' in opts)) { | |
opts.continuous = opts.live; | |
} | |
opts.processChange = processChange; | |
if (opts.since === 'latest') { | |
opts.since = 'now'; | |
} | |
if (!opts.since) { | |
opts.since = 0; | |
} | |
if (opts.since === 'now') { | |
this.db.info().then(function (info) { | |
if (self.isCancelled) { | |
callback(null, {status: 'cancelled'}); | |
return; | |
} | |
opts.since = info.update_seq; | |
self.doChanges(opts); | |
}, callback); | |
return; | |
} | |
if (opts.continuous && opts.since !== 'now') { | |
this.db.info().then(function (info) { | |
self.startSeq = info.update_seq; | |
}, function (err) { | |
if (err.id === 'idbNull') { | |
//db closed before this returned | |
//thats ok | |
return; | |
} | |
throw err; | |
}); | |
} | |
if (this.db.type() !== 'http' && | |
opts.filter && typeof opts.filter === 'string' && | |
!opts.doc_ids) { | |
return this.filterChanges(opts); | |
} | |
if (!('descending' in opts)) { | |
opts.descending = false; | |
} | |
// 0 and 1 should return 1 document | |
opts.limit = opts.limit === 0 ? 1 : opts.limit; | |
opts.complete = callback; | |
var newPromise = this.db._changes(opts); | |
if (newPromise && typeof newPromise.cancel === 'function') { | |
var cancel = self.cancel; | |
self.cancel = utils.getArguments(function (args) { | |
newPromise.cancel(); | |
cancel.apply(this, args); | |
}); | |
} | |
}; | |
Changes.prototype.filterChanges = function (opts) { | |
var self = this; | |
var callback = opts.complete; | |
if (opts.filter === '_view') { | |
if (!opts.view || typeof opts.view !== 'string') { | |
var err = errors.error(errors.BAD_REQUEST, | |
'`view` filter parameter is not provided.'); | |
callback(err); | |
return; | |
} | |
// fetch a view from a design doc, make it behave like a filter | |
var viewName = opts.view.split('/'); | |
this.db.get('_design/' + viewName[0], function (err, ddoc) { | |
if (self.isCancelled) { | |
callback(null, {status: 'cancelled'}); | |
return; | |
} | |
if (err) { | |
callback(errors.generateErrorFromResponse(err)); | |
return; | |
} | |
if (ddoc && ddoc.views && ddoc.views[viewName[1]]) { | |
var filter = evalView(ddoc.views[viewName[1]].map); | |
opts.filter = filter; | |
self.doChanges(opts); | |
return; | |
} | |
var msg = ddoc.views ? 'missing json key: ' + viewName[1] : | |
'missing json key: views'; | |
if (!err) { | |
err = errors.error(errors.MISSING_DOC, msg); | |
} | |
callback(err); | |
return; | |
}); | |
} else { | |
// fetch a filter from a design doc | |
var filterName = opts.filter.split('/'); | |
this.db.get('_design/' + filterName[0], function (err, ddoc) { | |
if (self.isCancelled) { | |
callback(null, {status: 'cancelled'}); | |
return; | |
} | |
if (err) { | |
callback(errors.generateErrorFromResponse(err)); | |
return; | |
} | |
if (ddoc && ddoc.filters && ddoc.filters[filterName[1]]) { | |
var filter = evalFilter(ddoc.filters[filterName[1]]); | |
opts.filter = filter; | |
self.doChanges(opts); | |
return; | |
} else { | |
var msg = (ddoc && ddoc.filters) ? 'missing json key: ' + filterName[1] | |
: 'missing json key: filters'; | |
if (!err) { | |
err = errors.error(errors.MISSING_DOC, msg); | |
} | |
callback(err); | |
return; | |
} | |
}); | |
} | |
}; | |
},{"./deps/errors":101,"./evalFilter":111,"./evalView":112,"./merge":114,"./utils":119,"events":80}],96:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./deps/promise'); | |
var explain404 = require('./deps/explain404'); | |
var pouchCollate = require('pouchdb-collate'); | |
var collate = pouchCollate.collate; | |
function updateCheckpoint(db, id, checkpoint, returnValue) { | |
return db.get(id).catch(function (err) { | |
if (err.status === 404) { | |
if (db.type() === 'http') { | |
explain404( | |
'PouchDB is just checking if a remote checkpoint exists.'); | |
} | |
return {_id: id}; | |
} | |
throw err; | |
}).then(function (doc) { | |
if (returnValue.cancelled) { | |
return; | |
} | |
doc.last_seq = checkpoint; | |
return db.put(doc).catch(function (err) { | |
if (err.status === 409) { | |
// retry; someone is trying to write a checkpoint simultaneously | |
return updateCheckpoint(db, id, checkpoint, returnValue); | |
} | |
throw err; | |
}); | |
}); | |
} | |
function Checkpointer(src, target, id, returnValue) { | |
this.src = src; | |
this.target = target; | |
this.id = id; | |
this.returnValue = returnValue; | |
} | |
Checkpointer.prototype.writeCheckpoint = function (checkpoint) { | |
var self = this; | |
return this.updateTarget(checkpoint).then(function () { | |
return self.updateSource(checkpoint); | |
}); | |
}; | |
Checkpointer.prototype.updateTarget = function (checkpoint) { | |
return updateCheckpoint(this.target, this.id, checkpoint, this.returnValue); | |
}; | |
Checkpointer.prototype.updateSource = function (checkpoint) { | |
var self = this; | |
if (this.readOnlySource) { | |
return Promise.resolve(true); | |
} | |
return updateCheckpoint(this.src, this.id, checkpoint, this.returnValue) | |
.catch(function (err) { | |
var isForbidden = typeof err.status === 'number' && | |
Math.floor(err.status / 100) === 4; | |
if (isForbidden) { | |
self.readOnlySource = true; | |
return true; | |
} | |
throw err; | |
}); | |
}; | |
Checkpointer.prototype.getCheckpoint = function () { | |
var self = this; | |
return self.target.get(self.id).then(function (targetDoc) { | |
return self.src.get(self.id).then(function (sourceDoc) { | |
if (collate(targetDoc.last_seq, sourceDoc.last_seq) === 0) { | |
return sourceDoc.last_seq; | |
} | |
return 0; | |
}, function (err) { | |
if (err.status === 404 && targetDoc.last_seq) { | |
return self.src.put({ | |
_id: self.id, | |
last_seq: 0 | |
}).then(function () { | |
return 0; | |
}, function (err) { | |
if (err.status === 401) { | |
self.readOnlySource = true; | |
return targetDoc.last_seq; | |
} | |
return 0; | |
}); | |
} | |
throw err; | |
}); | |
}).catch(function (err) { | |
if (err.status !== 404) { | |
throw err; | |
} | |
return 0; | |
}); | |
}; | |
module.exports = Checkpointer; | |
},{"./deps/explain404":102,"./deps/promise":107,"pouchdb-collate":144}],97:[function(require,module,exports){ | |
(function (process,global){ | |
/*globals cordova */ | |
"use strict"; | |
var Adapter = require('./adapter'); | |
var utils = require('./utils'); | |
var TaskQueue = require('./taskqueue'); | |
var Promise = utils.Promise; | |
function defaultCallback(err) { | |
if (err && global.debug) { | |
console.error(err); | |
} | |
} | |
utils.inherits(PouchDB, Adapter); | |
function PouchDB(name, opts, callback) { | |
if (!(this instanceof PouchDB)) { | |
return new PouchDB(name, opts, callback); | |
} | |
var self = this; | |
if (typeof opts === 'function' || typeof opts === 'undefined') { | |
callback = opts; | |
opts = {}; | |
} | |
if (name && typeof name === 'object') { | |
opts = name; | |
name = undefined; | |
} | |
if (typeof callback === 'undefined') { | |
callback = defaultCallback; | |
} | |
name = name || opts.name; | |
opts = opts ? utils.clone(opts) : {}; | |
// if name was specified via opts, ignore for the sake of dependentDbs | |
delete opts.name; | |
this.__opts = opts; | |
var oldCB = callback; | |
self.auto_compaction = opts.auto_compaction; | |
self.prefix = PouchDB.prefix; | |
Adapter.call(self); | |
self.taskqueue = new TaskQueue(); | |
var promise = new Promise(function (fulfill, reject) { | |
callback = function (err, resp) { | |
if (err) { | |
return reject(err); | |
} | |
delete resp.then; | |
fulfill(resp); | |
}; | |
opts = utils.clone(opts); | |
var originalName = opts.name || name; | |
var backend, error; | |
(function () { | |
try { | |
if (typeof originalName !== 'string') { | |
error = new Error('Missing/invalid DB name'); | |
error.code = 400; | |
throw error; | |
} | |
backend = PouchDB.parseAdapter(originalName, opts); | |
opts.originalName = originalName; | |
opts.name = backend.name; | |
if (opts.prefix && backend.adapter !== 'http' && | |
backend.adapter !== 'https') { | |
opts.name = opts.prefix + opts.name; | |
} | |
opts.adapter = opts.adapter || backend.adapter; | |
self._adapter = opts.adapter; | |
self._db_name = originalName; | |
if (!PouchDB.adapters[opts.adapter]) { | |
error = new Error('Adapter is missing'); | |
error.code = 404; | |
throw error; | |
} | |
if (!PouchDB.adapters[opts.adapter].valid()) { | |
error = new Error('Invalid Adapter'); | |
error.code = 404; | |
throw error; | |
} | |
} catch (err) { | |
self.taskqueue.fail(err); | |
self.changes = utils.toPromise(function (opts) { | |
if (opts.complete) { | |
opts.complete(err); | |
} | |
}); | |
} | |
}()); | |
if (error) { | |
return reject(error); // constructor error, see above | |
} | |
self.adapter = opts.adapter; | |
// needs access to PouchDB; | |
self.replicate = {}; | |
self.replicate.from = function (url, opts, callback) { | |
return self.constructor.replicate(url, self, opts, callback); | |
}; | |
self.replicate.to = function (url, opts, callback) { | |
return self.constructor.replicate(self, url, opts, callback); | |
}; | |
self.sync = function (dbName, opts, callback) { | |
return self.constructor.sync(self, dbName, opts, callback); | |
}; | |
self.replicate.sync = self.sync; | |
PouchDB.adapters[opts.adapter].call(self, opts, function (err) { | |
if (err) { | |
if (callback) { | |
self.taskqueue.fail(err); | |
callback(err); | |
} | |
return; | |
} | |
function destructionListener() { | |
PouchDB.emit('destroyed', opts.originalName); | |
//so we don't have to sift through all dbnames | |
PouchDB.emit(opts.originalName, 'destroyed'); | |
self.removeListener('destroyed', destructionListener); | |
} | |
self.on('destroyed', destructionListener); | |
self.emit('created', self); | |
PouchDB.emit('created', opts.originalName); | |
self.taskqueue.ready(self); | |
callback(null, self); | |
}); | |
if (opts.skipSetup) { | |
self.taskqueue.ready(self); | |
process.nextTick(function () { | |
callback(null, self); | |
}); | |
} | |
if (utils.isCordova()) { | |
//to inform websql adapter that we can use api | |
cordova.fireWindowEvent(opts.name + "_pouch", {}); | |
} | |
}); | |
promise.then(function (resp) { | |
oldCB(null, resp); | |
}, oldCB); | |
self.then = promise.then.bind(promise); | |
self.catch = promise.catch.bind(promise); | |
} | |
PouchDB.debug = require('debug'); | |
module.exports = PouchDB; | |
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"./adapter":83,"./taskqueue":118,"./utils":119,"_process":81,"debug":122}],98:[function(require,module,exports){ | |
(function (process){ | |
"use strict"; | |
var request = require('request'); | |
var buffer = require('./buffer'); | |
var errors = require('./errors'); | |
var utils = require('../utils'); | |
function ajax(options, adapterCallback) { | |
var requestCompleted = false; | |
var callback = utils.getArguments(function (args) { | |
if (requestCompleted) { | |
return; | |
} | |
adapterCallback.apply(this, args); | |
requestCompleted = true; | |
}); | |
if (typeof options === "function") { | |
callback = options; | |
options = {}; | |
} | |
options = utils.clone(options); | |
var defaultOptions = { | |
method : "GET", | |
headers: {}, | |
json: true, | |
processData: true, | |
timeout: 10000, | |
cache: false | |
}; | |
options = utils.extend(true, defaultOptions, options); | |
function onSuccess(obj, resp, cb) { | |
if (!options.binary && !options.json && options.processData && | |
typeof obj !== 'string') { | |
obj = JSON.stringify(obj); | |
} else if (!options.binary && options.json && typeof obj === 'string') { | |
try { | |
obj = JSON.parse(obj); | |
} catch (e) { | |
// Probably a malformed JSON from server | |
return cb(e); | |
} | |
} | |
if (Array.isArray(obj)) { | |
obj = obj.map(function (v) { | |
if (v.error || v.missing) { | |
return errors.generateErrorFromResponse(v); | |
} else { | |
return v; | |
} | |
}); | |
} | |
cb(null, obj, resp); | |
} | |
function onError(err, cb) { | |
var errParsed, errObj; | |
if (err.code && err.status) { | |
var err2 = new Error(err.message || err.code); | |
err2.status = err.status; | |
return cb(err2); | |
} | |
try { | |
errParsed = JSON.parse(err.responseText); | |
//would prefer not to have a try/catch clause | |
errObj = errors.generateErrorFromResponse(errParsed); | |
} catch (e) { | |
errObj = errors.generateErrorFromResponse(err); | |
} | |
cb(errObj); | |
} | |
if (options.json) { | |
if (!options.binary) { | |
options.headers.Accept = 'application/json'; | |
} | |
options.headers['Content-Type'] = options.headers['Content-Type'] || | |
'application/json'; | |
} | |
if (options.binary) { | |
options.encoding = null; | |
options.json = false; | |
} | |
if (!options.processData) { | |
options.json = false; | |
} | |
function defaultBody(data) { | |
if (process.browser) { | |
return ''; | |
} | |
return new buffer('', 'binary'); | |
} | |
return request(options, function (err, response, body) { | |
if (err) { | |
err.status = response ? response.statusCode : 400; | |
return onError(err, callback); | |
} | |
var error; | |
var content_type = response.headers && response.headers['content-type']; | |
var data = body || defaultBody(); | |
// CouchDB doesn't always return the right content-type for JSON data, so | |
// we check for ^{ and }$ (ignoring leading/trailing whitespace) | |
if (!options.binary && (options.json || !options.processData) && | |
typeof data !== 'object' && | |
(/json/.test(content_type) || | |
(/^[\s]*\{/.test(data) && /\}[\s]*$/.test(data)))) { | |
data = JSON.parse(data); | |
} | |
if (response.statusCode >= 200 && response.statusCode < 300) { | |
onSuccess(data, response, callback); | |
} else { | |
if (options.binary) { | |
data = JSON.parse(data.toString()); | |
} | |
error = errors.generateErrorFromResponse(data); | |
error.status = response.statusCode; | |
callback(error); | |
} | |
}); | |
} | |
module.exports = ajax; | |
}).call(this,require('_process')) | |
},{"../utils":119,"./buffer":100,"./errors":101,"_process":81,"request":108}],99:[function(require,module,exports){ | |
(function (global){ | |
"use strict"; | |
//Abstracts constructing a Blob object, so it also works in older | |
//browsers that don't support the native Blob constructor. (i.e. | |
//old QtWebKit versions, at least). | |
function createBlob(parts, properties) { | |
parts = parts || []; | |
properties = properties || {}; | |
try { | |
return new Blob(parts, properties); | |
} catch (e) { | |
if (e.name !== "TypeError") { | |
throw e; | |
} | |
var BlobBuilder = global.BlobBuilder || | |
global.MSBlobBuilder || | |
global.MozBlobBuilder || | |
global.WebKitBlobBuilder; | |
var builder = new BlobBuilder(); | |
for (var i = 0; i < parts.length; i += 1) { | |
builder.append(parts[i]); | |
} | |
return builder.getBlob(properties.type); | |
} | |
} | |
module.exports = createBlob; | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],100:[function(require,module,exports){ | |
// hey guess what, we don't need this in the browser | |
module.exports = {}; | |
},{}],101:[function(require,module,exports){ | |
"use strict"; | |
var inherits = require('inherits'); | |
inherits(PouchError, Error); | |
function PouchError(opts) { | |
Error.call(opts.reason); | |
this.status = opts.status; | |
this.name = opts.error; | |
this.message = opts.reason; | |
this.error = true; | |
} | |
PouchError.prototype.toString = function () { | |
return JSON.stringify({ | |
status: this.status, | |
name: this.name, | |
message: this.message | |
}); | |
}; | |
exports.UNAUTHORIZED = new PouchError({ | |
status: 401, | |
error: 'unauthorized', | |
reason: "Name or password is incorrect." | |
}); | |
exports.MISSING_BULK_DOCS = new PouchError({ | |
status: 400, | |
error: 'bad_request', | |
reason: "Missing JSON list of 'docs'" | |
}); | |
exports.MISSING_DOC = new PouchError({ | |
status: 404, | |
error: 'not_found', | |
reason: 'missing' | |
}); | |
exports.REV_CONFLICT = new PouchError({ | |
status: 409, | |
error: 'conflict', | |
reason: 'Document update conflict' | |
}); | |
exports.INVALID_ID = new PouchError({ | |
status: 400, | |
error: 'invalid_id', | |
reason: '_id field must contain a string' | |
}); | |
exports.MISSING_ID = new PouchError({ | |
status: 412, | |
error: 'missing_id', | |
reason: '_id is required for puts' | |
}); | |
exports.RESERVED_ID = new PouchError({ | |
status: 400, | |
error: 'bad_request', | |
reason: 'Only reserved document ids may start with underscore.' | |
}); | |
exports.NOT_OPEN = new PouchError({ | |
status: 412, | |
error: 'precondition_failed', | |
reason: 'Database not open' | |
}); | |
exports.UNKNOWN_ERROR = new PouchError({ | |
status: 500, | |
error: 'unknown_error', | |
reason: 'Database encountered an unknown error' | |
}); | |
exports.BAD_ARG = new PouchError({ | |
status: 500, | |
error: 'badarg', | |
reason: 'Some query argument is invalid' | |
}); | |
exports.INVALID_REQUEST = new PouchError({ | |
status: 400, | |
error: 'invalid_request', | |
reason: 'Request was invalid' | |
}); | |
exports.QUERY_PARSE_ERROR = new PouchError({ | |
status: 400, | |
error: 'query_parse_error', | |
reason: 'Some query parameter is invalid' | |
}); | |
exports.DOC_VALIDATION = new PouchError({ | |
status: 500, | |
error: 'doc_validation', | |
reason: 'Bad special document member' | |
}); | |
exports.BAD_REQUEST = new PouchError({ | |
status: 400, | |
error: 'bad_request', | |
reason: 'Something wrong with the request' | |
}); | |
exports.NOT_AN_OBJECT = new PouchError({ | |
status: 400, | |
error: 'bad_request', | |
reason: 'Document must be a JSON object' | |
}); | |
exports.DB_MISSING = new PouchError({ | |
status: 404, | |
error: 'not_found', | |
reason: 'Database not found' | |
}); | |
exports.IDB_ERROR = new PouchError({ | |
status: 500, | |
error: 'indexed_db_went_bad', | |
reason: 'unknown' | |
}); | |
exports.WSQ_ERROR = new PouchError({ | |
status: 500, | |
error: 'web_sql_went_bad', | |
reason: 'unknown' | |
}); | |
exports.LDB_ERROR = new PouchError({ | |
status: 500, | |
error: 'levelDB_went_went_bad', | |
reason: 'unknown' | |
}); | |
exports.FORBIDDEN = new PouchError({ | |
status: 403, | |
error: 'forbidden', | |
reason: 'Forbidden by design doc validate_doc_update function' | |
}); | |
exports.INVALID_REV = new PouchError({ | |
status: 400, | |
error: 'bad_request', | |
reason: 'Invalid rev format' | |
}); | |
exports.FILE_EXISTS = new PouchError({ | |
status: 412, | |
error: 'file_exists', | |
reason: 'The database could not be created, the file already exists.' | |
}); | |
exports.MISSING_STUB = new PouchError({ | |
status: 412, | |
error: 'missing_stub' | |
}); | |
exports.error = function (error, reason, name) { | |
function CustomPouchError(reason) { | |
// inherit error properties from our parent error manually | |
// so as to allow proper JSON parsing. | |
/* jshint ignore:start */ | |
for (var p in error) { | |
if (typeof error[p] !== 'function') { | |
this[p] = error[p]; | |
} | |
} | |
/* jshint ignore:end */ | |
if (name !== undefined) { | |
this.name = name; | |
} | |
if (reason !== undefined) { | |
this.reason = reason; | |
} | |
} | |
CustomPouchError.prototype = PouchError.prototype; | |
return new CustomPouchError(reason); | |
}; | |
// Find one of the errors defined above based on the value | |
// of the specified property. | |
// If reason is provided prefer the error matching that reason. | |
// This is for differentiating between errors with the same name and status, | |
// eg, bad_request. | |
exports.getErrorTypeByProp = function (prop, value, reason) { | |
var errors = exports; | |
var keys = Object.keys(errors).filter(function (key) { | |
var error = errors[key]; | |
return typeof error !== 'function' && error[prop] === value; | |
}); | |
var key = reason && keys.filter(function (key) { | |
var error = errors[key]; | |
return error.message === reason; | |
})[0] || keys[0]; | |
return (key) ? errors[key] : null; | |
}; | |
exports.generateErrorFromResponse = function (res) { | |
var error, errName, errType, errMsg, errReason; | |
var errors = exports; | |
errName = (res.error === true && typeof res.name === 'string') ? | |
res.name : | |
res.error; | |
errReason = res.reason; | |
errType = errors.getErrorTypeByProp('name', errName, errReason); | |
if (res.missing || | |
errReason === 'missing' || | |
errReason === 'deleted' || | |
errName === 'not_found') { | |
errType = errors.MISSING_DOC; | |
} else if (errName === 'doc_validation') { | |
// doc validation needs special treatment since | |
// res.reason depends on the validation error. | |
// see utils.js | |
errType = errors.DOC_VALIDATION; | |
errMsg = errReason; | |
} else if (errName === 'bad_request' && errType.message !== errReason) { | |
// if bad_request error already found based on reason don't override. | |
// attachment errors. | |
if (errReason.indexOf('unknown stub attachment') === 0) { | |
errType = errors.MISSING_STUB; | |
errMsg = errReason; | |
} else { | |
errType = errors.BAD_REQUEST; | |
} | |
} | |
// fallback to error by statys or unknown error. | |
if (!errType) { | |
errType = errors.getErrorTypeByProp('status', res.status, errReason) || | |
errors.UNKNOWN_ERROR; | |
} | |
error = errors.error(errType, errReason, errName); | |
// Keep custom message. | |
if (errMsg) { | |
error.message = errMsg; | |
} | |
// Keep helpful response data in our error messages. | |
if (res.id) { | |
error.id = res.id; | |
} | |
if (res.status) { | |
error.status = res.status; | |
} | |
if (res.statusText) { | |
error.name = res.statusText; | |
} | |
if (res.missing) { | |
error.missing = res.missing; | |
} | |
return error; | |
}; | |
},{"inherits":125}],102:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict'; | |
// designed to give info to browser users, who are disturbed | |
// when they see 404s in the console | |
function explain404(str) { | |
if (process.browser && 'console' in global && 'info' in console) { | |
console.info('The above 404 is totally normal. ' + str); | |
} | |
} | |
module.exports = explain404; | |
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"_process":81}],103:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict'; | |
var crypto = require('crypto'); | |
var Md5 = require('spark-md5'); | |
var setImmediateShim = global.setImmediate || global.setTimeout; | |
var MD5_CHUNK_SIZE = 32768; | |
// convert a 64-bit int to a binary string | |
function intToString(int) { | |
var bytes = [ | |
(int & 0xff), | |
((int >>> 8) & 0xff), | |
((int >>> 16) & 0xff), | |
((int >>> 24) & 0xff) | |
]; | |
return bytes.map(function (byte) { | |
return String.fromCharCode(byte); | |
}).join(''); | |
} | |
// convert an array of 64-bit ints into | |
// a base64-encoded string | |
function rawToBase64(raw) { | |
var res = ''; | |
for (var i = 0; i < raw.length; i++) { | |
res += intToString(raw[i]); | |
} | |
return btoa(res); | |
} | |
function appendBuffer(buffer, data, start, end) { | |
if (start > 0 || end < data.byteLength) { | |
// only create a subarray if we really need to | |
data = new Uint8Array(data, start, | |
Math.min(end, data.byteLength) - start); | |
} | |
buffer.append(data); | |
} | |
function appendString(buffer, data, start, end) { | |
if (start > 0 || end < data.length) { | |
// only create a substring if we really need to | |
data = data.substring(start, end); | |
} | |
buffer.appendBinary(data); | |
} | |
module.exports = function (data, callback) { | |
if (!process.browser) { | |
var base64 = crypto.createHash('md5').update(data).digest('base64'); | |
callback(null, base64); | |
return; | |
} | |
var inputIsString = typeof data === 'string'; | |
var len = inputIsString ? data.length : data.byteLength; | |
var chunkSize = Math.min(MD5_CHUNK_SIZE, len); | |
var chunks = Math.ceil(len / chunkSize); | |
var currentChunk = 0; | |
var buffer = inputIsString ? new Md5() : new Md5.ArrayBuffer(); | |
var append = inputIsString ? appendString : appendBuffer; | |
function loadNextChunk() { | |
var start = currentChunk * chunkSize; | |
var end = start + chunkSize; | |
currentChunk++; | |
if (currentChunk < chunks) { | |
append(buffer, data, start, end); | |
setImmediateShim(loadNextChunk); | |
} else { | |
append(buffer, data, start, end); | |
var raw = buffer.end(true); | |
var base64 = rawToBase64(raw); | |
callback(null, base64); | |
buffer.destroy(); | |
} | |
} | |
loadNextChunk(); | |
}; | |
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"_process":81,"crypto":79,"spark-md5":155}],104:[function(require,module,exports){ | |
'use strict'; | |
var errors = require('./errors'); | |
var uuid = require('./uuid'); | |
function toObject(array) { | |
return array.reduce(function (obj, item) { | |
obj[item] = true; | |
return obj; | |
}, {}); | |
} | |
// List of top level reserved words for doc | |
var reservedWords = toObject([ | |
'_id', | |
'_rev', | |
'_attachments', | |
'_deleted', | |
'_revisions', | |
'_revs_info', | |
'_conflicts', | |
'_deleted_conflicts', | |
'_local_seq', | |
'_rev_tree', | |
//replication documents | |
'_replication_id', | |
'_replication_state', | |
'_replication_state_time', | |
'_replication_state_reason', | |
'_replication_stats', | |
// Specific to Couchbase Sync Gateway | |
'_removed' | |
]); | |
// List of reserved words that should end up the document | |
var dataWords = toObject([ | |
'_attachments', | |
//replication documents | |
'_replication_id', | |
'_replication_state', | |
'_replication_state_time', | |
'_replication_state_reason', | |
'_replication_stats' | |
]); | |
// Determine id an ID is valid | |
// - invalid IDs begin with an underescore that does not begin '_design' or | |
// '_local' | |
// - any other string value is a valid id | |
// Returns the specific error object for each case | |
exports.invalidIdError = function (id) { | |
var err; | |
if (!id) { | |
err = errors.error(errors.MISSING_ID); | |
} else if (typeof id !== 'string') { | |
err = errors.error(errors.INVALID_ID); | |
} else if (/^_/.test(id) && !(/^_(design|local)/).test(id)) { | |
err = errors.error(errors.RESERVED_ID); | |
} | |
if (err) { | |
throw err; | |
} | |
}; | |
function parseRevisionInfo(rev) { | |
if (!/^\d+\-./.test(rev)) { | |
return errors.error(errors.INVALID_REV); | |
} | |
var idx = rev.indexOf('-'); | |
var left = rev.substring(0, idx); | |
var right = rev.substring(idx + 1); | |
return { | |
prefix: parseInt(left, 10), | |
id: right | |
}; | |
} | |
function makeRevTreeFromRevisions(revisions, opts) { | |
var pos = revisions.start - revisions.ids.length + 1; | |
var revisionIds = revisions.ids; | |
var ids = [revisionIds[0], opts, []]; | |
for (var i = 1, len = revisionIds.length; i < len; i++) { | |
ids = [revisionIds[i], {status: 'missing'}, [ids]]; | |
} | |
return [{ | |
pos: pos, | |
ids: ids | |
}]; | |
} | |
// Preprocess documents, parse their revisions, assign an id and a | |
// revision for new writes that are missing them, etc | |
exports.parseDoc = function (doc, newEdits) { | |
var nRevNum; | |
var newRevId; | |
var revInfo; | |
var opts = {status: 'available'}; | |
if (doc._deleted) { | |
opts.deleted = true; | |
} | |
if (newEdits) { | |
if (!doc._id) { | |
doc._id = uuid(); | |
} | |
newRevId = uuid(32, 16).toLowerCase(); | |
if (doc._rev) { | |
revInfo = parseRevisionInfo(doc._rev); | |
if (revInfo.error) { | |
return revInfo; | |
} | |
doc._rev_tree = [{ | |
pos: revInfo.prefix, | |
ids: [revInfo.id, {status: 'missing'}, [[newRevId, opts, []]]] | |
}]; | |
nRevNum = revInfo.prefix + 1; | |
} else { | |
doc._rev_tree = [{ | |
pos: 1, | |
ids : [newRevId, opts, []] | |
}]; | |
nRevNum = 1; | |
} | |
} else { | |
if (doc._revisions) { | |
doc._rev_tree = makeRevTreeFromRevisions(doc._revisions, opts); | |
nRevNum = doc._revisions.start; | |
newRevId = doc._revisions.ids[0]; | |
} | |
if (!doc._rev_tree) { | |
revInfo = parseRevisionInfo(doc._rev); | |
if (revInfo.error) { | |
return revInfo; | |
} | |
nRevNum = revInfo.prefix; | |
newRevId = revInfo.id; | |
doc._rev_tree = [{ | |
pos: nRevNum, | |
ids: [newRevId, opts, []] | |
}]; | |
} | |
} | |
exports.invalidIdError(doc._id); | |
doc._rev = nRevNum + '-' + newRevId; | |
var result = {metadata : {}, data : {}}; | |
for (var key in doc) { | |
if (doc.hasOwnProperty(key)) { | |
var specialKey = key[0] === '_'; | |
if (specialKey && !reservedWords[key]) { | |
var error = errors.error(errors.DOC_VALIDATION, key); | |
error.message = errors.DOC_VALIDATION.message + ': ' + key; | |
throw error; | |
} else if (specialKey && !dataWords[key]) { | |
result.metadata[key.slice(1)] = doc[key]; | |
} else { | |
result.data[key] = doc[key]; | |
} | |
} | |
} | |
return result; | |
}; | |
},{"./errors":101,"./uuid":110}],105:[function(require,module,exports){ | |
'use strict'; | |
// | |
// Parsing hex strings. Yeah. | |
// | |
// So basically we need this because of a bug in WebSQL: | |
// https://code.google.com/p/chromium/issues/detail?id=422690 | |
// https://bugs.webkit.org/show_bug.cgi?id=137637 | |
// | |
// UTF-8 and UTF-16 are provided as separate functions | |
// for meager performance improvements | |
// | |
function decodeUtf8(str) { | |
return decodeURIComponent(window.escape(str)); | |
} | |
function hexToInt(charCode) { | |
// '0'-'9' is 48-57 | |
// 'A'-'F' is 65-70 | |
// SQLite will only give us uppercase hex | |
return charCode < 65 ? (charCode - 48) : (charCode - 55); | |
} | |
// Example: | |
// pragma encoding=utf8; | |
// select hex('A'); | |
// returns '41' | |
function parseHexUtf8(str, start, end) { | |
var result = ''; | |
while (start < end) { | |
result += String.fromCharCode( | |
(hexToInt(str.charCodeAt(start++)) << 4) | | |
hexToInt(str.charCodeAt(start++))); | |
} | |
return result; | |
} | |
// Example: | |
// pragma encoding=utf16; | |
// select hex('A'); | |
// returns '4100' | |
// notice that the 00 comes after the 41 (i.e. it's swizzled) | |
function parseHexUtf16(str, start, end) { | |
var result = ''; | |
while (start < end) { | |
// UTF-16, so swizzle the bytes | |
result += String.fromCharCode( | |
(hexToInt(str.charCodeAt(start + 2)) << 12) | | |
(hexToInt(str.charCodeAt(start + 3)) << 8) | | |
(hexToInt(str.charCodeAt(start)) << 4) | | |
hexToInt(str.charCodeAt(start + 1))); | |
start += 4; | |
} | |
return result; | |
} | |
function parseHexString(str, encoding) { | |
if (encoding === 'UTF-8') { | |
return decodeUtf8(parseHexUtf8(str, 0, str.length)); | |
} else { | |
return parseHexUtf16(str, 0, str.length); | |
} | |
} | |
module.exports = parseHexString; | |
},{}],106:[function(require,module,exports){ | |
'use strict'; | |
// originally parseUri 1.2.2, now patched by us | |
// (c) Steven Levithan <stevenlevithan.com> | |
// MIT License | |
var options = { | |
strictMode: false, | |
key: ["source", "protocol", "authority", "userInfo", "user", "password", | |
"host", "port", "relative", "path", "directory", "file", "query", | |
"anchor"], | |
q: { | |
name: "queryKey", | |
parser: /(?:^|&)([^&=]*)=?([^&]*)/g | |
}, | |
parser: { | |
/* jshint maxlen: false */ | |
strict: /^(?:([^:\/?#]+):)?(?:\/\/((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?))?((((?:[^?#\/]*\/)*)([^?#]*))(?:\?([^#]*))?(?:#(.*))?)/, | |
loose: /^(?:(?![^:@]+:[^:@\/]*@)([^:\/?#.]+):)?(?:\/\/)?((?:(([^:@]*)(?::([^:@]*))?)?@)?([^:\/?#]*)(?::(\d*))?)(((\/(?:[^?#](?![^?#\/]*\.[^?#\/.]+(?:[?#]|$)))*\/?)?([^?#\/]*))(?:\?([^#]*))?(?:#(.*))?)/ | |
} | |
}; | |
function parseUri(str) { | |
var o = options; | |
var m = o.parser[o.strictMode ? "strict" : "loose"].exec(str); | |
var uri = {}; | |
var i = 14; | |
while (i--) { | |
var key = o.key[i]; | |
var value = m[i] || ""; | |
var encoded = ['user', 'password'].indexOf(key) !== -1; | |
uri[key] = encoded ? decodeURIComponent(value) : value; | |
} | |
uri[o.q.name] = {}; | |
uri[o.key[12]].replace(o.q.parser, function ($0, $1, $2) { | |
if ($1) { | |
uri[o.q.name][$1] = $2; | |
} | |
}); | |
return uri; | |
} | |
module.exports = parseUri; | |
},{}],107:[function(require,module,exports){ | |
'use strict'; | |
/* istanbul ignore next */ | |
module.exports = typeof Promise === 'function' ? Promise : require('lie'); | |
},{"lie":129}],108:[function(require,module,exports){ | |
/* global fetch */ | |
/* global Headers */ | |
'use strict'; | |
var createBlob = require('./blob.js'); | |
var utils = require('../utils'); | |
function wrappedFetch() { | |
var wrappedPromise = {}; | |
var promise = new utils.Promise(function(resolve, reject) { | |
wrappedPromise.resolve = resolve; | |
wrappedPromise.reject = reject; | |
}); | |
var args = new Array(arguments.length); | |
for (var i = 0; i < args.length; i++) { | |
args[i] = arguments[i]; | |
} | |
wrappedPromise.then = promise.then.bind(promise); | |
wrappedPromise.catch = promise.catch.bind(promise); | |
wrappedPromise.promise = promise; | |
fetch.apply(null, args).then(function(response) { | |
wrappedPromise.resolve(response); | |
}, function(error) { | |
wrappedPromise.reject(error); | |
}).catch(function(error) { | |
wrappedPromise.catch(error); | |
}); | |
return wrappedPromise; | |
} | |
function fetchRequest(options, callback) { | |
var wrappedPromise, timer, fetchResponse; | |
var headers = new Headers(); | |
var fetchOptions = { | |
method: options.method, | |
credentials: 'include', | |
headers: headers | |
}; | |
if (options.json) { | |
headers.set('Accept', 'application/json'); | |
headers.set('Content-Type', options.headers['Content-Type'] || | |
'application/json'); | |
} | |
if (options.body && (options.body instanceof Blob)) { | |
utils.readAsBinaryString(options.body, function(binary) { | |
fetchOptions.body = utils.fixBinary(binary); | |
}); | |
} else if (options.body && | |
options.processData && | |
typeof options.body !== 'string') { | |
fetchOptions.body = JSON.stringify(options.body); | |
} else if ('body' in options) { | |
fetchOptions.body = options.body; | |
} else { | |
fetchOptions.body = null; | |
} | |
Object.keys(options.headers).forEach(function(key) { | |
if (options.headers.hasOwnProperty(key)) { | |
headers.set(key, options.headers[key]); | |
} | |
}); | |
wrappedPromise = wrappedFetch(options.url, fetchOptions); | |
if (options.timeout > 0) { | |
timer = setTimeout(function() { | |
wrappedPromise.reject(new Error('Load timeout for resource: ' + | |
options.url)); | |
}, options.timeout); | |
} | |
wrappedPromise.promise.then(function(response) { | |
var result; | |
fetchResponse = response; | |
if (options.timeout > 0) { | |
clearTimeout(timer); | |
} | |
if (response.status >= 200 && response.status < 300) { | |
return options.binary ? response.blob() : response.text(); | |
} | |
return result.json(); | |
}).then(function(result) { | |
if (fetchResponse.status >= 200 && fetchResponse.status < 300) { | |
callback(null, fetchResponse, result); | |
} else { | |
callback(result, fetchResponse); | |
} | |
}).catch(function(error) { | |
callback(error, fetchResponse); | |
}); | |
return {abort: wrappedPromise.reject}; | |
} | |
function xhRequest(options, callback) { | |
var xhr, timer, hasUpload; | |
var abortReq = function () { | |
xhr.abort(); | |
}; | |
if (options.xhr) { | |
xhr = new options.xhr(); | |
} else { | |
xhr = new XMLHttpRequest(); | |
} | |
// cache-buster, specifically designed to work around IE's aggressive caching | |
// see http://www.dashbay.com/2011/05/internet-explorer-caches-ajax/ | |
if (options.method === 'GET' && !options.cache) { | |
var hasArgs = options.url.indexOf('?') !== -1; | |
options.url += (hasArgs ? '&' : '?') + '_nonce=' + Date.now(); | |
} | |
xhr.open(options.method, options.url); | |
xhr.withCredentials = true; | |
if (options.method === 'GET') { | |
delete options.headers['Content-Type']; | |
} else if (options.json) { | |
options.headers.Accept = 'application/json'; | |
options.headers['Content-Type'] = options.headers['Content-Type'] || | |
'application/json'; | |
if (options.body && | |
options.processData && | |
typeof options.body !== "string") { | |
options.body = JSON.stringify(options.body); | |
} | |
} | |
if (options.binary) { | |
xhr.responseType = 'arraybuffer'; | |
} | |
if (!('body' in options)) { | |
options.body = null; | |
} | |
for (var key in options.headers) { | |
if (options.headers.hasOwnProperty(key)) { | |
xhr.setRequestHeader(key, options.headers[key]); | |
} | |
} | |
if (options.timeout > 0) { | |
timer = setTimeout(abortReq, options.timeout); | |
xhr.onprogress = function () { | |
clearTimeout(timer); | |
timer = setTimeout(abortReq, options.timeout); | |
}; | |
if (typeof hasUpload === 'undefined') { | |
// IE throws an error if you try to access it directly | |
hasUpload = Object.keys(xhr).indexOf('upload') !== -1 && | |
typeof xhr.upload !== 'undefined'; | |
} | |
if (hasUpload) { // does not exist in ie9 | |
xhr.upload.onprogress = xhr.onprogress; | |
} | |
} | |
xhr.onreadystatechange = function () { | |
if (xhr.readyState !== 4) { | |
return; | |
} | |
var response = { | |
statusCode: xhr.status | |
}; | |
if (xhr.status >= 200 && xhr.status < 300) { | |
var data; | |
if (options.binary) { | |
data = createBlob([xhr.response || ''], { | |
type: xhr.getResponseHeader('Content-Type') | |
}); | |
} else { | |
data = xhr.responseText; | |
} | |
callback(null, response, data); | |
} else { | |
var err = {}; | |
try { | |
err = JSON.parse(xhr.response); | |
} catch(e) {} | |
callback(err, response); | |
} | |
}; | |
if (options.body && (options.body instanceof Blob)) { | |
utils.readAsBinaryString(options.body, function (binary) { | |
xhr.send(utils.fixBinary(binary)); | |
}); | |
} else { | |
xhr.send(options.body); | |
} | |
return {abort: abortReq}; | |
} | |
module.exports = function(options, callback) { | |
if (typeof XMLHttpRequest === 'undefined' && !options.xhr) { | |
return fetchRequest(options, callback); | |
} else { | |
return xhRequest(options, callback); | |
} | |
}; | |
},{"../utils":119,"./blob.js":99}],109:[function(require,module,exports){ | |
'use strict'; | |
var upsert = require('pouchdb-upsert').upsert; | |
module.exports = function (db, doc, diffFun, cb) { | |
return upsert.call(db, doc, diffFun, cb); | |
}; | |
},{"pouchdb-upsert":154}],110:[function(require,module,exports){ | |
"use strict"; | |
// BEGIN Math.uuid.js | |
/*! | |
Math.uuid.js (v1.4) | |
http://www.broofa.com | |
mailto:[email protected] | |
Copyright (c) 2010 Robert Kieffer | |
Dual licensed under the MIT and GPL licenses. | |
*/ | |
/* | |
* Generate a random uuid. | |
* | |
* USAGE: Math.uuid(length, radix) | |
* length - the desired number of characters | |
* radix - the number of allowable values for each character. | |
* | |
* EXAMPLES: | |
* // No arguments - returns RFC4122, version 4 ID | |
* >>> Math.uuid() | |
* "92329D39-6F5C-4520-ABFC-AAB64544E172" | |
* | |
* // One argument - returns ID of the specified length | |
* >>> Math.uuid(15) // 15 character ID (default base=62) | |
* "VcydxgltxrVZSTV" | |
* | |
* // Two arguments - returns ID of the specified length, and radix. | |
* // (Radix must be <= 62) | |
* >>> Math.uuid(8, 2) // 8 character ID (base=2) | |
* "01001010" | |
* >>> Math.uuid(8, 10) // 8 character ID (base=10) | |
* "47473046" | |
* >>> Math.uuid(8, 16) // 8 character ID (base=16) | |
* "098F4D35" | |
*/ | |
var chars = ( | |
'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ' + | |
'abcdefghijklmnopqrstuvwxyz' | |
).split(''); | |
function getValue(radix) { | |
return 0 | Math.random() * radix; | |
} | |
function uuid(len, radix) { | |
radix = radix || chars.length; | |
var out = ''; | |
var i = -1; | |
if (len) { | |
// Compact form | |
while (++i < len) { | |
out += chars[getValue(radix)]; | |
} | |
return out; | |
} | |
// rfc4122, version 4 form | |
// Fill in random data. At i==19 set the high bits of clock sequence as | |
// per rfc4122, sec. 4.1.5 | |
while (++i < 36) { | |
switch (i) { | |
case 8: | |
case 13: | |
case 18: | |
case 23: | |
out += '-'; | |
break; | |
case 19: | |
out += chars[(getValue(16) & 0x3) | 0x8]; | |
break; | |
default: | |
out += chars[getValue(16)]; | |
} | |
} | |
return out; | |
} | |
module.exports = uuid; | |
},{}],111:[function(require,module,exports){ | |
'use strict'; | |
module.exports = evalFilter; | |
function evalFilter(input) { | |
/*jshint evil: true */ | |
return eval([ | |
'(function () { return ', | |
input, | |
' })()' | |
].join('')); | |
} | |
},{}],112:[function(require,module,exports){ | |
'use strict'; | |
module.exports = evalView; | |
function evalView(input) { | |
/*jshint evil: true */ | |
return eval([ | |
'(function () {', | |
' return function (doc) {', | |
' var emitted = false;', | |
' var emit = function (a, b) {', | |
' emitted = true;', | |
' };', | |
' var view = ' + input + ';', | |
' view(doc);', | |
' if (emitted) {', | |
' return true;', | |
' }', | |
' }', | |
'})()' | |
].join('\n')); | |
} | |
},{}],113:[function(require,module,exports){ | |
(function (process){ | |
"use strict"; | |
var PouchDB = require('./setup'); | |
module.exports = PouchDB; | |
PouchDB.ajax = require('./deps/ajax'); | |
PouchDB.utils = require('./utils'); | |
PouchDB.Errors = require('./deps/errors'); | |
PouchDB.replicate = require('./replicate').replicate; | |
PouchDB.sync = require('./sync'); | |
PouchDB.version = require('./version'); | |
var httpAdapter = require('./adapters/http/http'); | |
PouchDB.adapter('http', httpAdapter); | |
PouchDB.adapter('https', httpAdapter); | |
PouchDB.adapter('idb', require('./adapters/idb/idb'), true); | |
PouchDB.adapter('websql', require('./adapters/websql/websql'), true); | |
PouchDB.plugin(require('pouchdb-mapreduce')); | |
if (!process.browser) { | |
var ldbAdapter = require('./adapters/leveldb/leveldb'); | |
PouchDB.adapter('leveldb', ldbAdapter, true); | |
} | |
}).call(this,require('_process')) | |
},{"./adapters/http/http":84,"./adapters/idb/idb":90,"./adapters/leveldb/leveldb":79,"./adapters/websql/websql":94,"./deps/ajax":98,"./deps/errors":101,"./replicate":115,"./setup":116,"./sync":117,"./utils":119,"./version":120,"_process":81,"pouchdb-mapreduce":150}],114:[function(require,module,exports){ | |
'use strict'; | |
var extend = require('pouchdb-extend'); | |
// for a better overview of what this is doing, read: | |
// https://github.com/apache/couchdb/blob/master/src/couchdb/couch_key_tree.erl | |
// | |
// But for a quick intro, CouchDB uses a revision tree to store a documents | |
// history, A -> B -> C, when a document has conflicts, that is a branch in the | |
// tree, A -> (B1 | B2 -> C), We store these as a nested array in the format | |
// | |
// KeyTree = [Path ... ] | |
// Path = {pos: position_from_root, ids: Tree} | |
// Tree = [Key, Opts, [Tree, ...]], in particular single node: [Key, []] | |
// classic binary search | |
function binarySearch(arr, item, comparator) { | |
var low = 0; | |
var high = arr.length; | |
var mid; | |
while (low < high) { | |
mid = (low + high) >>> 1; | |
if (comparator(arr[mid], item) < 0) { | |
low = mid + 1; | |
} else { | |
high = mid; | |
} | |
} | |
return low; | |
} | |
// assuming the arr is sorted, insert the item in the proper place | |
function insertSorted(arr, item, comparator) { | |
var idx = binarySearch(arr, item, comparator); | |
arr.splice(idx, 0, item); | |
} | |
// Turn a path as a flat array into a tree with a single branch | |
function pathToTree(path) { | |
var doc = path.shift(); | |
var root = [doc.id, doc.opts, []]; | |
var leaf = root; | |
var nleaf; | |
while (path.length) { | |
doc = path.shift(); | |
nleaf = [doc.id, doc.opts, []]; | |
leaf[2].push(nleaf); | |
leaf = nleaf; | |
} | |
return root; | |
} | |
// compare the IDs of two trees | |
function compareTree(a, b) { | |
return a[0] < b[0] ? -1 : 1; | |
} | |
// Merge two trees together | |
// The roots of tree1 and tree2 must be the same revision | |
function mergeTree(in_tree1, in_tree2) { | |
var queue = [{tree1: in_tree1, tree2: in_tree2}]; | |
var conflicts = false; | |
while (queue.length > 0) { | |
var item = queue.pop(); | |
var tree1 = item.tree1; | |
var tree2 = item.tree2; | |
if (tree1[1].status || tree2[1].status) { | |
tree1[1].status = | |
(tree1[1].status === 'available' || | |
tree2[1].status === 'available') ? 'available' : 'missing'; | |
} | |
for (var i = 0; i < tree2[2].length; i++) { | |
if (!tree1[2][0]) { | |
conflicts = 'new_leaf'; | |
tree1[2][0] = tree2[2][i]; | |
continue; | |
} | |
var merged = false; | |
for (var j = 0; j < tree1[2].length; j++) { | |
if (tree1[2][j][0] === tree2[2][i][0]) { | |
queue.push({tree1: tree1[2][j], tree2: tree2[2][i]}); | |
merged = true; | |
} | |
} | |
if (!merged) { | |
conflicts = 'new_branch'; | |
insertSorted(tree1[2], tree2[2][i], compareTree); | |
} | |
} | |
} | |
return {conflicts: conflicts, tree: in_tree1}; | |
} | |
function doMerge(tree, path, dontExpand) { | |
var restree = []; | |
var conflicts = false; | |
var merged = false; | |
var res; | |
if (!tree.length) { | |
return {tree: [path], conflicts: 'new_leaf'}; | |
} | |
tree.forEach(function (branch) { | |
if (branch.pos === path.pos && branch.ids[0] === path.ids[0]) { | |
// Paths start at the same position and have the same root, so they need | |
// merged | |
res = mergeTree(branch.ids, path.ids); | |
restree.push({pos: branch.pos, ids: res.tree}); | |
conflicts = conflicts || res.conflicts; | |
merged = true; | |
} else if (dontExpand !== true) { | |
// The paths start at a different position, take the earliest path and | |
// traverse up until it as at the same point from root as the path we | |
// want to merge. If the keys match we return the longer path with the | |
// other merged After stemming we dont want to expand the trees | |
var t1 = branch.pos < path.pos ? branch : path; | |
var t2 = branch.pos < path.pos ? path : branch; | |
var diff = t2.pos - t1.pos; | |
var candidateParents = []; | |
var trees = []; | |
trees.push({ids: t1.ids, diff: diff, parent: null, parentIdx: null}); | |
while (trees.length > 0) { | |
var item = trees.pop(); | |
if (item.diff === 0) { | |
if (item.ids[0] === t2.ids[0]) { | |
candidateParents.push(item); | |
} | |
continue; | |
} | |
if (!item.ids) { | |
continue; | |
} | |
/*jshint loopfunc:true */ | |
item.ids[2].forEach(function (el, idx) { | |
trees.push( | |
{ids: el, diff: item.diff - 1, parent: item.ids, parentIdx: idx}); | |
}); | |
} | |
var el = candidateParents[0]; | |
if (!el) { | |
restree.push(branch); | |
} else { | |
res = mergeTree(el.ids, t2.ids); | |
el.parent[2][el.parentIdx] = res.tree; | |
restree.push({pos: t1.pos, ids: t1.ids}); | |
conflicts = conflicts || res.conflicts; | |
merged = true; | |
} | |
} else { | |
restree.push(branch); | |
} | |
}); | |
// We didnt find | |
if (!merged) { | |
restree.push(path); | |
} | |
restree.sort(function (a, b) { | |
return a.pos - b.pos; | |
}); | |
return { | |
tree: restree, | |
conflicts: conflicts || 'internal_node' | |
}; | |
} | |
// To ensure we dont grow the revision tree infinitely, we stem old revisions | |
function stem(tree, depth) { | |
// First we break out the tree into a complete list of root to leaf paths, | |
// we cut off the start of the path and generate a new set of flat trees | |
var stemmedPaths = PouchMerge.rootToLeaf(tree).map(function (path) { | |
var stemmed = path.ids.slice(-depth); | |
return { | |
pos: path.pos + (path.ids.length - stemmed.length), | |
ids: pathToTree(stemmed) | |
}; | |
}); | |
// Then we remerge all those flat trees together, ensuring that we dont | |
// connect trees that would go beyond the depth limit | |
return stemmedPaths.reduce(function (prev, current) { | |
return doMerge(prev, current, true).tree; | |
}, [stemmedPaths.shift()]); | |
} | |
var PouchMerge = {}; | |
PouchMerge.merge = function (tree, path, depth) { | |
// Ugh, nicer way to not modify arguments in place? | |
tree = extend(true, [], tree); | |
path = extend(true, {}, path); | |
var newTree = doMerge(tree, path); | |
return { | |
tree: stem(newTree.tree, depth), | |
conflicts: newTree.conflicts | |
}; | |
}; | |
// We fetch all leafs of the revision tree, and sort them based on tree length | |
// and whether they were deleted, undeleted documents with the longest revision | |
// tree (most edits) win | |
// The final sort algorithm is slightly documented in a sidebar here: | |
// http://guide.couchdb.org/draft/conflicts.html | |
PouchMerge.winningRev = function (metadata) { | |
var leafs = []; | |
PouchMerge.traverseRevTree(metadata.rev_tree, | |
function (isLeaf, pos, id, something, opts) { | |
if (isLeaf) { | |
leafs.push({pos: pos, id: id, deleted: !!opts.deleted}); | |
} | |
}); | |
leafs.sort(function (a, b) { | |
if (a.deleted !== b.deleted) { | |
return a.deleted > b.deleted ? 1 : -1; | |
} | |
if (a.pos !== b.pos) { | |
return b.pos - a.pos; | |
} | |
return a.id < b.id ? 1 : -1; | |
}); | |
return leafs[0].pos + '-' + leafs[0].id; | |
}; | |
// Pretty much all below can be combined into a higher order function to | |
// traverse revisions | |
// The return value from the callback will be passed as context to all | |
// children of that node | |
PouchMerge.traverseRevTree = function (revs, callback) { | |
var toVisit = revs.slice(); | |
var node; | |
while ((node = toVisit.pop())) { | |
var pos = node.pos; | |
var tree = node.ids; | |
var branches = tree[2]; | |
var newCtx = | |
callback(branches.length === 0, pos, tree[0], node.ctx, tree[1]); | |
for (var i = 0, len = branches.length; i < len; i++) { | |
toVisit.push({pos: pos + 1, ids: branches[i], ctx: newCtx}); | |
} | |
} | |
}; | |
PouchMerge.collectLeaves = function (revs) { | |
var leaves = []; | |
PouchMerge.traverseRevTree(revs, function (isLeaf, pos, id, acc, opts) { | |
if (isLeaf) { | |
leaves.push({rev: pos + "-" + id, pos: pos, opts: opts}); | |
} | |
}); | |
leaves.sort(function (a, b) { | |
return b.pos - a.pos; | |
}); | |
leaves.forEach(function (leaf) { delete leaf.pos; }); | |
return leaves; | |
}; | |
// returns revs of all conflicts that is leaves such that | |
// 1. are not deleted and | |
// 2. are different than winning revision | |
PouchMerge.collectConflicts = function (metadata) { | |
var win = PouchMerge.winningRev(metadata); | |
var leaves = PouchMerge.collectLeaves(metadata.rev_tree); | |
var conflicts = []; | |
leaves.forEach(function (leaf) { | |
if (leaf.rev !== win && !leaf.opts.deleted) { | |
conflicts.push(leaf.rev); | |
} | |
}); | |
return conflicts; | |
}; | |
PouchMerge.rootToLeaf = function (tree) { | |
var paths = []; | |
PouchMerge.traverseRevTree(tree, function (isLeaf, pos, id, history, opts) { | |
history = history ? history.slice(0) : []; | |
history.push({id: id, opts: opts}); | |
if (isLeaf) { | |
var rootPos = pos + 1 - history.length; | |
paths.unshift({pos: rootPos, ids: history}); | |
} | |
return history; | |
}); | |
return paths; | |
}; | |
module.exports = PouchMerge; | |
},{"pouchdb-extend":147}],115:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('./utils'); | |
var EE = require('events').EventEmitter; | |
var Checkpointer = require('./checkpointer'); | |
var MAX_SIMULTANEOUS_REVS = 50; | |
var RETRY_DEFAULT = false; | |
function randomNumber(min, max) { | |
min = parseInt(min, 10); | |
max = parseInt(max, 10); | |
if (min !== min) { | |
min = 0; | |
} | |
if (max !== max || max <= min) { | |
max = (min || 1) << 1; //doubling | |
} else { | |
max = max + 1; | |
} | |
var ratio = Math.random(); | |
var range = max - min; | |
return ~~(range * ratio + min); // ~~ coerces to an int, but fast. | |
} | |
function defaultBackOff(min) { | |
var max = 0; | |
if (!min) { | |
max = 2000; | |
} | |
return randomNumber(min, max); | |
} | |
function backOff(repId, src, target, opts, returnValue, result, error) { | |
if (opts.retry === false) { | |
returnValue.emit('error', error); | |
returnValue.removeAllListeners(); | |
return; | |
} | |
opts.default_back_off = opts.default_back_off || 0; | |
opts.retries = opts.retries || 0; | |
if (typeof opts.back_off_function !== 'function') { | |
opts.back_off_function = defaultBackOff; | |
} | |
opts.retries++; | |
if (opts.max_retries && opts.retries > opts.max_retries) { | |
returnValue.emit('error', new Error('tried ' + | |
opts.retries + ' times but replication failed')); | |
returnValue.removeAllListeners(); | |
return; | |
} | |
returnValue.emit('requestError', error); | |
if (returnValue.state === 'active') { | |
returnValue.emit('paused', error); | |
returnValue.state = 'stopped'; | |
returnValue.once('active', function () { | |
opts.current_back_off = opts.default_back_off; | |
}); | |
} | |
opts.current_back_off = opts.current_back_off || opts.default_back_off; | |
opts.current_back_off = opts.back_off_function(opts.current_back_off); | |
setTimeout(function () { | |
replicate(repId, src, target, opts, returnValue); | |
}, opts.current_back_off); | |
} | |
// We create a basic promise so the caller can cancel the replication possibly | |
// before we have actually started listening to changes etc | |
utils.inherits(Replication, EE); | |
function Replication() { | |
EE.call(this); | |
this.cancelled = false; | |
this.state = 'pending'; | |
var self = this; | |
var promise = new utils.Promise(function (fulfill, reject) { | |
self.once('complete', fulfill); | |
self.once('error', reject); | |
}); | |
self.then = function (resolve, reject) { | |
return promise.then(resolve, reject); | |
}; | |
self.catch = function (reject) { | |
return promise.catch(reject); | |
}; | |
// As we allow error handling via "error" event as well, | |
// put a stub in here so that rejecting never throws UnhandledError. | |
self.catch(function () {}); | |
} | |
Replication.prototype.cancel = function () { | |
this.cancelled = true; | |
this.state = 'cancelled'; | |
this.emit('cancel'); | |
}; | |
Replication.prototype.ready = function (src, target) { | |
var self = this; | |
function onDestroy() { | |
self.cancel(); | |
} | |
src.once('destroyed', onDestroy); | |
target.once('destroyed', onDestroy); | |
function cleanup() { | |
src.removeListener('destroyed', onDestroy); | |
target.removeListener('destroyed', onDestroy); | |
} | |
this.then(cleanup, cleanup); | |
}; | |
// TODO: check CouchDB's replication id generation | |
// Generate a unique id particular to this replication | |
function genReplicationId(src, target, opts) { | |
var filterFun = opts.filter ? opts.filter.toString() : ''; | |
return src.id().then(function (src_id) { | |
return target.id().then(function (target_id) { | |
var queryData = src_id + target_id + filterFun + | |
JSON.stringify(opts.query_params) + opts.doc_ids; | |
return utils.MD5(queryData).then(function (md5) { | |
// can't use straight-up md5 alphabet, because | |
// the char '/' is interpreted as being for attachments, | |
// and + is also not url-safe | |
md5 = md5.replace(/\//g, '.').replace(/\+/g, '_'); | |
return '_local/' + md5; | |
}); | |
}); | |
}); | |
} | |
function replicate(repId, src, target, opts, returnValue, result) { | |
var batches = []; // list of batches to be processed | |
var currentBatch; // the batch currently being processed | |
var pendingBatch = { | |
seq: 0, | |
changes: [], | |
docs: [] | |
}; // next batch, not yet ready to be processed | |
var writingCheckpoint = false; // true while checkpoint is being written | |
var changesCompleted = false; // true when all changes received | |
var replicationCompleted = false; // true when replication has completed | |
var last_seq = 0; | |
var continuous = opts.continuous || opts.live || false; | |
var batch_size = opts.batch_size || 100; | |
var batches_limit = opts.batches_limit || 10; | |
var changesPending = false; // true while src.changes is running | |
var doc_ids = opts.doc_ids; | |
var state = { | |
cancelled: false | |
}; | |
var checkpointer = new Checkpointer(src, target, repId, state); | |
var allErrors = []; | |
var changedDocs = []; | |
result = result || { | |
ok: true, | |
start_time: new Date(), | |
docs_read: 0, | |
docs_written: 0, | |
doc_write_failures: 0, | |
errors: [] | |
}; | |
var changesOpts = {}; | |
returnValue.ready(src, target); | |
function writeDocs() { | |
if (currentBatch.docs.length === 0) { | |
return; | |
} | |
var docs = currentBatch.docs; | |
return target.bulkDocs({docs: docs, new_edits: false}).then(function (res) { | |
if (state.cancelled) { | |
completeReplication(); | |
throw new Error('cancelled'); | |
} | |
var errors = []; | |
var errorsById = {}; | |
res.forEach(function (res) { | |
if (res.error) { | |
result.doc_write_failures++; | |
errors.push(res); | |
errorsById[res.id] = res; | |
} | |
}); | |
allErrors = allErrors.concat(errors); | |
result.docs_written += currentBatch.docs.length - errors.length; | |
var non403s = errors.filter(function (error) { | |
return error.name !== 'unauthorized' && error.name !== 'forbidden'; | |
}); | |
changedDocs = []; | |
docs.forEach(function(doc) { | |
var error = errorsById[doc._id]; | |
if (error) { | |
returnValue.emit('denied', utils.clone(error)); | |
} else { | |
changedDocs.push(doc); | |
} | |
}); | |
if (non403s.length > 0) { | |
var error = new Error('bulkDocs error'); | |
error.other_errors = errors; | |
abortReplication('target.bulkDocs failed to write docs', error); | |
throw new Error('bulkWrite partial failure'); | |
} | |
}, function (err) { | |
result.doc_write_failures += docs.length; | |
throw err; | |
}); | |
} | |
function processDiffDoc(id) { | |
var diffs = currentBatch.diffs; | |
var allMissing = diffs[id].missing; | |
// avoid url too long error by batching | |
var missingBatches = []; | |
for (var i = 0; i < allMissing.length; i += MAX_SIMULTANEOUS_REVS) { | |
missingBatches.push(allMissing.slice(i, Math.min(allMissing.length, | |
i + MAX_SIMULTANEOUS_REVS))); | |
} | |
return utils.Promise.all(missingBatches.map(function (missing) { | |
var opts = { | |
revs: true, | |
open_revs: missing, | |
attachments: true | |
}; | |
return src.get(id, opts).then(function (docs) { | |
docs.forEach(function (doc) { | |
if (state.cancelled) { | |
return completeReplication(); | |
} | |
if (doc.ok) { | |
result.docs_read++; | |
currentBatch.pendingRevs++; | |
currentBatch.docs.push(doc.ok); | |
} | |
}); | |
delete diffs[id]; | |
}); | |
})); | |
} | |
function getAllDocs() { | |
var diffKeys = Object.keys(currentBatch.diffs); | |
return utils.Promise.all(diffKeys.map(processDiffDoc)); | |
} | |
function getRevisionOneDocs() { | |
// filter out the generation 1 docs and get them | |
// leaving the non-generation one docs to be got otherwise | |
var ids = Object.keys(currentBatch.diffs).filter(function (id) { | |
var missing = currentBatch.diffs[id].missing; | |
return missing.length === 1 && missing[0].slice(0, 2) === '1-'; | |
}); | |
if (!ids.length) { // nothing to fetch | |
return utils.Promise.resolve(); | |
} | |
return src.allDocs({ | |
keys: ids, | |
include_docs: true | |
}).then(function (res) { | |
if (state.cancelled) { | |
completeReplication(); | |
throw (new Error('cancelled')); | |
} | |
res.rows.forEach(function (row) { | |
if (row.doc && !row.deleted && | |
row.value.rev.slice(0, 2) === '1-' && ( | |
!row.doc._attachments || | |
Object.keys(row.doc._attachments).length === 0 | |
) | |
) { | |
result.docs_read++; | |
currentBatch.pendingRevs++; | |
currentBatch.docs.push(row.doc); | |
delete currentBatch.diffs[row.id]; | |
} | |
}); | |
}); | |
} | |
function getDocs() { | |
return getRevisionOneDocs().then(getAllDocs); | |
} | |
function finishBatch() { | |
writingCheckpoint = true; | |
return checkpointer.writeCheckpoint(currentBatch.seq).then(function () { | |
writingCheckpoint = false; | |
if (state.cancelled) { | |
completeReplication(); | |
throw new Error('cancelled'); | |
} | |
result.last_seq = last_seq = currentBatch.seq; | |
var outResult = utils.clone(result); | |
outResult.docs = changedDocs; | |
returnValue.emit('change', outResult); | |
currentBatch = undefined; | |
getChanges(); | |
}).catch(function (err) { | |
writingCheckpoint = false; | |
abortReplication('writeCheckpoint completed with error', err); | |
throw err; | |
}); | |
} | |
function getDiffs() { | |
var diff = {}; | |
currentBatch.changes.forEach(function (change) { | |
// Couchbase Sync Gateway emits these, but we can ignore them | |
if (change.id === "_user/") { | |
return; | |
} | |
diff[change.id] = change.changes.map(function (x) { | |
return x.rev; | |
}); | |
}); | |
return target.revsDiff(diff).then(function (diffs) { | |
if (state.cancelled) { | |
completeReplication(); | |
throw new Error('cancelled'); | |
} | |
// currentBatch.diffs elements are deleted as the documents are written | |
currentBatch.diffs = diffs; | |
currentBatch.pendingRevs = 0; | |
}); | |
} | |
function startNextBatch() { | |
if (state.cancelled || currentBatch) { | |
return; | |
} | |
if (batches.length === 0) { | |
processPendingBatch(true); | |
return; | |
} | |
currentBatch = batches.shift(); | |
getDiffs() | |
.then(getDocs) | |
.then(writeDocs) | |
.then(finishBatch) | |
.then(startNextBatch) | |
.catch(function (err) { | |
abortReplication('batch processing terminated with error', err); | |
}); | |
} | |
function processPendingBatch(immediate) { | |
if (pendingBatch.changes.length === 0) { | |
if (batches.length === 0 && !currentBatch) { | |
if ((continuous && changesOpts.live) || changesCompleted) { | |
returnValue.state = 'pending'; | |
returnValue.emit('paused'); | |
returnValue.emit('uptodate', result); | |
} | |
if (changesCompleted) { | |
completeReplication(); | |
} | |
} | |
return; | |
} | |
if ( | |
immediate || | |
changesCompleted || | |
pendingBatch.changes.length >= batch_size | |
) { | |
batches.push(pendingBatch); | |
pendingBatch = { | |
seq: 0, | |
changes: [], | |
docs: [] | |
}; | |
if (returnValue.state === 'pending' || returnValue.state === 'stopped') { | |
returnValue.state = 'active'; | |
returnValue.emit('active'); | |
} | |
startNextBatch(); | |
} | |
} | |
function abortReplication(reason, err) { | |
if (replicationCompleted) { | |
return; | |
} | |
if (!err.message) { | |
err.message = reason; | |
} | |
result.ok = false; | |
result.status = 'aborting'; | |
result.errors.push(err); | |
allErrors = allErrors.concat(err); | |
batches = []; | |
pendingBatch = { | |
seq: 0, | |
changes: [], | |
docs: [] | |
}; | |
completeReplication(); | |
} | |
function completeReplication() { | |
if (replicationCompleted) { | |
return; | |
} | |
if (state.cancelled) { | |
result.status = 'cancelled'; | |
if (writingCheckpoint) { | |
return; | |
} | |
} | |
result.status = result.status || 'complete'; | |
result.end_time = new Date(); | |
result.last_seq = last_seq; | |
replicationCompleted = state.cancelled = true; | |
var non403s = allErrors.filter(function (error) { | |
return error.name !== 'unauthorized' && error.name !== 'forbidden'; | |
}); | |
if (non403s.length > 0) { | |
var error = allErrors.pop(); | |
if (allErrors.length > 0) { | |
error.other_errors = allErrors; | |
} | |
error.result = result; | |
backOff(repId, src, target, opts, returnValue, result, error); | |
} else { | |
result.errors = allErrors; | |
returnValue.emit('complete', result); | |
returnValue.removeAllListeners(); | |
} | |
} | |
function onChange(change) { | |
if (state.cancelled) { | |
return completeReplication(); | |
} | |
var filter = utils.filterChange(opts)(change); | |
if (!filter) { | |
return; | |
} | |
if ( | |
pendingBatch.changes.length === 0 && | |
batches.length === 0 && | |
!currentBatch | |
) { | |
returnValue.emit('outofdate', result); | |
} | |
pendingBatch.seq = change.seq; | |
pendingBatch.changes.push(change); | |
processPendingBatch(batches.length === 0); | |
} | |
function onChangesComplete(changes) { | |
changesPending = false; | |
if (state.cancelled) { | |
return completeReplication(); | |
} | |
// if no results were returned then we're done, | |
// else fetch more | |
if (changes.results.length > 0) { | |
changesOpts.since = changes.last_seq; | |
getChanges(); | |
} else { | |
if (continuous) { | |
changesOpts.live = true; | |
getChanges(); | |
} else { | |
changesCompleted = true; | |
} | |
} | |
processPendingBatch(true); | |
} | |
function onChangesError(err) { | |
changesPending = false; | |
if (state.cancelled) { | |
return completeReplication(); | |
} | |
abortReplication('changes rejected', err); | |
} | |
function getChanges() { | |
if (!( | |
!changesPending && | |
!changesCompleted && | |
batches.length < batches_limit | |
)) { | |
return; | |
} | |
changesPending = true; | |
function abortChanges() { | |
changes.cancel(); | |
} | |
function removeListener() { | |
returnValue.removeListener('cancel', abortChanges); | |
} | |
returnValue.once('cancel', abortChanges); | |
var changes = src.changes(changesOpts) | |
.on('change', onChange); | |
changes.then(removeListener, removeListener); | |
changes.then(onChangesComplete) | |
.catch(onChangesError); | |
} | |
function startChanges() { | |
checkpointer.getCheckpoint().then(function (checkpoint) { | |
last_seq = checkpoint; | |
changesOpts = { | |
since: last_seq, | |
limit: batch_size, | |
batch_size: batch_size, | |
style: 'all_docs', | |
doc_ids: doc_ids, | |
returnDocs: true // required so we know when we're done | |
}; | |
if (opts.filter) { | |
if (typeof opts.filter !== 'string') { | |
// required for the client-side filter in onChange | |
changesOpts.include_docs = true; | |
} else { // ddoc filter | |
changesOpts.filter = opts.filter; | |
} | |
} | |
if (opts.query_params) { | |
changesOpts.query_params = opts.query_params; | |
} | |
if (opts.view) { | |
changesOpts.view = opts.view; | |
} | |
getChanges(); | |
}).catch(function (err) { | |
abortReplication('getCheckpoint rejected with ', err); | |
}); | |
} | |
if (returnValue.cancelled) { // cancelled immediately | |
completeReplication(); | |
return; | |
} | |
returnValue.once('cancel', completeReplication); | |
if (typeof opts.onChange === 'function') { | |
returnValue.on('change', opts.onChange); | |
} | |
if (typeof opts.complete === 'function') { | |
returnValue.once('error', opts.complete); | |
returnValue.once('complete', function (result) { | |
opts.complete(null, result); | |
}); | |
} | |
if (typeof opts.since === 'undefined') { | |
startChanges(); | |
} else { | |
writingCheckpoint = true; | |
checkpointer.writeCheckpoint(opts.since).then(function () { | |
writingCheckpoint = false; | |
if (state.cancelled) { | |
completeReplication(); | |
return; | |
} | |
last_seq = opts.since; | |
startChanges(); | |
}).catch(function (err) { | |
writingCheckpoint = false; | |
abortReplication('writeCheckpoint completed with error', err); | |
throw err; | |
}); | |
} | |
} | |
exports.toPouch = toPouch; | |
function toPouch(db, opts) { | |
var PouchConstructor = opts.PouchConstructor; | |
if (typeof db === 'string') { | |
return new PouchConstructor(db, opts); | |
} else if (db.then) { | |
return db; | |
} else { | |
return utils.Promise.resolve(db); | |
} | |
} | |
exports.replicate = replicateWrapper; | |
function replicateWrapper(src, target, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof opts === 'undefined') { | |
opts = {}; | |
} | |
if (!opts.complete) { | |
opts.complete = callback || function () {}; | |
} | |
opts = utils.clone(opts); | |
opts.continuous = opts.continuous || opts.live; | |
opts.retry = ('retry' in opts) ? opts.retry : RETRY_DEFAULT; | |
/*jshint validthis:true */ | |
opts.PouchConstructor = opts.PouchConstructor || this; | |
var replicateRet = new Replication(opts); | |
toPouch(src, opts).then(function (src) { | |
return toPouch(target, opts).then(function (target) { | |
return genReplicationId(src, target, opts).then(function (repId) { | |
replicate(repId, src, target, opts, replicateRet); | |
}); | |
}); | |
}).catch(function (err) { | |
replicateRet.emit('error', err); | |
opts.complete(err); | |
}); | |
return replicateRet; | |
} | |
},{"./checkpointer":96,"./utils":119,"events":80}],116:[function(require,module,exports){ | |
"use strict"; | |
var PouchDB = require("./constructor"); | |
var utils = require('./utils'); | |
var EventEmitter = require('events').EventEmitter; | |
PouchDB.adapters = {}; | |
PouchDB.preferredAdapters = []; | |
PouchDB.prefix = '_pouch_'; | |
var eventEmitter = new EventEmitter(); | |
var eventEmitterMethods = [ | |
'on', | |
'addListener', | |
'emit', | |
'listeners', | |
'once', | |
'removeAllListeners', | |
'removeListener', | |
'setMaxListeners' | |
]; | |
eventEmitterMethods.forEach(function (method) { | |
PouchDB[method] = eventEmitter[method].bind(eventEmitter); | |
}); | |
PouchDB.setMaxListeners(0); | |
PouchDB.parseAdapter = function (name, opts) { | |
var match = name.match(/([a-z\-]*):\/\/(.*)/); | |
var adapter, adapterName; | |
if (match) { | |
// the http adapter expects the fully qualified name | |
name = /http(s?)/.test(match[1]) ? match[1] + '://' + match[2] : match[2]; | |
adapter = match[1]; | |
if (!PouchDB.adapters[adapter].valid()) { | |
throw 'Invalid adapter'; | |
} | |
return {name: name, adapter: match[1]}; | |
} | |
// check for browsers that have been upgraded from websql-only to websql+idb | |
var skipIdb = 'idb' in PouchDB.adapters && 'websql' in PouchDB.adapters && | |
utils.hasLocalStorage() && | |
localStorage['_pouch__websqldb_' + PouchDB.prefix + name]; | |
if (opts.adapter) { | |
adapterName = opts.adapter; | |
} else if (typeof opts !== 'undefined' && opts.db) { | |
adapterName = 'leveldb'; | |
} else { // automatically determine adapter | |
for (var i = 0; i < PouchDB.preferredAdapters.length; ++i) { | |
adapterName = PouchDB.preferredAdapters[i]; | |
if (adapterName in PouchDB.adapters) { | |
if (skipIdb && adapterName === 'idb') { | |
// log it, because this can be confusing during development | |
console.log('PouchDB is downgrading "' + name + '" to WebSQL to' + | |
' avoid data loss, because it was already opened with WebSQL.'); | |
continue; // keep using websql to avoid user data loss | |
} | |
break; | |
} | |
} | |
} | |
adapter = PouchDB.adapters[adapterName]; | |
// if adapter is invalid, then an error will be thrown later | |
var usePrefix = (adapter && 'use_prefix' in adapter) ? | |
adapter.use_prefix : true; | |
return { | |
name: usePrefix ? (PouchDB.prefix + name) : name, | |
adapter: adapterName | |
}; | |
}; | |
PouchDB.destroy = utils.toPromise(function (name, opts, callback) { | |
console.log('PouchDB.destroy() is deprecated and will be removed. ' + | |
'Please use db.destroy() instead.'); | |
if (typeof opts === 'function' || typeof opts === 'undefined') { | |
callback = opts; | |
opts = {}; | |
} | |
if (name && typeof name === 'object') { | |
opts = name; | |
name = undefined; | |
} | |
new PouchDB(name, opts, function (err, db) { | |
if (err) { | |
return callback(err); | |
} | |
db.destroy(callback); | |
}); | |
}); | |
PouchDB.adapter = function (id, obj, addToPreferredAdapters) { | |
if (obj.valid()) { | |
PouchDB.adapters[id] = obj; | |
if (addToPreferredAdapters) { | |
PouchDB.preferredAdapters.push(id); | |
} | |
} | |
}; | |
PouchDB.plugin = function (obj) { | |
Object.keys(obj).forEach(function (id) { | |
PouchDB.prototype[id] = obj[id]; | |
}); | |
}; | |
PouchDB.defaults = function (defaultOpts) { | |
function PouchAlt(name, opts, callback) { | |
if (typeof opts === 'function' || typeof opts === 'undefined') { | |
callback = opts; | |
opts = {}; | |
} | |
if (name && typeof name === 'object') { | |
opts = name; | |
name = undefined; | |
} | |
opts = utils.extend(true, {}, defaultOpts, opts); | |
PouchDB.call(this, name, opts, callback); | |
} | |
utils.inherits(PouchAlt, PouchDB); | |
PouchAlt.destroy = utils.toPromise(function (name, opts, callback) { | |
if (typeof opts === 'function' || typeof opts === 'undefined') { | |
callback = opts; | |
opts = {}; | |
} | |
if (name && typeof name === 'object') { | |
opts = name; | |
name = undefined; | |
} | |
opts = utils.extend(true, {}, defaultOpts, opts); | |
return PouchDB.destroy(name, opts, callback); | |
}); | |
eventEmitterMethods.forEach(function (method) { | |
PouchAlt[method] = eventEmitter[method].bind(eventEmitter); | |
}); | |
PouchAlt.setMaxListeners(0); | |
PouchAlt.preferredAdapters = PouchDB.preferredAdapters.slice(); | |
Object.keys(PouchDB).forEach(function (key) { | |
if (!(key in PouchAlt)) { | |
PouchAlt[key] = PouchDB[key]; | |
} | |
}); | |
return PouchAlt; | |
}; | |
module.exports = PouchDB; | |
},{"./constructor":97,"./utils":119,"events":80}],117:[function(require,module,exports){ | |
'use strict'; | |
var utils = require('./utils'); | |
var replication = require('./replicate'); | |
var replicate = replication.replicate; | |
var EE = require('events').EventEmitter; | |
utils.inherits(Sync, EE); | |
module.exports = sync; | |
function sync(src, target, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
if (typeof opts === 'undefined') { | |
opts = {}; | |
} | |
opts = utils.clone(opts); | |
/*jshint validthis:true */ | |
opts.PouchConstructor = opts.PouchConstructor || this; | |
src = replication.toPouch(src, opts); | |
target = replication.toPouch(target, opts); | |
return new Sync(src, target, opts, callback); | |
} | |
function Sync(src, target, opts, callback) { | |
var self = this; | |
this.canceled = false; | |
var onChange, complete; | |
if ('onChange' in opts) { | |
onChange = opts.onChange; | |
delete opts.onChange; | |
} | |
if (typeof callback === 'function' && !opts.complete) { | |
complete = callback; | |
} else if ('complete' in opts) { | |
complete = opts.complete; | |
delete opts.complete; | |
} | |
this.push = replicate(src, target, opts); | |
this.pull = replicate(target, src, opts); | |
var emittedCancel = false; | |
function onCancel(data) { | |
if (!emittedCancel) { | |
emittedCancel = true; | |
self.emit('cancel', data); | |
} | |
} | |
function pullChange(change) { | |
self.emit('change', { | |
direction: 'pull', | |
change: change | |
}); | |
} | |
function pushChange(change) { | |
self.emit('change', { | |
direction: 'push', | |
change: change | |
}); | |
} | |
function pushDenied(doc) { | |
self.emit('denied', { | |
direction: 'push', | |
doc: doc | |
}); | |
} | |
function pullDenied(doc) { | |
self.emit('denied', { | |
direction: 'pull', | |
doc: doc | |
}); | |
} | |
var listeners = {}; | |
var removed = {}; | |
function removeAll(type) { // type is 'push' or 'pull' | |
return function (event, func) { | |
var isChange = event === 'change' && | |
(func === pullChange || func === pushChange); | |
var isCancel = event === 'cancel' && func === onCancel; | |
var isOtherEvent = event in listeners && func === listeners[event]; | |
if (isChange || isCancel || isOtherEvent) { | |
if (!(event in removed)) { | |
removed[event] = {}; | |
} | |
removed[event][type] = true; | |
if (Object.keys(removed[event]).length === 2) { | |
// both push and pull have asked to be removed | |
self.removeAllListeners(event); | |
} | |
} | |
}; | |
} | |
if (opts.live) { | |
this.push.on('complete', self.pull.cancel.bind(self.pull)); | |
this.pull.on('complete', self.push.cancel.bind(self.push)); | |
} | |
this.on('newListener', function (event) { | |
if (event === 'change') { | |
self.pull.on('change', pullChange); | |
self.push.on('change', pushChange); | |
} else if (event === 'denied') { | |
self.pull.on('denied', pullDenied); | |
self.push.on('denied', pushDenied); | |
} else if (event === 'cancel') { | |
self.pull.on('cancel', onCancel); | |
self.push.on('cancel', onCancel); | |
} else if (event !== 'error' && | |
event !== 'removeListener' && | |
event !== 'complete' && !(event in listeners)) { | |
listeners[event] = function (e) { | |
self.emit(event, e); | |
}; | |
self.pull.on(event, listeners[event]); | |
self.push.on(event, listeners[event]); | |
} | |
}); | |
this.on('removeListener', function (event) { | |
if (event === 'change') { | |
self.pull.removeListener('change', pullChange); | |
self.push.removeListener('change', pushChange); | |
} else if (event === 'cancel') { | |
self.pull.removeListener('cancel', onCancel); | |
self.push.removeListener('cancel', onCancel); | |
} else if (event in listeners) { | |
if (typeof listeners[event] === 'function') { | |
self.pull.removeListener(event, listeners[event]); | |
self.push.removeListener(event, listeners[event]); | |
delete listeners[event]; | |
} | |
} | |
}); | |
this.pull.on('removeListener', removeAll('pull')); | |
this.push.on('removeListener', removeAll('push')); | |
var promise = utils.Promise.all([ | |
this.push, | |
this.pull | |
]).then(function (resp) { | |
var out = { | |
push: resp[0], | |
pull: resp[1] | |
}; | |
self.emit('complete', out); | |
if (complete) { | |
complete(null, out); | |
} | |
self.removeAllListeners(); | |
return out; | |
}, function (err) { | |
self.cancel(); | |
self.emit('error', err); | |
if (complete) { | |
complete(err); | |
} | |
self.removeAllListeners(); | |
throw err; | |
}); | |
this.then = function (success, err) { | |
return promise.then(success, err); | |
}; | |
this.catch = function (err) { | |
return promise.catch(err); | |
}; | |
} | |
Sync.prototype.cancel = function () { | |
if (!this.canceled) { | |
this.canceled = true; | |
this.push.cancel(); | |
this.pull.cancel(); | |
} | |
}; | |
},{"./replicate":115,"./utils":119,"events":80}],118:[function(require,module,exports){ | |
'use strict'; | |
module.exports = TaskQueue; | |
function TaskQueue() { | |
this.isReady = false; | |
this.failed = false; | |
this.queue = []; | |
} | |
TaskQueue.prototype.execute = function () { | |
var d, func; | |
if (this.failed) { | |
while ((d = this.queue.shift())) { | |
if (typeof d === 'function') { | |
d(this.failed); | |
continue; | |
} | |
func = d.parameters[d.parameters.length - 1]; | |
if (typeof func === 'function') { | |
func(this.failed); | |
} else if (d.name === 'changes' && typeof func.complete === 'function') { | |
func.complete(this.failed); | |
} | |
} | |
} else if (this.isReady) { | |
while ((d = this.queue.shift())) { | |
if (typeof d === 'function') { | |
d(); | |
} else { | |
d.task = this.db[d.name].apply(this.db, d.parameters); | |
} | |
} | |
} | |
}; | |
TaskQueue.prototype.fail = function (err) { | |
this.failed = err; | |
this.execute(); | |
}; | |
TaskQueue.prototype.ready = function (db) { | |
if (this.failed) { | |
return false; | |
} else if (arguments.length === 0) { | |
return this.isReady; | |
} | |
this.isReady = db ? true: false; | |
this.db = db; | |
this.execute(); | |
}; | |
TaskQueue.prototype.addTask = function (name, parameters) { | |
if (typeof name === 'function') { | |
this.queue.push(name); | |
if (this.failed) { | |
this.execute(); | |
} | |
} else { | |
var task = { name: name, parameters: parameters }; | |
this.queue.push(task); | |
if (this.failed) { | |
this.execute(); | |
} | |
return task; | |
} | |
}; | |
},{}],119:[function(require,module,exports){ | |
(function (process){ | |
/*jshint strict: false */ | |
/*global chrome */ | |
var merge = require('./merge'); | |
exports.extend = require('pouchdb-extend'); | |
exports.ajax = require('./deps/ajax'); | |
exports.createBlob = require('./deps/blob'); | |
exports.uuid = require('./deps/uuid'); | |
exports.getArguments = require('argsarray'); | |
var buffer = require('./deps/buffer'); | |
var errors = require('./deps/errors'); | |
var EventEmitter = require('events').EventEmitter; | |
var collections = require('pouchdb-collections'); | |
exports.Map = collections.Map; | |
exports.Set = collections.Set; | |
var parseDoc = require('./deps/parse-doc'); | |
var Promise = require('./deps/promise'); | |
exports.Promise = Promise; | |
exports.lastIndexOf = function (str, char) { | |
for (var i = str.length - 1; i >= 0; i--) { | |
if (str.charAt(i) === char) { | |
return i; | |
} | |
} | |
return -1; | |
}; | |
exports.clone = function (obj) { | |
return exports.extend(true, {}, obj); | |
}; | |
// like underscore/lodash _.pick() | |
exports.pick = function (obj, arr) { | |
var res = {}; | |
for (var i = 0, len = arr.length; i < len; i++) { | |
var prop = arr[i]; | |
res[prop] = obj[prop]; | |
} | |
return res; | |
}; | |
exports.inherits = require('inherits'); | |
function isChromeApp() { | |
return (typeof chrome !== "undefined" && | |
typeof chrome.storage !== "undefined" && | |
typeof chrome.storage.local !== "undefined"); | |
} | |
// Pretty dumb name for a function, just wraps callback calls so we dont | |
// to if (callback) callback() everywhere | |
exports.call = exports.getArguments(function (args) { | |
if (!args.length) { | |
return; | |
} | |
var fun = args.shift(); | |
if (typeof fun === 'function') { | |
fun.apply(this, args); | |
} | |
}); | |
exports.isLocalId = function (id) { | |
return (/^_local/).test(id); | |
}; | |
// check if a specific revision of a doc has been deleted | |
// - metadata: the metadata object from the doc store | |
// - rev: (optional) the revision to check. defaults to winning revision | |
exports.isDeleted = function (metadata, rev) { | |
if (!rev) { | |
rev = merge.winningRev(metadata); | |
} | |
var dashIndex = rev.indexOf('-'); | |
if (dashIndex !== -1) { | |
rev = rev.substring(dashIndex + 1); | |
} | |
var deleted = false; | |
merge.traverseRevTree(metadata.rev_tree, | |
function (isLeaf, pos, id, acc, opts) { | |
if (id === rev) { | |
deleted = !!opts.deleted; | |
} | |
}); | |
return deleted; | |
}; | |
exports.revExists = function (metadata, rev) { | |
var found = false; | |
merge.traverseRevTree(metadata.rev_tree, function (leaf, pos, id) { | |
if ((pos + '-' + id) === rev) { | |
found = true; | |
} | |
}); | |
return found; | |
}; | |
exports.filterChange = function filterChange(opts) { | |
var req = {}; | |
var hasFilter = opts.filter && typeof opts.filter === 'function'; | |
req.query = opts.query_params; | |
return function filter(change) { | |
if (!change.doc) { | |
// CSG sends events on the changes feed that don't have documents, | |
// this hack makes a whole lot of existing code robust. | |
change.doc = {}; | |
} | |
if (opts.filter && hasFilter && !opts.filter.call(this, change.doc, req)) { | |
return false; | |
} | |
if (!opts.include_docs) { | |
delete change.doc; | |
} else if (!opts.attachments) { | |
for (var att in change.doc._attachments) { | |
if (change.doc._attachments.hasOwnProperty(att)) { | |
change.doc._attachments[att].stub = true; | |
} | |
} | |
} | |
return true; | |
}; | |
}; | |
exports.parseDoc = parseDoc.parseDoc; | |
exports.invalidIdError = parseDoc.invalidIdError; | |
exports.isCordova = function () { | |
return (typeof cordova !== "undefined" || | |
typeof PhoneGap !== "undefined" || | |
typeof phonegap !== "undefined"); | |
}; | |
exports.hasLocalStorage = function () { | |
if (isChromeApp()) { | |
return false; | |
} | |
try { | |
return localStorage; | |
} catch (e) { | |
return false; | |
} | |
}; | |
exports.Changes = Changes; | |
exports.inherits(Changes, EventEmitter); | |
function Changes() { | |
if (!(this instanceof Changes)) { | |
return new Changes(); | |
} | |
var self = this; | |
EventEmitter.call(this); | |
this.isChrome = isChromeApp(); | |
this.listeners = {}; | |
this.hasLocal = false; | |
if (!this.isChrome) { | |
this.hasLocal = exports.hasLocalStorage(); | |
} | |
if (this.isChrome) { | |
chrome.storage.onChanged.addListener(function (e) { | |
// make sure it's event addressed to us | |
if (e.db_name != null) { | |
//object only has oldValue, newValue members | |
self.emit(e.dbName.newValue); | |
} | |
}); | |
} else if (this.hasLocal) { | |
if (typeof addEventListener !== 'undefined') { | |
addEventListener("storage", function (e) { | |
self.emit(e.key); | |
}); | |
} else { // old IE | |
window.attachEvent("storage", function (e) { | |
self.emit(e.key); | |
}); | |
} | |
} | |
} | |
Changes.prototype.addListener = function (dbName, id, db, opts) { | |
if (this.listeners[id]) { | |
return; | |
} | |
var self = this; | |
var inprogress = false; | |
function eventFunction() { | |
if (!self.listeners[id]) { | |
return; | |
} | |
if (inprogress) { | |
inprogress = 'waiting'; | |
return; | |
} | |
inprogress = true; | |
db.changes({ | |
style: opts.style, | |
include_docs: opts.include_docs, | |
attachments: opts.attachments, | |
conflicts: opts.conflicts, | |
continuous: false, | |
descending: false, | |
filter: opts.filter, | |
doc_ids: opts.doc_ids, | |
view: opts.view, | |
since: opts.since, | |
query_params: opts.query_params | |
}).on('change', function (c) { | |
if (c.seq > opts.since && !opts.cancelled) { | |
opts.since = c.seq; | |
exports.call(opts.onChange, c); | |
} | |
}).on('complete', function () { | |
if (inprogress === 'waiting') { | |
process.nextTick(function () { | |
self.notify(dbName); | |
}); | |
} | |
inprogress = false; | |
}).on('error', function () { | |
inprogress = false; | |
}); | |
} | |
this.listeners[id] = eventFunction; | |
this.on(dbName, eventFunction); | |
}; | |
Changes.prototype.removeListener = function (dbName, id) { | |
if (!(id in this.listeners)) { | |
return; | |
} | |
EventEmitter.prototype.removeListener.call(this, dbName, | |
this.listeners[id]); | |
}; | |
Changes.prototype.notifyLocalWindows = function (dbName) { | |
//do a useless change on a storage thing | |
//in order to get other windows's listeners to activate | |
if (this.isChrome) { | |
chrome.storage.local.set({dbName: dbName}); | |
} else if (this.hasLocal) { | |
localStorage[dbName] = (localStorage[dbName] === "a") ? "b" : "a"; | |
} | |
}; | |
Changes.prototype.notify = function (dbName) { | |
this.emit(dbName); | |
this.notifyLocalWindows(dbName); | |
}; | |
if (typeof atob === 'function') { | |
exports.atob = function (str) { | |
return atob(str); | |
}; | |
} else { | |
exports.atob = function (str) { | |
var base64 = new buffer(str, 'base64'); | |
// Node.js will just skip the characters it can't encode instead of | |
// throwing and exception | |
if (base64.toString('base64') !== str) { | |
throw ("Cannot base64 encode full string"); | |
} | |
return base64.toString('binary'); | |
}; | |
} | |
if (typeof btoa === 'function') { | |
exports.btoa = function (str) { | |
return btoa(str); | |
}; | |
} else { | |
exports.btoa = function (str) { | |
return new buffer(str, 'binary').toString('base64'); | |
}; | |
} | |
// From http://stackoverflow.com/questions/14967647/ (continues on next line) | |
// encode-decode-image-with-base64-breaks-image (2013-04-21) | |
exports.fixBinary = function (bin) { | |
if (!process.browser) { | |
// don't need to do this in Node | |
return bin; | |
} | |
var length = bin.length; | |
var buf = new ArrayBuffer(length); | |
var arr = new Uint8Array(buf); | |
for (var i = 0; i < length; i++) { | |
arr[i] = bin.charCodeAt(i); | |
} | |
return buf; | |
}; | |
// shim for browsers that don't support it | |
exports.readAsBinaryString = function (blob, callback) { | |
var reader = new FileReader(); | |
var hasBinaryString = typeof reader.readAsBinaryString === 'function'; | |
reader.onloadend = function (e) { | |
var result = e.target.result || ''; | |
if (hasBinaryString) { | |
return callback(result); | |
} | |
callback(exports.arrayBufferToBinaryString(result)); | |
}; | |
if (hasBinaryString) { | |
reader.readAsBinaryString(blob); | |
} else { | |
reader.readAsArrayBuffer(blob); | |
} | |
}; | |
// simplified API. universal browser support is assumed | |
exports.readAsArrayBuffer = function (blob, callback) { | |
var reader = new FileReader(); | |
reader.onloadend = function (e) { | |
var result = e.target.result || new ArrayBuffer(0); | |
callback(result); | |
}; | |
reader.readAsArrayBuffer(blob); | |
}; | |
exports.once = function (fun) { | |
var called = false; | |
return exports.getArguments(function (args) { | |
if (called) { | |
throw new Error('once called more than once'); | |
} else { | |
called = true; | |
fun.apply(this, args); | |
} | |
}); | |
}; | |
exports.toPromise = function (func) { | |
//create the function we will be returning | |
return exports.getArguments(function (args) { | |
var self = this; | |
var tempCB = | |
(typeof args[args.length - 1] === 'function') ? args.pop() : false; | |
// if the last argument is a function, assume its a callback | |
var usedCB; | |
if (tempCB) { | |
// if it was a callback, create a new callback which calls it, | |
// but do so async so we don't trap any errors | |
usedCB = function (err, resp) { | |
process.nextTick(function () { | |
tempCB(err, resp); | |
}); | |
}; | |
} | |
var promise = new Promise(function (fulfill, reject) { | |
var resp; | |
try { | |
var callback = exports.once(function (err, mesg) { | |
if (err) { | |
reject(err); | |
} else { | |
fulfill(mesg); | |
} | |
}); | |
// create a callback for this invocation | |
// apply the function in the orig context | |
args.push(callback); | |
resp = func.apply(self, args); | |
if (resp && typeof resp.then === 'function') { | |
fulfill(resp); | |
} | |
} catch (e) { | |
reject(e); | |
} | |
}); | |
// if there is a callback, call it back | |
if (usedCB) { | |
promise.then(function (result) { | |
usedCB(null, result); | |
}, usedCB); | |
} | |
promise.cancel = function () { | |
return this; | |
}; | |
return promise; | |
}); | |
}; | |
exports.adapterFun = function (name, callback) { | |
var log = require('debug')('pouchdb:api'); | |
function logApiCall(self, name, args) { | |
if (!log.enabled) { | |
return; | |
} | |
var logArgs = [self._db_name, name]; | |
for (var i = 0; i < args.length - 1; i++) { | |
logArgs.push(args[i]); | |
} | |
log.apply(null, logArgs); | |
// override the callback itself to log the response | |
var origCallback = args[args.length - 1]; | |
args[args.length - 1] = function (err, res) { | |
var responseArgs = [self._db_name, name]; | |
responseArgs = responseArgs.concat( | |
err ? ['error', err] : ['success', res] | |
); | |
log.apply(null, responseArgs); | |
origCallback(err, res); | |
}; | |
} | |
return exports.toPromise(exports.getArguments(function (args) { | |
if (this._closed) { | |
return Promise.reject(new Error('database is closed')); | |
} | |
var self = this; | |
logApiCall(self, name, args); | |
if (!this.taskqueue.isReady) { | |
return new Promise(function (fulfill, reject) { | |
self.taskqueue.addTask(function (failed) { | |
if (failed) { | |
reject(failed); | |
} else { | |
fulfill(self[name].apply(self, args)); | |
} | |
}); | |
}); | |
} | |
return callback.apply(this, args); | |
})); | |
}; | |
//Can't find original post, but this is close | |
//http://stackoverflow.com/questions/6965107/ (continues on next line) | |
//converting-between-strings-and-arraybuffers | |
exports.arrayBufferToBinaryString = function (buffer) { | |
var binary = ""; | |
var bytes = new Uint8Array(buffer); | |
var length = bytes.byteLength; | |
for (var i = 0; i < length; i++) { | |
binary += String.fromCharCode(bytes[i]); | |
} | |
return binary; | |
}; | |
exports.cancellableFun = function (fun, self, opts) { | |
opts = opts ? exports.clone(true, {}, opts) : {}; | |
var emitter = new EventEmitter(); | |
var oldComplete = opts.complete || function () { }; | |
var complete = opts.complete = exports.once(function (err, resp) { | |
if (err) { | |
oldComplete(err); | |
} else { | |
emitter.emit('end', resp); | |
oldComplete(null, resp); | |
} | |
emitter.removeAllListeners(); | |
}); | |
var oldOnChange = opts.onChange || function () {}; | |
var lastChange = 0; | |
self.on('destroyed', function () { | |
emitter.removeAllListeners(); | |
}); | |
opts.onChange = function (change) { | |
oldOnChange(change); | |
if (change.seq <= lastChange) { | |
return; | |
} | |
lastChange = change.seq; | |
emitter.emit('change', change); | |
if (change.deleted) { | |
emitter.emit('delete', change); | |
} else if (change.changes.length === 1 && | |
change.changes[0].rev.slice(0, 1) === '1-') { | |
emitter.emit('create', change); | |
} else { | |
emitter.emit('update', change); | |
} | |
}; | |
var promise = new Promise(function (fulfill, reject) { | |
opts.complete = function (err, res) { | |
if (err) { | |
reject(err); | |
} else { | |
fulfill(res); | |
} | |
}; | |
}); | |
promise.then(function (result) { | |
complete(null, result); | |
}, complete); | |
// this needs to be overwridden by caller, dont fire complete until | |
// the task is ready | |
promise.cancel = function () { | |
promise.isCancelled = true; | |
if (self.taskqueue.isReady) { | |
opts.complete(null, {status: 'cancelled'}); | |
} | |
}; | |
if (!self.taskqueue.isReady) { | |
self.taskqueue.addTask(function () { | |
if (promise.isCancelled) { | |
opts.complete(null, {status: 'cancelled'}); | |
} else { | |
fun(self, opts, promise); | |
} | |
}); | |
} else { | |
fun(self, opts, promise); | |
} | |
promise.on = emitter.on.bind(emitter); | |
promise.once = emitter.once.bind(emitter); | |
promise.addListener = emitter.addListener.bind(emitter); | |
promise.removeListener = emitter.removeListener.bind(emitter); | |
promise.removeAllListeners = emitter.removeAllListeners.bind(emitter); | |
promise.setMaxListeners = emitter.setMaxListeners.bind(emitter); | |
promise.listeners = emitter.listeners.bind(emitter); | |
promise.emit = emitter.emit.bind(emitter); | |
return promise; | |
}; | |
exports.MD5 = exports.toPromise(require('./deps/md5')); | |
exports.explain404 = require('./deps/explain404'); | |
exports.info = function (str) { | |
if (typeof console !== 'undefined' && 'info' in console) { | |
console.info(str); | |
} | |
}; | |
exports.parseUri = require('./deps/parse-uri'); | |
exports.compare = function (left, right) { | |
return left < right ? -1 : left > right ? 1 : 0; | |
}; | |
exports.updateDoc = function updateDoc(prev, docInfo, results, | |
i, cb, writeDoc, newEdits) { | |
if (exports.revExists(prev, docInfo.metadata.rev)) { | |
results[i] = docInfo; | |
return cb(); | |
} | |
// TODO: some of these can be pre-calculated, but it's safer to just | |
// call merge.winningRev() and exports.isDeleted() all over again | |
var previousWinningRev = merge.winningRev(prev); | |
var previouslyDeleted = exports.isDeleted(prev, previousWinningRev); | |
var deleted = exports.isDeleted(docInfo.metadata); | |
var isRoot = /^1-/.test(docInfo.metadata.rev); | |
if (previouslyDeleted && !deleted && newEdits && isRoot) { | |
var newDoc = docInfo.data; | |
newDoc._rev = previousWinningRev; | |
newDoc._id = docInfo.metadata.id; | |
docInfo = exports.parseDoc(newDoc, newEdits); | |
} | |
var merged = merge.merge(prev.rev_tree, docInfo.metadata.rev_tree[0], 1000); | |
var inConflict = newEdits && (((previouslyDeleted && deleted) || | |
(!previouslyDeleted && merged.conflicts !== 'new_leaf') || | |
(previouslyDeleted && !deleted && merged.conflicts === 'new_branch'))); | |
if (inConflict) { | |
var err = errors.error(errors.REV_CONFLICT); | |
results[i] = err; | |
return cb(); | |
} | |
var newRev = docInfo.metadata.rev; | |
docInfo.metadata.rev_tree = merged.tree; | |
if (prev.rev_map) { | |
docInfo.metadata.rev_map = prev.rev_map; // used by leveldb | |
} | |
// recalculate | |
var winningRev = merge.winningRev(docInfo.metadata); | |
var winningRevIsDeleted = exports.isDeleted(docInfo.metadata, winningRev); | |
// calculate the total number of documents that were added/removed, | |
// from the perspective of total_rows/doc_count | |
var delta = (previouslyDeleted === winningRevIsDeleted) ? 0 : | |
previouslyDeleted < winningRevIsDeleted ? -1 : 1; | |
var newRevIsDeleted = exports.isDeleted(docInfo.metadata, newRev); | |
writeDoc(docInfo, winningRev, winningRevIsDeleted, newRevIsDeleted, | |
true, delta, i, cb); | |
}; | |
exports.processDocs = function processDocs(docInfos, api, fetchedDocs, | |
tx, results, writeDoc, opts, | |
overallCallback) { | |
if (!docInfos.length) { | |
return; | |
} | |
function insertDoc(docInfo, resultsIdx, callback) { | |
// Cant insert new deleted documents | |
var winningRev = merge.winningRev(docInfo.metadata); | |
var deleted = exports.isDeleted(docInfo.metadata, winningRev); | |
if ('was_delete' in opts && deleted) { | |
results[resultsIdx] = errors.error(errors.MISSING_DOC, 'deleted'); | |
return callback(); | |
} | |
var delta = deleted ? 0 : 1; | |
writeDoc(docInfo, winningRev, deleted, deleted, false, | |
delta, resultsIdx, callback); | |
} | |
var newEdits = opts.new_edits; | |
var idsToDocs = new exports.Map(); | |
var docsDone = 0; | |
var docsToDo = docInfos.length; | |
function checkAllDocsDone() { | |
if (++docsDone === docsToDo && overallCallback) { | |
overallCallback(); | |
} | |
} | |
docInfos.forEach(function (currentDoc, resultsIdx) { | |
if (currentDoc._id && exports.isLocalId(currentDoc._id)) { | |
api[currentDoc._deleted ? '_removeLocal' : '_putLocal']( | |
currentDoc, {ctx: tx}, function (err) { | |
if (err) { | |
results[resultsIdx] = err; | |
} else { | |
results[resultsIdx] = {ok: true}; | |
} | |
checkAllDocsDone(); | |
}); | |
return; | |
} | |
var id = currentDoc.metadata.id; | |
if (idsToDocs.has(id)) { | |
docsToDo--; // duplicate | |
idsToDocs.get(id).push([currentDoc, resultsIdx]); | |
} else { | |
idsToDocs.set(id, [[currentDoc, resultsIdx]]); | |
} | |
}); | |
// in the case of new_edits, the user can provide multiple docs | |
// with the same id. these need to be processed sequentially | |
idsToDocs.forEach(function (docs, id) { | |
var numDone = 0; | |
function docWritten() { | |
if (++numDone < docs.length) { | |
nextDoc(); | |
} else { | |
checkAllDocsDone(); | |
} | |
} | |
function nextDoc() { | |
var value = docs[numDone]; | |
var currentDoc = value[0]; | |
var resultsIdx = value[1]; | |
if (fetchedDocs.has(id)) { | |
exports.updateDoc(fetchedDocs.get(id), currentDoc, results, | |
resultsIdx, docWritten, writeDoc, newEdits); | |
} else { | |
insertDoc(currentDoc, resultsIdx, docWritten); | |
} | |
} | |
nextDoc(); | |
}); | |
}; | |
exports.preprocessAttachments = function preprocessAttachments( | |
docInfos, blobType, callback) { | |
if (!docInfos.length) { | |
return callback(); | |
} | |
var docv = 0; | |
function parseBase64(data) { | |
try { | |
return exports.atob(data); | |
} catch (e) { | |
var err = errors.error(errors.BAD_ARG, | |
'Attachments need to be base64 encoded'); | |
return {error: err}; | |
} | |
} | |
function preprocessAttachment(att, callback) { | |
if (att.stub) { | |
return callback(); | |
} | |
if (typeof att.data === 'string') { | |
// input is a base64 string | |
var asBinary = parseBase64(att.data); | |
if (asBinary.error) { | |
return callback(asBinary.error); | |
} | |
att.length = asBinary.length; | |
if (blobType === 'blob') { | |
att.data = exports.createBlob([exports.fixBinary(asBinary)], | |
{type: att.content_type}); | |
} else if (blobType === 'base64') { | |
att.data = exports.btoa(asBinary); | |
} else { // binary | |
att.data = asBinary; | |
} | |
exports.MD5(asBinary).then(function (result) { | |
att.digest = 'md5-' + result; | |
callback(); | |
}); | |
} else { // input is a blob | |
exports.readAsArrayBuffer(att.data, function (buff) { | |
if (blobType === 'binary') { | |
att.data = exports.arrayBufferToBinaryString(buff); | |
} else if (blobType === 'base64') { | |
att.data = exports.btoa(exports.arrayBufferToBinaryString(buff)); | |
} | |
exports.MD5(buff).then(function (result) { | |
att.digest = 'md5-' + result; | |
att.length = buff.byteLength; | |
callback(); | |
}); | |
}); | |
} | |
} | |
var overallErr; | |
docInfos.forEach(function (docInfo) { | |
var attachments = docInfo.data && docInfo.data._attachments ? | |
Object.keys(docInfo.data._attachments) : []; | |
var recv = 0; | |
if (!attachments.length) { | |
return done(); | |
} | |
function processedAttachment(err) { | |
overallErr = err; | |
recv++; | |
if (recv === attachments.length) { | |
done(); | |
} | |
} | |
for (var key in docInfo.data._attachments) { | |
if (docInfo.data._attachments.hasOwnProperty(key)) { | |
preprocessAttachment(docInfo.data._attachments[key], | |
processedAttachment); | |
} | |
} | |
}); | |
function done() { | |
docv++; | |
if (docInfos.length === docv) { | |
if (overallErr) { | |
callback(overallErr); | |
} else { | |
callback(); | |
} | |
} | |
} | |
}; | |
// compact a tree by marking its non-leafs as missing, | |
// and return a list of revs to delete | |
exports.compactTree = function compactTree(metadata) { | |
var revs = []; | |
merge.traverseRevTree(metadata.rev_tree, function (isLeaf, pos, | |
revHash, ctx, opts) { | |
if (opts.status === 'available' && !isLeaf) { | |
revs.push(pos + '-' + revHash); | |
opts.status = 'missing'; | |
} | |
}); | |
return revs; | |
}; | |
var vuvuzela = require('vuvuzela'); | |
exports.safeJsonParse = function safeJsonParse(str) { | |
try { | |
return JSON.parse(str); | |
} catch (e) { | |
return vuvuzela.parse(str); | |
} | |
}; | |
exports.safeJsonStringify = function safeJsonStringify(json) { | |
try { | |
return JSON.stringify(json); | |
} catch (e) { | |
return vuvuzela.stringify(json); | |
} | |
}; | |
}).call(this,require('_process')) | |
},{"./deps/ajax":98,"./deps/blob":99,"./deps/buffer":100,"./deps/errors":101,"./deps/explain404":102,"./deps/md5":103,"./deps/parse-doc":104,"./deps/parse-uri":106,"./deps/promise":107,"./deps/uuid":110,"./merge":114,"_process":81,"argsarray":121,"debug":122,"events":80,"inherits":125,"pouchdb-collections":146,"pouchdb-extend":147,"vuvuzela":156}],120:[function(require,module,exports){ | |
module.exports = "3.5.0"; | |
},{}],121:[function(require,module,exports){ | |
'use strict'; | |
module.exports = argsArray; | |
function argsArray(fun) { | |
return function () { | |
var len = arguments.length; | |
if (len) { | |
var args = []; | |
var i = -1; | |
while (++i < len) { | |
args[i] = arguments[i]; | |
} | |
return fun.call(this, args); | |
} else { | |
return fun.call(this, []); | |
} | |
}; | |
} | |
},{}],122:[function(require,module,exports){ | |
/** | |
* This is the web browser implementation of `debug()`. | |
* | |
* Expose `debug()` as the module. | |
*/ | |
exports = module.exports = require('./debug'); | |
exports.log = log; | |
exports.formatArgs = formatArgs; | |
exports.save = save; | |
exports.load = load; | |
exports.useColors = useColors; | |
exports.storage = 'undefined' != typeof chrome | |
&& 'undefined' != typeof chrome.storage | |
? chrome.storage.local | |
: localstorage(); | |
/** | |
* Colors. | |
*/ | |
exports.colors = [ | |
'lightseagreen', | |
'forestgreen', | |
'goldenrod', | |
'dodgerblue', | |
'darkorchid', | |
'crimson' | |
]; | |
/** | |
* Currently only WebKit-based Web Inspectors, Firefox >= v31, | |
* and the Firebug extension (any Firefox version) are known | |
* to support "%c" CSS customizations. | |
* | |
* TODO: add a `localStorage` variable to explicitly enable/disable colors | |
*/ | |
function useColors() { | |
// is webkit? http://stackoverflow.com/a/16459606/376773 | |
return ('WebkitAppearance' in document.documentElement.style) || | |
// is firebug? http://stackoverflow.com/a/398120/376773 | |
(window.console && (console.firebug || (console.exception && console.table))) || | |
// is firefox >= v31? | |
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages | |
(navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31); | |
} | |
/** | |
* Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. | |
*/ | |
exports.formatters.j = function(v) { | |
return JSON.stringify(v); | |
}; | |
/** | |
* Colorize log arguments if enabled. | |
* | |
* @api public | |
*/ | |
function formatArgs() { | |
var args = arguments; | |
var useColors = this.useColors; | |
args[0] = (useColors ? '%c' : '') | |
+ this.namespace | |
+ (useColors ? ' %c' : ' ') | |
+ args[0] | |
+ (useColors ? '%c ' : ' ') | |
+ '+' + exports.humanize(this.diff); | |
if (!useColors) return args; | |
var c = 'color: ' + this.color; | |
args = [args[0], c, 'color: inherit'].concat(Array.prototype.slice.call(args, 1)); | |
// the final "%c" is somewhat tricky, because there could be other | |
// arguments passed either before or after the %c, so we need to | |
// figure out the correct index to insert the CSS into | |
var index = 0; | |
var lastC = 0; | |
args[0].replace(/%[a-z%]/g, function(match) { | |
if ('%%' === match) return; | |
index++; | |
if ('%c' === match) { | |
// we only are interested in the *last* %c | |
// (the user may have provided their own) | |
lastC = index; | |
} | |
}); | |
args.splice(lastC, 0, c); | |
return args; | |
} | |
/** | |
* Invokes `console.log()` when available. | |
* No-op when `console.log` is not a "function". | |
* | |
* @api public | |
*/ | |
function log() { | |
// this hackery is required for IE8/9, where | |
// the `console.log` function doesn't have 'apply' | |
return 'object' === typeof console | |
&& console.log | |
&& Function.prototype.apply.call(console.log, console, arguments); | |
} | |
/** | |
* Save `namespaces`. | |
* | |
* @param {String} namespaces | |
* @api private | |
*/ | |
function save(namespaces) { | |
try { | |
if (null == namespaces) { | |
exports.storage.removeItem('debug'); | |
} else { | |
exports.storage.debug = namespaces; | |
} | |
} catch(e) {} | |
} | |
/** | |
* Load `namespaces`. | |
* | |
* @return {String} returns the previously persisted debug modes | |
* @api private | |
*/ | |
function load() { | |
var r; | |
try { | |
r = exports.storage.debug; | |
} catch(e) {} | |
return r; | |
} | |
/** | |
* Enable namespaces listed in `localStorage.debug` initially. | |
*/ | |
exports.enable(load()); | |
/** | |
* Localstorage attempts to return the localstorage. | |
* | |
* This is necessary because safari throws | |
* when a user disables cookies/localstorage | |
* and you attempt to access it. | |
* | |
* @return {LocalStorage} | |
* @api private | |
*/ | |
function localstorage(){ | |
try { | |
return window.localStorage; | |
} catch (e) {} | |
} | |
},{"./debug":123}],123:[function(require,module,exports){ | |
/** | |
* This is the common logic for both the Node.js and web browser | |
* implementations of `debug()`. | |
* | |
* Expose `debug()` as the module. | |
*/ | |
exports = module.exports = debug; | |
exports.coerce = coerce; | |
exports.disable = disable; | |
exports.enable = enable; | |
exports.enabled = enabled; | |
exports.humanize = require('ms'); | |
/** | |
* The currently active debug mode names, and names to skip. | |
*/ | |
exports.names = []; | |
exports.skips = []; | |
/** | |
* Map of special "%n" handling functions, for the debug "format" argument. | |
* | |
* Valid key names are a single, lowercased letter, i.e. "n". | |
*/ | |
exports.formatters = {}; | |
/** | |
* Previously assigned color. | |
*/ | |
var prevColor = 0; | |
/** | |
* Previous log timestamp. | |
*/ | |
var prevTime; | |
/** | |
* Select a color. | |
* | |
* @return {Number} | |
* @api private | |
*/ | |
function selectColor() { | |
return exports.colors[prevColor++ % exports.colors.length]; | |
} | |
/** | |
* Create a debugger with the given `namespace`. | |
* | |
* @param {String} namespace | |
* @return {Function} | |
* @api public | |
*/ | |
function debug(namespace) { | |
// define the `disabled` version | |
function disabled() { | |
} | |
disabled.enabled = false; | |
// define the `enabled` version | |
function enabled() { | |
var self = enabled; | |
// set `diff` timestamp | |
var curr = +new Date(); | |
var ms = curr - (prevTime || curr); | |
self.diff = ms; | |
self.prev = prevTime; | |
self.curr = curr; | |
prevTime = curr; | |
// add the `color` if not set | |
if (null == self.useColors) self.useColors = exports.useColors(); | |
if (null == self.color && self.useColors) self.color = selectColor(); | |
var args = Array.prototype.slice.call(arguments); | |
args[0] = exports.coerce(args[0]); | |
if ('string' !== typeof args[0]) { | |
// anything else let's inspect with %o | |
args = ['%o'].concat(args); | |
} | |
// apply any `formatters` transformations | |
var index = 0; | |
args[0] = args[0].replace(/%([a-z%])/g, function(match, format) { | |
// if we encounter an escaped % then don't increase the array index | |
if (match === '%%') return match; | |
index++; | |
var formatter = exports.formatters[format]; | |
if ('function' === typeof formatter) { | |
var val = args[index]; | |
match = formatter.call(self, val); | |
// now we need to remove `args[index]` since it's inlined in the `format` | |
args.splice(index, 1); | |
index--; | |
} | |
return match; | |
}); | |
if ('function' === typeof exports.formatArgs) { | |
args = exports.formatArgs.apply(self, args); | |
} | |
var logFn = enabled.log || exports.log || console.log.bind(console); | |
logFn.apply(self, args); | |
} | |
enabled.enabled = true; | |
var fn = exports.enabled(namespace) ? enabled : disabled; | |
fn.namespace = namespace; | |
return fn; | |
} | |
/** | |
* Enables a debug mode by namespaces. This can include modes | |
* separated by a colon and wildcards. | |
* | |
* @param {String} namespaces | |
* @api public | |
*/ | |
function enable(namespaces) { | |
exports.save(namespaces); | |
var split = (namespaces || '').split(/[\s,]+/); | |
var len = split.length; | |
for (var i = 0; i < len; i++) { | |
if (!split[i]) continue; // ignore empty strings | |
namespaces = split[i].replace(/\*/g, '.*?'); | |
if (namespaces[0] === '-') { | |
exports.skips.push(new RegExp('^' + namespaces.substr(1) + '$')); | |
} else { | |
exports.names.push(new RegExp('^' + namespaces + '$')); | |
} | |
} | |
} | |
/** | |
* Disable debug output. | |
* | |
* @api public | |
*/ | |
function disable() { | |
exports.enable(''); | |
} | |
/** | |
* Returns true if the given mode name is enabled, false otherwise. | |
* | |
* @param {String} name | |
* @return {Boolean} | |
* @api public | |
*/ | |
function enabled(name) { | |
var i, len; | |
for (i = 0, len = exports.skips.length; i < len; i++) { | |
if (exports.skips[i].test(name)) { | |
return false; | |
} | |
} | |
for (i = 0, len = exports.names.length; i < len; i++) { | |
if (exports.names[i].test(name)) { | |
return true; | |
} | |
} | |
return false; | |
} | |
/** | |
* Coerce `val`. | |
* | |
* @param {Mixed} val | |
* @return {Mixed} | |
* @api private | |
*/ | |
function coerce(val) { | |
if (val instanceof Error) return val.stack || val.message; | |
return val; | |
} | |
},{"ms":124}],124:[function(require,module,exports){ | |
/** | |
* Helpers. | |
*/ | |
var s = 1000; | |
var m = s * 60; | |
var h = m * 60; | |
var d = h * 24; | |
var y = d * 365.25; | |
/** | |
* Parse or format the given `val`. | |
* | |
* Options: | |
* | |
* - `long` verbose formatting [false] | |
* | |
* @param {String|Number} val | |
* @param {Object} options | |
* @return {String|Number} | |
* @api public | |
*/ | |
module.exports = function(val, options){ | |
options = options || {}; | |
if ('string' == typeof val) return parse(val); | |
return options.long | |
? long(val) | |
: short(val); | |
}; | |
/** | |
* Parse the given `str` and return milliseconds. | |
* | |
* @param {String} str | |
* @return {Number} | |
* @api private | |
*/ | |
function parse(str) { | |
str = '' + str; | |
if (str.length > 10000) return; | |
var match = /^((?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|years?|yrs?|y)?$/i.exec(str); | |
if (!match) return; | |
var n = parseFloat(match[1]); | |
var type = (match[2] || 'ms').toLowerCase(); | |
switch (type) { | |
case 'years': | |
case 'year': | |
case 'yrs': | |
case 'yr': | |
case 'y': | |
return n * y; | |
case 'days': | |
case 'day': | |
case 'd': | |
return n * d; | |
case 'hours': | |
case 'hour': | |
case 'hrs': | |
case 'hr': | |
case 'h': | |
return n * h; | |
case 'minutes': | |
case 'minute': | |
case 'mins': | |
case 'min': | |
case 'm': | |
return n * m; | |
case 'seconds': | |
case 'second': | |
case 'secs': | |
case 'sec': | |
case 's': | |
return n * s; | |
case 'milliseconds': | |
case 'millisecond': | |
case 'msecs': | |
case 'msec': | |
case 'ms': | |
return n; | |
} | |
} | |
/** | |
* Short format for `ms`. | |
* | |
* @param {Number} ms | |
* @return {String} | |
* @api private | |
*/ | |
function short(ms) { | |
if (ms >= d) return Math.round(ms / d) + 'd'; | |
if (ms >= h) return Math.round(ms / h) + 'h'; | |
if (ms >= m) return Math.round(ms / m) + 'm'; | |
if (ms >= s) return Math.round(ms / s) + 's'; | |
return ms + 'ms'; | |
} | |
/** | |
* Long format for `ms`. | |
* | |
* @param {Number} ms | |
* @return {String} | |
* @api private | |
*/ | |
function long(ms) { | |
return plural(ms, d, 'day') | |
|| plural(ms, h, 'hour') | |
|| plural(ms, m, 'minute') | |
|| plural(ms, s, 'second') | |
|| ms + ' ms'; | |
} | |
/** | |
* Pluralization helper. | |
*/ | |
function plural(ms, n, name) { | |
if (ms < n) return; | |
if (ms < n * 1.5) return Math.floor(ms / n) + ' ' + name; | |
return Math.ceil(ms / n) + ' ' + name + 's'; | |
} | |
},{}],125:[function(require,module,exports){ | |
if (typeof Object.create === 'function') { | |
// implementation from standard node.js 'util' module | |
module.exports = function inherits(ctor, superCtor) { | |
ctor.super_ = superCtor | |
ctor.prototype = Object.create(superCtor.prototype, { | |
constructor: { | |
value: ctor, | |
enumerable: false, | |
writable: true, | |
configurable: true | |
} | |
}); | |
}; | |
} else { | |
// old school shim for old browsers | |
module.exports = function inherits(ctor, superCtor) { | |
ctor.super_ = superCtor | |
var TempCtor = function () {} | |
TempCtor.prototype = superCtor.prototype | |
ctor.prototype = new TempCtor() | |
ctor.prototype.constructor = ctor | |
} | |
} | |
},{}],126:[function(require,module,exports){ | |
'use strict'; | |
module.exports = INTERNAL; | |
function INTERNAL() {} | |
},{}],127:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var reject = require('./reject'); | |
var resolve = require('./resolve'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = all; | |
function all(iterable) { | |
if (Object.prototype.toString.call(iterable) !== '[object Array]') { | |
return reject(new TypeError('must be an array')); | |
} | |
var len = iterable.length; | |
var called = false; | |
if (!len) { | |
return resolve([]); | |
} | |
var values = new Array(len); | |
var resolved = 0; | |
var i = -1; | |
var promise = new Promise(INTERNAL); | |
while (++i < len) { | |
allResolver(iterable[i], i); | |
} | |
return promise; | |
function allResolver(value, i) { | |
resolve(value).then(resolveFromAll, function (error) { | |
if (!called) { | |
called = true; | |
handlers.reject(promise, error); | |
} | |
}); | |
function resolveFromAll(outValue) { | |
values[i] = outValue; | |
if (++resolved === len & !called) { | |
called = true; | |
handlers.resolve(promise, values); | |
} | |
} | |
} | |
} | |
},{"./INTERNAL":126,"./handlers":128,"./promise":130,"./reject":133,"./resolve":134}],128:[function(require,module,exports){ | |
'use strict'; | |
var tryCatch = require('./tryCatch'); | |
var resolveThenable = require('./resolveThenable'); | |
var states = require('./states'); | |
exports.resolve = function (self, value) { | |
var result = tryCatch(getThen, value); | |
if (result.status === 'error') { | |
return exports.reject(self, result.value); | |
} | |
var thenable = result.value; | |
if (thenable) { | |
resolveThenable.safely(self, thenable); | |
} else { | |
self.state = states.FULFILLED; | |
self.outcome = value; | |
var i = -1; | |
var len = self.queue.length; | |
while (++i < len) { | |
self.queue[i].callFulfilled(value); | |
} | |
} | |
return self; | |
}; | |
exports.reject = function (self, error) { | |
self.state = states.REJECTED; | |
self.outcome = error; | |
var i = -1; | |
var len = self.queue.length; | |
while (++i < len) { | |
self.queue[i].callRejected(error); | |
} | |
return self; | |
}; | |
function getThen(obj) { | |
// Make sure we only access the accessor once as required by the spec | |
var then = obj && obj.then; | |
if (obj && typeof obj === 'object' && typeof then === 'function') { | |
return function appyThen() { | |
then.apply(obj, arguments); | |
}; | |
} | |
} | |
},{"./resolveThenable":135,"./states":136,"./tryCatch":137}],129:[function(require,module,exports){ | |
module.exports = exports = require('./promise'); | |
exports.resolve = require('./resolve'); | |
exports.reject = require('./reject'); | |
exports.all = require('./all'); | |
exports.race = require('./race'); | |
},{"./all":127,"./promise":130,"./race":132,"./reject":133,"./resolve":134}],130:[function(require,module,exports){ | |
'use strict'; | |
var unwrap = require('./unwrap'); | |
var INTERNAL = require('./INTERNAL'); | |
var resolveThenable = require('./resolveThenable'); | |
var states = require('./states'); | |
var QueueItem = require('./queueItem'); | |
module.exports = Promise; | |
function Promise(resolver) { | |
if (!(this instanceof Promise)) { | |
return new Promise(resolver); | |
} | |
if (typeof resolver !== 'function') { | |
throw new TypeError('resolver must be a function'); | |
} | |
this.state = states.PENDING; | |
this.queue = []; | |
this.outcome = void 0; | |
if (resolver !== INTERNAL) { | |
resolveThenable.safely(this, resolver); | |
} | |
} | |
Promise.prototype['catch'] = function (onRejected) { | |
return this.then(null, onRejected); | |
}; | |
Promise.prototype.then = function (onFulfilled, onRejected) { | |
if (typeof onFulfilled !== 'function' && this.state === states.FULFILLED || | |
typeof onRejected !== 'function' && this.state === states.REJECTED) { | |
return this; | |
} | |
var promise = new Promise(INTERNAL); | |
if (this.state !== states.PENDING) { | |
var resolver = this.state === states.FULFILLED ? onFulfilled : onRejected; | |
unwrap(promise, resolver, this.outcome); | |
} else { | |
this.queue.push(new QueueItem(promise, onFulfilled, onRejected)); | |
} | |
return promise; | |
}; | |
},{"./INTERNAL":126,"./queueItem":131,"./resolveThenable":135,"./states":136,"./unwrap":138}],131:[function(require,module,exports){ | |
'use strict'; | |
var handlers = require('./handlers'); | |
var unwrap = require('./unwrap'); | |
module.exports = QueueItem; | |
function QueueItem(promise, onFulfilled, onRejected) { | |
this.promise = promise; | |
if (typeof onFulfilled === 'function') { | |
this.onFulfilled = onFulfilled; | |
this.callFulfilled = this.otherCallFulfilled; | |
} | |
if (typeof onRejected === 'function') { | |
this.onRejected = onRejected; | |
this.callRejected = this.otherCallRejected; | |
} | |
} | |
QueueItem.prototype.callFulfilled = function (value) { | |
handlers.resolve(this.promise, value); | |
}; | |
QueueItem.prototype.otherCallFulfilled = function (value) { | |
unwrap(this.promise, this.onFulfilled, value); | |
}; | |
QueueItem.prototype.callRejected = function (value) { | |
handlers.reject(this.promise, value); | |
}; | |
QueueItem.prototype.otherCallRejected = function (value) { | |
unwrap(this.promise, this.onRejected, value); | |
}; | |
},{"./handlers":128,"./unwrap":138}],132:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var reject = require('./reject'); | |
var resolve = require('./resolve'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = race; | |
function race(iterable) { | |
if (Object.prototype.toString.call(iterable) !== '[object Array]') { | |
return reject(new TypeError('must be an array')); | |
} | |
var len = iterable.length; | |
var called = false; | |
if (!len) { | |
return resolve([]); | |
} | |
var i = -1; | |
var promise = new Promise(INTERNAL); | |
while (++i < len) { | |
resolver(iterable[i]); | |
} | |
return promise; | |
function resolver(value) { | |
resolve(value).then(function (response) { | |
if (!called) { | |
called = true; | |
handlers.resolve(promise, response); | |
} | |
}, function (error) { | |
if (!called) { | |
called = true; | |
handlers.reject(promise, error); | |
} | |
}); | |
} | |
} | |
},{"./INTERNAL":126,"./handlers":128,"./promise":130,"./reject":133,"./resolve":134}],133:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = reject; | |
function reject(reason) { | |
var promise = new Promise(INTERNAL); | |
return handlers.reject(promise, reason); | |
} | |
},{"./INTERNAL":126,"./handlers":128,"./promise":130}],134:[function(require,module,exports){ | |
'use strict'; | |
var Promise = require('./promise'); | |
var INTERNAL = require('./INTERNAL'); | |
var handlers = require('./handlers'); | |
module.exports = resolve; | |
var FALSE = handlers.resolve(new Promise(INTERNAL), false); | |
var NULL = handlers.resolve(new Promise(INTERNAL), null); | |
var UNDEFINED = handlers.resolve(new Promise(INTERNAL), void 0); | |
var ZERO = handlers.resolve(new Promise(INTERNAL), 0); | |
var EMPTYSTRING = handlers.resolve(new Promise(INTERNAL), ''); | |
function resolve(value) { | |
if (value) { | |
if (value instanceof Promise) { | |
return value; | |
} | |
return handlers.resolve(new Promise(INTERNAL), value); | |
} | |
var valueType = typeof value; | |
switch (valueType) { | |
case 'boolean': | |
return FALSE; | |
case 'undefined': | |
return UNDEFINED; | |
case 'object': | |
return NULL; | |
case 'number': | |
return ZERO; | |
case 'string': | |
return EMPTYSTRING; | |
} | |
} | |
},{"./INTERNAL":126,"./handlers":128,"./promise":130}],135:[function(require,module,exports){ | |
'use strict'; | |
var handlers = require('./handlers'); | |
var tryCatch = require('./tryCatch'); | |
function safelyResolveThenable(self, thenable) { | |
// Either fulfill, reject or reject with error | |
var called = false; | |
function onError(value) { | |
if (called) { | |
return; | |
} | |
called = true; | |
handlers.reject(self, value); | |
} | |
function onSuccess(value) { | |
if (called) { | |
return; | |
} | |
called = true; | |
handlers.resolve(self, value); | |
} | |
function tryToUnwrap() { | |
thenable(onSuccess, onError); | |
} | |
var result = tryCatch(tryToUnwrap); | |
if (result.status === 'error') { | |
onError(result.value); | |
} | |
} | |
exports.safely = safelyResolveThenable; | |
},{"./handlers":128,"./tryCatch":137}],136:[function(require,module,exports){ | |
// Lazy man's symbols for states | |
exports.REJECTED = ['REJECTED']; | |
exports.FULFILLED = ['FULFILLED']; | |
exports.PENDING = ['PENDING']; | |
},{}],137:[function(require,module,exports){ | |
'use strict'; | |
module.exports = tryCatch; | |
function tryCatch(func, value) { | |
var out = {}; | |
try { | |
out.value = func(value); | |
out.status = 'success'; | |
} catch (e) { | |
out.status = 'error'; | |
out.value = e; | |
} | |
return out; | |
} | |
},{}],138:[function(require,module,exports){ | |
'use strict'; | |
var immediate = require('immediate'); | |
var handlers = require('./handlers'); | |
module.exports = unwrap; | |
function unwrap(promise, func, value) { | |
immediate(function () { | |
var returnValue; | |
try { | |
returnValue = func(value); | |
} catch (e) { | |
return handlers.reject(promise, e); | |
} | |
if (returnValue === promise) { | |
handlers.reject(promise, new TypeError('Cannot resolve promise with itself')); | |
} else { | |
handlers.resolve(promise, returnValue); | |
} | |
}); | |
} | |
},{"./handlers":128,"immediate":139}],139:[function(require,module,exports){ | |
'use strict'; | |
var types = [ | |
require('./nextTick'), | |
require('./mutation.js'), | |
require('./messageChannel'), | |
require('./stateChange'), | |
require('./timeout') | |
]; | |
var draining; | |
var queue = []; | |
//named nextTick for less confusing stack traces | |
function nextTick() { | |
draining = true; | |
var i, oldQueue; | |
var len = queue.length; | |
while (len) { | |
oldQueue = queue; | |
queue = []; | |
i = -1; | |
while (++i < len) { | |
oldQueue[i](); | |
} | |
len = queue.length; | |
} | |
draining = false; | |
} | |
var scheduleDrain; | |
var i = -1; | |
var len = types.length; | |
while (++ i < len) { | |
if (types[i] && types[i].test && types[i].test()) { | |
scheduleDrain = types[i].install(nextTick); | |
break; | |
} | |
} | |
module.exports = immediate; | |
function immediate(task) { | |
if (queue.push(task) === 1 && !draining) { | |
scheduleDrain(); | |
} | |
} | |
},{"./messageChannel":140,"./mutation.js":141,"./nextTick":79,"./stateChange":142,"./timeout":143}],140:[function(require,module,exports){ | |
(function (global){ | |
'use strict'; | |
exports.test = function () { | |
if (global.setImmediate) { | |
// we can only get here in IE10 | |
// which doesn't handel postMessage well | |
return false; | |
} | |
return typeof global.MessageChannel !== 'undefined'; | |
}; | |
exports.install = function (func) { | |
var channel = new global.MessageChannel(); | |
channel.port1.onmessage = func; | |
return function () { | |
channel.port2.postMessage(0); | |
}; | |
}; | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],141:[function(require,module,exports){ | |
(function (global){ | |
'use strict'; | |
//based off rsvp https://github.com/tildeio/rsvp.js | |
//license https://github.com/tildeio/rsvp.js/blob/master/LICENSE | |
//https://github.com/tildeio/rsvp.js/blob/master/lib/rsvp/asap.js | |
var Mutation = global.MutationObserver || global.WebKitMutationObserver; | |
exports.test = function () { | |
return Mutation; | |
}; | |
exports.install = function (handle) { | |
var called = 0; | |
var observer = new Mutation(handle); | |
var element = global.document.createTextNode(''); | |
observer.observe(element, { | |
characterData: true | |
}); | |
return function () { | |
element.data = (called = ++called % 2); | |
}; | |
}; | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],142:[function(require,module,exports){ | |
(function (global){ | |
'use strict'; | |
exports.test = function () { | |
return 'document' in global && 'onreadystatechange' in global.document.createElement('script'); | |
}; | |
exports.install = function (handle) { | |
return function () { | |
// Create a <script> element; its readystatechange event will be fired asynchronously once it is inserted | |
// into the document. Do so, thus queuing up the task. Remember to clean up once it's been called. | |
var scriptEl = global.document.createElement('script'); | |
scriptEl.onreadystatechange = function () { | |
handle(); | |
scriptEl.onreadystatechange = null; | |
scriptEl.parentNode.removeChild(scriptEl); | |
scriptEl = null; | |
}; | |
global.document.documentElement.appendChild(scriptEl); | |
return handle; | |
}; | |
}; | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{}],143:[function(require,module,exports){ | |
'use strict'; | |
exports.test = function () { | |
return true; | |
}; | |
exports.install = function (t) { | |
return function () { | |
setTimeout(t, 0); | |
}; | |
}; | |
},{}],144:[function(require,module,exports){ | |
'use strict'; | |
var MIN_MAGNITUDE = -324; // verified by -Number.MIN_VALUE | |
var MAGNITUDE_DIGITS = 3; // ditto | |
var SEP = ''; // set to '_' for easier debugging | |
var utils = require('./utils'); | |
exports.collate = function (a, b) { | |
if (a === b) { | |
return 0; | |
} | |
a = exports.normalizeKey(a); | |
b = exports.normalizeKey(b); | |
var ai = collationIndex(a); | |
var bi = collationIndex(b); | |
if ((ai - bi) !== 0) { | |
return ai - bi; | |
} | |
if (a === null) { | |
return 0; | |
} | |
switch (typeof a) { | |
case 'number': | |
return a - b; | |
case 'boolean': | |
return a === b ? 0 : (a < b ? -1 : 1); | |
case 'string': | |
return stringCollate(a, b); | |
} | |
return Array.isArray(a) ? arrayCollate(a, b) : objectCollate(a, b); | |
}; | |
// couch considers null/NaN/Infinity/-Infinity === undefined, | |
// for the purposes of mapreduce indexes. also, dates get stringified. | |
exports.normalizeKey = function (key) { | |
switch (typeof key) { | |
case 'undefined': | |
return null; | |
case 'number': | |
if (key === Infinity || key === -Infinity || isNaN(key)) { | |
return null; | |
} | |
return key; | |
case 'object': | |
var origKey = key; | |
if (Array.isArray(key)) { | |
var len = key.length; | |
key = new Array(len); | |
for (var i = 0; i < len; i++) { | |
key[i] = exports.normalizeKey(origKey[i]); | |
} | |
} else if (key instanceof Date) { | |
return key.toJSON(); | |
} else if (key !== null) { // generic object | |
key = {}; | |
for (var k in origKey) { | |
if (origKey.hasOwnProperty(k)) { | |
var val = origKey[k]; | |
if (typeof val !== 'undefined') { | |
key[k] = exports.normalizeKey(val); | |
} | |
} | |
} | |
} | |
} | |
return key; | |
}; | |
function indexify(key) { | |
if (key !== null) { | |
switch (typeof key) { | |
case 'boolean': | |
return key ? 1 : 0; | |
case 'number': | |
return numToIndexableString(key); | |
case 'string': | |
// We've to be sure that key does not contain \u0000 | |
// Do order-preserving replacements: | |
// 0 -> 1, 1 | |
// 1 -> 1, 2 | |
// 2 -> 2, 2 | |
return key | |
.replace(/\u0002/g, '\u0002\u0002') | |
.replace(/\u0001/g, '\u0001\u0002') | |
.replace(/\u0000/g, '\u0001\u0001'); | |
case 'object': | |
var isArray = Array.isArray(key); | |
var arr = isArray ? key : Object.keys(key); | |
var i = -1; | |
var len = arr.length; | |
var result = ''; | |
if (isArray) { | |
while (++i < len) { | |
result += exports.toIndexableString(arr[i]); | |
} | |
} else { | |
while (++i < len) { | |
var objKey = arr[i]; | |
result += exports.toIndexableString(objKey) + | |
exports.toIndexableString(key[objKey]); | |
} | |
} | |
return result; | |
} | |
} | |
return ''; | |
} | |
// convert the given key to a string that would be appropriate | |
// for lexical sorting, e.g. within a database, where the | |
// sorting is the same given by the collate() function. | |
exports.toIndexableString = function (key) { | |
var zero = '\u0000'; | |
key = exports.normalizeKey(key); | |
return collationIndex(key) + SEP + indexify(key) + zero; | |
}; | |
function parseNumber(str, i) { | |
var originalIdx = i; | |
var num; | |
var zero = str[i] === '1'; | |
if (zero) { | |
num = 0; | |
i++; | |
} else { | |
var neg = str[i] === '0'; | |
i++; | |
var numAsString = ''; | |
var magAsString = str.substring(i, i + MAGNITUDE_DIGITS); | |
var magnitude = parseInt(magAsString, 10) + MIN_MAGNITUDE; | |
if (neg) { | |
magnitude = -magnitude; | |
} | |
i += MAGNITUDE_DIGITS; | |
while (true) { | |
var ch = str[i]; | |
if (ch === '\u0000') { | |
break; | |
} else { | |
numAsString += ch; | |
} | |
i++; | |
} | |
numAsString = numAsString.split('.'); | |
if (numAsString.length === 1) { | |
num = parseInt(numAsString, 10); | |
} else { | |
num = parseFloat(numAsString[0] + '.' + numAsString[1]); | |
} | |
if (neg) { | |
num = num - 10; | |
} | |
if (magnitude !== 0) { | |
// parseFloat is more reliable than pow due to rounding errors | |
// e.g. Number.MAX_VALUE would return Infinity if we did | |
// num * Math.pow(10, magnitude); | |
num = parseFloat(num + 'e' + magnitude); | |
} | |
} | |
return {num: num, length : i - originalIdx}; | |
} | |
// move up the stack while parsing | |
// this function moved outside of parseIndexableString for performance | |
function pop(stack, metaStack) { | |
var obj = stack.pop(); | |
if (metaStack.length) { | |
var lastMetaElement = metaStack[metaStack.length - 1]; | |
if (obj === lastMetaElement.element) { | |
// popping a meta-element, e.g. an object whose value is another object | |
metaStack.pop(); | |
lastMetaElement = metaStack[metaStack.length - 1]; | |
} | |
var element = lastMetaElement.element; | |
var lastElementIndex = lastMetaElement.index; | |
if (Array.isArray(element)) { | |
element.push(obj); | |
} else if (lastElementIndex === stack.length - 2) { // obj with key+value | |
var key = stack.pop(); | |
element[key] = obj; | |
} else { | |
stack.push(obj); // obj with key only | |
} | |
} | |
} | |
exports.parseIndexableString = function (str) { | |
var stack = []; | |
var metaStack = []; // stack for arrays and objects | |
var i = 0; | |
while (true) { | |
var collationIndex = str[i++]; | |
if (collationIndex === '\u0000') { | |
if (stack.length === 1) { | |
return stack.pop(); | |
} else { | |
pop(stack, metaStack); | |
continue; | |
} | |
} | |
switch (collationIndex) { | |
case '1': | |
stack.push(null); | |
break; | |
case '2': | |
stack.push(str[i] === '1'); | |
i++; | |
break; | |
case '3': | |
var parsedNum = parseNumber(str, i); | |
stack.push(parsedNum.num); | |
i += parsedNum.length; | |
break; | |
case '4': | |
var parsedStr = ''; | |
while (true) { | |
var ch = str[i]; | |
if (ch === '\u0000') { | |
break; | |
} | |
parsedStr += ch; | |
i++; | |
} | |
// perform the reverse of the order-preserving replacement | |
// algorithm (see above) | |
parsedStr = parsedStr.replace(/\u0001\u0001/g, '\u0000') | |
.replace(/\u0001\u0002/g, '\u0001') | |
.replace(/\u0002\u0002/g, '\u0002'); | |
stack.push(parsedStr); | |
break; | |
case '5': | |
var arrayElement = { element: [], index: stack.length }; | |
stack.push(arrayElement.element); | |
metaStack.push(arrayElement); | |
break; | |
case '6': | |
var objElement = { element: {}, index: stack.length }; | |
stack.push(objElement.element); | |
metaStack.push(objElement); | |
break; | |
default: | |
throw new Error( | |
'bad collationIndex or unexpectedly reached end of input: ' + collationIndex); | |
} | |
} | |
}; | |
function arrayCollate(a, b) { | |
var len = Math.min(a.length, b.length); | |
for (var i = 0; i < len; i++) { | |
var sort = exports.collate(a[i], b[i]); | |
if (sort !== 0) { | |
return sort; | |
} | |
} | |
return (a.length === b.length) ? 0 : | |
(a.length > b.length) ? 1 : -1; | |
} | |
function stringCollate(a, b) { | |
// See: https://github.com/daleharvey/pouchdb/issues/40 | |
// This is incompatible with the CouchDB implementation, but its the | |
// best we can do for now | |
return (a === b) ? 0 : ((a > b) ? 1 : -1); | |
} | |
function objectCollate(a, b) { | |
var ak = Object.keys(a), bk = Object.keys(b); | |
var len = Math.min(ak.length, bk.length); | |
for (var i = 0; i < len; i++) { | |
// First sort the keys | |
var sort = exports.collate(ak[i], bk[i]); | |
if (sort !== 0) { | |
return sort; | |
} | |
// if the keys are equal sort the values | |
sort = exports.collate(a[ak[i]], b[bk[i]]); | |
if (sort !== 0) { | |
return sort; | |
} | |
} | |
return (ak.length === bk.length) ? 0 : | |
(ak.length > bk.length) ? 1 : -1; | |
} | |
// The collation is defined by erlangs ordered terms | |
// the atoms null, true, false come first, then numbers, strings, | |
// arrays, then objects | |
// null/undefined/NaN/Infinity/-Infinity are all considered null | |
function collationIndex(x) { | |
var id = ['boolean', 'number', 'string', 'object']; | |
var idx = id.indexOf(typeof x); | |
//false if -1 otherwise true, but fast!!!!1 | |
if (~idx) { | |
if (x === null) { | |
return 1; | |
} | |
if (Array.isArray(x)) { | |
return 5; | |
} | |
return idx < 3 ? (idx + 2) : (idx + 3); | |
} | |
if (Array.isArray(x)) { | |
return 5; | |
} | |
} | |
// conversion: | |
// x yyy zz...zz | |
// x = 0 for negative, 1 for 0, 2 for positive | |
// y = exponent (for negative numbers negated) moved so that it's >= 0 | |
// z = mantisse | |
function numToIndexableString(num) { | |
if (num === 0) { | |
return '1'; | |
} | |
// convert number to exponential format for easier and | |
// more succinct string sorting | |
var expFormat = num.toExponential().split(/e\+?/); | |
var magnitude = parseInt(expFormat[1], 10); | |
var neg = num < 0; | |
var result = neg ? '0' : '2'; | |
// first sort by magnitude | |
// it's easier if all magnitudes are positive | |
var magForComparison = ((neg ? -magnitude : magnitude) - MIN_MAGNITUDE); | |
var magString = utils.padLeft((magForComparison).toString(), '0', MAGNITUDE_DIGITS); | |
result += SEP + magString; | |
// then sort by the factor | |
var factor = Math.abs(parseFloat(expFormat[0])); // [1..10) | |
if (neg) { // for negative reverse ordering | |
factor = 10 - factor; | |
} | |
var factorStr = factor.toFixed(20); | |
// strip zeros from the end | |
factorStr = factorStr.replace(/\.?0+$/, ''); | |
result += SEP + factorStr; | |
return result; | |
} | |
},{"./utils":145}],145:[function(require,module,exports){ | |
'use strict'; | |
function pad(str, padWith, upToLength) { | |
var padding = ''; | |
var targetLength = upToLength - str.length; | |
while (padding.length < targetLength) { | |
padding += padWith; | |
} | |
return padding; | |
} | |
exports.padLeft = function (str, padWith, upToLength) { | |
var padding = pad(str, padWith, upToLength); | |
return padding + str; | |
}; | |
exports.padRight = function (str, padWith, upToLength) { | |
var padding = pad(str, padWith, upToLength); | |
return str + padding; | |
}; | |
exports.stringLexCompare = function (a, b) { | |
var aLen = a.length; | |
var bLen = b.length; | |
var i; | |
for (i = 0; i < aLen; i++) { | |
if (i === bLen) { | |
// b is shorter substring of a | |
return 1; | |
} | |
var aChar = a.charAt(i); | |
var bChar = b.charAt(i); | |
if (aChar !== bChar) { | |
return aChar < bChar ? -1 : 1; | |
} | |
} | |
if (aLen < bLen) { | |
// a is shorter substring of b | |
return -1; | |
} | |
return 0; | |
}; | |
/* | |
* returns the decimal form for the given integer, i.e. writes | |
* out all the digits (in base-10) instead of using scientific notation | |
*/ | |
exports.intToDecimalForm = function (int) { | |
var isNeg = int < 0; | |
var result = ''; | |
do { | |
var remainder = isNeg ? -Math.ceil(int % 10) : Math.floor(int % 10); | |
result = remainder + result; | |
int = isNeg ? Math.ceil(int / 10) : Math.floor(int / 10); | |
} while (int); | |
if (isNeg && result !== '0') { | |
result = '-' + result; | |
} | |
return result; | |
}; | |
},{}],146:[function(require,module,exports){ | |
'use strict'; | |
exports.Map = LazyMap; // TODO: use ES6 map | |
exports.Set = LazySet; // TODO: use ES6 set | |
// based on https://github.com/montagejs/collections | |
function LazyMap() { | |
this.store = {}; | |
} | |
LazyMap.prototype.mangle = function (key) { | |
if (typeof key !== "string") { | |
throw new TypeError("key must be a string but Got " + key); | |
} | |
return '$' + key; | |
}; | |
LazyMap.prototype.unmangle = function (key) { | |
return key.substring(1); | |
}; | |
LazyMap.prototype.get = function (key) { | |
var mangled = this.mangle(key); | |
if (mangled in this.store) { | |
return this.store[mangled]; | |
} else { | |
return void 0; | |
} | |
}; | |
LazyMap.prototype.set = function (key, value) { | |
var mangled = this.mangle(key); | |
this.store[mangled] = value; | |
return true; | |
}; | |
LazyMap.prototype.has = function (key) { | |
var mangled = this.mangle(key); | |
return mangled in this.store; | |
}; | |
LazyMap.prototype.delete = function (key) { | |
var mangled = this.mangle(key); | |
if (mangled in this.store) { | |
delete this.store[mangled]; | |
return true; | |
} | |
return false; | |
}; | |
LazyMap.prototype.forEach = function (cb) { | |
var self = this; | |
var keys = Object.keys(self.store); | |
keys.forEach(function (key) { | |
var value = self.store[key]; | |
key = self.unmangle(key); | |
cb(value, key); | |
}); | |
}; | |
function LazySet(array) { | |
this.store = new LazyMap(); | |
// init with an array | |
if (array && Array.isArray(array)) { | |
for (var i = 0, len = array.length; i < len; i++) { | |
this.add(array[i]); | |
} | |
} | |
} | |
LazySet.prototype.add = function (key) { | |
return this.store.set(key, true); | |
}; | |
LazySet.prototype.has = function (key) { | |
return this.store.has(key); | |
}; | |
LazySet.prototype.delete = function (key) { | |
return this.store.delete(key); | |
}; | |
},{}],147:[function(require,module,exports){ | |
"use strict"; | |
// Extends method | |
// (taken from http://code.jquery.com/jquery-1.9.0.js) | |
// Populate the class2type map | |
var class2type = {}; | |
var types = [ | |
"Boolean", "Number", "String", "Function", "Array", | |
"Date", "RegExp", "Object", "Error" | |
]; | |
for (var i = 0; i < types.length; i++) { | |
var typename = types[i]; | |
class2type["[object " + typename + "]"] = typename.toLowerCase(); | |
} | |
var core_toString = class2type.toString; | |
var core_hasOwn = class2type.hasOwnProperty; | |
function type(obj) { | |
if (obj === null) { | |
return String(obj); | |
} | |
return typeof obj === "object" || typeof obj === "function" ? | |
class2type[core_toString.call(obj)] || "object" : | |
typeof obj; | |
} | |
function isWindow(obj) { | |
return obj !== null && obj === obj.window; | |
} | |
function isPlainObject(obj) { | |
// Must be an Object. | |
// Because of IE, we also have to check the presence of | |
// the constructor property. | |
// Make sure that DOM nodes and window objects don't pass through, as well | |
if (!obj || type(obj) !== "object" || obj.nodeType || isWindow(obj)) { | |
return false; | |
} | |
try { | |
// Not own constructor property must be Object | |
if (obj.constructor && | |
!core_hasOwn.call(obj, "constructor") && | |
!core_hasOwn.call(obj.constructor.prototype, "isPrototypeOf")) { | |
return false; | |
} | |
} catch ( e ) { | |
// IE8,9 Will throw exceptions on certain host objects #9897 | |
return false; | |
} | |
// Own properties are enumerated firstly, so to speed up, | |
// if last one is own, then all properties are own. | |
var key; | |
for (key in obj) {} | |
return key === undefined || core_hasOwn.call(obj, key); | |
} | |
function isFunction(obj) { | |
return type(obj) === "function"; | |
} | |
var isArray = Array.isArray || function (obj) { | |
return type(obj) === "array"; | |
}; | |
function extend() { | |
// originally extend() was recursive, but this ended up giving us | |
// "call stack exceeded", so it's been unrolled to use a literal stack | |
// (see https://github.com/pouchdb/pouchdb/issues/2543) | |
var stack = []; | |
var i = -1; | |
var len = arguments.length; | |
var args = new Array(len); | |
while (++i < len) { | |
args[i] = arguments[i]; | |
} | |
var container = {}; | |
stack.push({args: args, result: {container: container, key: 'key'}}); | |
var next; | |
while ((next = stack.pop())) { | |
extendInner(stack, next.args, next.result); | |
} | |
return container.key; | |
} | |
function extendInner(stack, args, result) { | |
var options, name, src, copy, copyIsArray, clone, | |
target = args[0] || {}, | |
i = 1, | |
length = args.length, | |
deep = false, | |
numericStringRegex = /\d+/, | |
optionsIsArray; | |
// Handle a deep copy situation | |
if (typeof target === "boolean") { | |
deep = target; | |
target = args[1] || {}; | |
// skip the boolean and the target | |
i = 2; | |
} | |
// Handle case when target is a string or something (possible in deep copy) | |
if (typeof target !== "object" && !isFunction(target)) { | |
target = {}; | |
} | |
// extend jQuery itself if only one argument is passed | |
if (length === i) { | |
/* jshint validthis: true */ | |
target = this; | |
--i; | |
} | |
for (; i < length; i++) { | |
// Only deal with non-null/undefined values | |
if ((options = args[i]) != null) { | |
optionsIsArray = isArray(options); | |
// Extend the base object | |
for (name in options) { | |
//if (options.hasOwnProperty(name)) { | |
if (!(name in Object.prototype)) { | |
if (optionsIsArray && !numericStringRegex.test(name)) { | |
continue; | |
} | |
src = target[name]; | |
copy = options[name]; | |
// Prevent never-ending loop | |
if (target === copy) { | |
continue; | |
} | |
// Recurse if we're merging plain objects or arrays | |
if (deep && copy && (isPlainObject(copy) || | |
(copyIsArray = isArray(copy)))) { | |
if (copyIsArray) { | |
copyIsArray = false; | |
clone = src && isArray(src) ? src : []; | |
} else { | |
clone = src && isPlainObject(src) ? src : {}; | |
} | |
// Never move original objects, clone them | |
stack.push({ | |
args: [deep, clone, copy], | |
result: { | |
container: target, | |
key: name | |
} | |
}); | |
// Don't bring in undefined values | |
} else if (copy !== undefined) { | |
if (!(isArray(options) && isFunction(copy))) { | |
target[name] = copy; | |
} | |
} | |
} | |
} | |
} | |
} | |
// "Return" the modified object by setting the key | |
// on the given container | |
result.container[result.key] = target; | |
} | |
module.exports = extend; | |
},{}],148:[function(require,module,exports){ | |
'use strict'; | |
var upsert = require('./upsert'); | |
var utils = require('./utils'); | |
var Promise = utils.Promise; | |
module.exports = function (opts) { | |
var sourceDB = opts.db; | |
var viewName = opts.viewName; | |
var mapFun = opts.map; | |
var reduceFun = opts.reduce; | |
var temporary = opts.temporary; | |
// the "undefined" part is for backwards compatibility | |
var viewSignature = mapFun.toString() + (reduceFun && reduceFun.toString()) + | |
'undefined'; | |
if (!temporary && sourceDB._cachedViews) { | |
var cachedView = sourceDB._cachedViews[viewSignature]; | |
if (cachedView) { | |
return Promise.resolve(cachedView); | |
} | |
} | |
return sourceDB.info().then(function (info) { | |
var depDbName = info.db_name + '-mrview-' + | |
(temporary ? 'temp' : utils.MD5(viewSignature)); | |
// save the view name in the source PouchDB so it can be cleaned up if necessary | |
// (e.g. when the _design doc is deleted, remove all associated view data) | |
function diffFunction(doc) { | |
doc.views = doc.views || {}; | |
var fullViewName = viewName; | |
if (fullViewName.indexOf('/') === -1) { | |
fullViewName = viewName + '/' + viewName; | |
} | |
var depDbs = doc.views[fullViewName] = doc.views[fullViewName] || {}; | |
/* istanbul ignore if */ | |
if (depDbs[depDbName]) { | |
return; // no update necessary | |
} | |
depDbs[depDbName] = true; | |
return doc; | |
} | |
return upsert(sourceDB, '_local/mrviews', diffFunction).then(function () { | |
return sourceDB.registerDependentDatabase(depDbName).then(function (res) { | |
var db = res.db; | |
db.auto_compaction = true; | |
var view = { | |
name: depDbName, | |
db: db, | |
sourceDB: sourceDB, | |
adapter: sourceDB.adapter, | |
mapFun: mapFun, | |
reduceFun: reduceFun | |
}; | |
return view.db.get('_local/lastSeq')["catch"](function (err) { | |
/* istanbul ignore if */ | |
if (err.status !== 404) { | |
throw err; | |
} | |
}).then(function (lastSeqDoc) { | |
view.seq = lastSeqDoc ? lastSeqDoc.seq : 0; | |
if (!temporary) { | |
sourceDB._cachedViews = sourceDB._cachedViews || {}; | |
sourceDB._cachedViews[viewSignature] = view; | |
view.db.on('destroyed', function () { | |
delete sourceDB._cachedViews[viewSignature]; | |
}); | |
} | |
return view; | |
}); | |
}); | |
}); | |
}); | |
}; | |
},{"./upsert":152,"./utils":153}],149:[function(require,module,exports){ | |
'use strict'; | |
module.exports = function (func, emit, sum, log, isArray, toJSON) { | |
/*jshint evil:true,unused:false */ | |
return eval("'use strict'; (" + func.replace(/;\s*$/, "") + ");"); | |
}; | |
},{}],150:[function(require,module,exports){ | |
(function (process){ | |
'use strict'; | |
var pouchCollate = require('pouchdb-collate'); | |
var TaskQueue = require('./taskqueue'); | |
var collate = pouchCollate.collate; | |
var toIndexableString = pouchCollate.toIndexableString; | |
var normalizeKey = pouchCollate.normalizeKey; | |
var createView = require('./create-view'); | |
var evalFunc = require('./evalfunc'); | |
var log; | |
/* istanbul ignore else */ | |
if ((typeof console !== 'undefined') && (typeof console.log === 'function')) { | |
log = Function.prototype.bind.call(console.log, console); | |
} else { | |
log = function () {}; | |
} | |
var utils = require('./utils'); | |
var Promise = utils.Promise; | |
var persistentQueues = {}; | |
var tempViewQueue = new TaskQueue(); | |
var CHANGES_BATCH_SIZE = 50; | |
function parseViewName(name) { | |
// can be either 'ddocname/viewname' or just 'viewname' | |
// (where the ddoc name is the same) | |
return name.indexOf('/') === -1 ? [name, name] : name.split('/'); | |
} | |
function isGenOne(changes) { | |
// only return true if the current change is 1- | |
// and there are no other leafs | |
return changes.length === 1 && /^1-/.test(changes[0].rev); | |
} | |
function emitError(db, e) { | |
try { | |
db.emit('error', e); | |
} catch (err) { | |
console.error( | |
'The user\'s map/reduce function threw an uncaught error.\n' + | |
'You can debug this error by doing:\n' + | |
'myDatabase.on(\'error\', function (err) { debugger; });\n' + | |
'Please double-check your map/reduce function.'); | |
console.error(e); | |
} | |
} | |
function tryCode(db, fun, args) { | |
// emit an event if there was an error thrown by a map/reduce function. | |
// putting try/catches in a single function also avoids deoptimizations. | |
try { | |
return { | |
output : fun.apply(null, args) | |
}; | |
} catch (e) { | |
emitError(db, e); | |
return {error: e}; | |
} | |
} | |
function sortByKeyThenValue(x, y) { | |
var keyCompare = collate(x.key, y.key); | |
return keyCompare !== 0 ? keyCompare : collate(x.value, y.value); | |
} | |
function sliceResults(results, limit, skip) { | |
skip = skip || 0; | |
if (typeof limit === 'number') { | |
return results.slice(skip, limit + skip); | |
} else if (skip > 0) { | |
return results.slice(skip); | |
} | |
return results; | |
} | |
function rowToDocId(row) { | |
var val = row.value; | |
// Users can explicitly specify a joined doc _id, or it | |
// defaults to the doc _id that emitted the key/value. | |
var docId = (val && typeof val === 'object' && val._id) || row.id; | |
return docId; | |
} | |
function createBuiltInError(name) { | |
var message = 'builtin ' + name + | |
' function requires map values to be numbers' + | |
' or number arrays'; | |
return new BuiltInError(message); | |
} | |
function sum(values) { | |
var result = 0; | |
for (var i = 0, len = values.length; i < len; i++) { | |
var num = values[i]; | |
if (typeof num !== 'number') { | |
if (Array.isArray(num)) { | |
// lists of numbers are also allowed, sum them separately | |
result = typeof result === 'number' ? [result] : result; | |
for (var j = 0, jLen = num.length; j < jLen; j++) { | |
var jNum = num[j]; | |
if (typeof jNum !== 'number') { | |
throw createBuiltInError('_sum'); | |
} else if (typeof result[j] === 'undefined') { | |
result.push(jNum); | |
} else { | |
result[j] += jNum; | |
} | |
} | |
} else { // not array/number | |
throw createBuiltInError('_sum'); | |
} | |
} else if (typeof result === 'number') { | |
result += num; | |
} else { // add number to array | |
result[0] += num; | |
} | |
} | |
return result; | |
} | |
var builtInReduce = { | |
_sum: function (keys, values) { | |
return sum(values); | |
}, | |
_count: function (keys, values) { | |
return values.length; | |
}, | |
_stats: function (keys, values) { | |
// no need to implement rereduce=true, because Pouch | |
// will never call it | |
function sumsqr(values) { | |
var _sumsqr = 0; | |
for (var i = 0, len = values.length; i < len; i++) { | |
var num = values[i]; | |
_sumsqr += (num * num); | |
} | |
return _sumsqr; | |
} | |
return { | |
sum : sum(values), | |
min : Math.min.apply(null, values), | |
max : Math.max.apply(null, values), | |
count : values.length, | |
sumsqr : sumsqr(values) | |
}; | |
} | |
}; | |
function addHttpParam(paramName, opts, params, asJson) { | |
// add an http param from opts to params, optionally json-encoded | |
var val = opts[paramName]; | |
if (typeof val !== 'undefined') { | |
if (asJson) { | |
val = encodeURIComponent(JSON.stringify(val)); | |
} | |
params.push(paramName + '=' + val); | |
} | |
} | |
function checkQueryParseError(options, fun) { | |
var startkeyName = options.descending ? 'endkey' : 'startkey'; | |
var endkeyName = options.descending ? 'startkey' : 'endkey'; | |
if (typeof options[startkeyName] !== 'undefined' && | |
typeof options[endkeyName] !== 'undefined' && | |
collate(options[startkeyName], options[endkeyName]) > 0) { | |
throw new QueryParseError('No rows can match your key range, reverse your ' + | |
'start_key and end_key or set {descending : true}'); | |
} else if (fun.reduce && options.reduce !== false) { | |
if (options.include_docs) { | |
throw new QueryParseError('{include_docs:true} is invalid for reduce'); | |
} else if (options.keys && options.keys.length > 1 && | |
!options.group && !options.group_level) { | |
throw new QueryParseError('Multi-key fetches for reduce views must use {group: true}'); | |
} | |
} | |
if (options.group_level) { | |
if (typeof options.group_level !== 'number') { | |
throw new QueryParseError('Invalid value for integer: "' + options.group_level + '"'); | |
} | |
if (options.group_level < 0) { | |
throw new QueryParseError('Invalid value for positive integer: ' + | |
'"' + options.group_level + '"'); | |
} | |
} | |
} | |
function httpQuery(db, fun, opts) { | |
// List of parameters to add to the PUT request | |
var params = []; | |
var body; | |
var method = 'GET'; | |
// If opts.reduce exists and is defined, then add it to the list | |
// of parameters. | |
// If reduce=false then the results are that of only the map function | |
// not the final result of map and reduce. | |
addHttpParam('reduce', opts, params); | |
addHttpParam('include_docs', opts, params); | |
addHttpParam('attachments', opts, params); | |
addHttpParam('limit', opts, params); | |
addHttpParam('descending', opts, params); | |
addHttpParam('group', opts, params); | |
addHttpParam('group_level', opts, params); | |
addHttpParam('skip', opts, params); | |
addHttpParam('stale', opts, params); | |
addHttpParam('conflicts', opts, params); | |
addHttpParam('startkey', opts, params, true); | |
addHttpParam('endkey', opts, params, true); | |
addHttpParam('inclusive_end', opts, params); | |
addHttpParam('key', opts, params, true); | |
// Format the list of parameters into a valid URI query string | |
params = params.join('&'); | |
params = params === '' ? '' : '?' + params; | |
// If keys are supplied, issue a POST request to circumvent GET query string limits | |
// see http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options | |
if (typeof opts.keys !== 'undefined') { | |
var MAX_URL_LENGTH = 2000; | |
// according to http://stackoverflow.com/a/417184/680742, | |
// the de facto URL length limit is 2000 characters | |
var keysAsString = | |
'keys=' + encodeURIComponent(JSON.stringify(opts.keys)); | |
if (keysAsString.length + params.length + 1 <= MAX_URL_LENGTH) { | |
// If the keys are short enough, do a GET. we do this to work around | |
// Safari not understanding 304s on POSTs (see pouchdb/pouchdb#1239) | |
params += (params[0] === '?' ? '&' : '?') + keysAsString; | |
} else { | |
method = 'POST'; | |
if (typeof fun === 'string') { | |
body = JSON.stringify({keys: opts.keys}); | |
} else { // fun is {map : mapfun}, so append to this | |
fun.keys = opts.keys; | |
} | |
} | |
} | |
// We are referencing a query defined in the design doc | |
if (typeof fun === 'string') { | |
var parts = parseViewName(fun); | |
return db.request({ | |
method: method, | |
url: '_design/' + parts[0] + '/_view/' + parts[1] + params, | |
body: body | |
}); | |
} | |
// We are using a temporary view, terrible for performance but good for testing | |
body = body || {}; | |
Object.keys(fun).forEach(function (key) { | |
if (Array.isArray(fun[key])) { | |
body[key] = fun[key]; | |
} else { | |
body[key] = fun[key].toString(); | |
} | |
}); | |
return db.request({ | |
method: 'POST', | |
url: '_temp_view' + params, | |
body: body | |
}); | |
} | |
function defaultsTo(value) { | |
return function (reason) { | |
/* istanbul ignore else */ | |
if (reason.status === 404) { | |
return value; | |
} else { | |
throw reason; | |
} | |
}; | |
} | |
// returns a promise for a list of docs to update, based on the input docId. | |
// the order doesn't matter, because post-3.2.0, bulkDocs | |
// is an atomic operation in all three adapters. | |
function getDocsToPersist(docId, view, docIdsToChangesAndEmits) { | |
var metaDocId = '_local/doc_' + docId; | |
var defaultMetaDoc = {_id: metaDocId, keys: []}; | |
var docData = docIdsToChangesAndEmits[docId]; | |
var indexableKeysToKeyValues = docData.indexableKeysToKeyValues; | |
var changes = docData.changes; | |
function getMetaDoc() { | |
if (isGenOne(changes)) { | |
// generation 1, so we can safely assume initial state | |
// for performance reasons (avoids unnecessary GETs) | |
return Promise.resolve(defaultMetaDoc); | |
} | |
return view.db.get(metaDocId)["catch"](defaultsTo(defaultMetaDoc)); | |
} | |
function getKeyValueDocs(metaDoc) { | |
if (!metaDoc.keys.length) { | |
// no keys, no need for a lookup | |
return Promise.resolve({rows: []}); | |
} | |
return view.db.allDocs({ | |
keys: metaDoc.keys, | |
include_docs: true | |
}); | |
} | |
function processKvDocs(metaDoc, kvDocsRes) { | |
var kvDocs = []; | |
var oldKeysMap = {}; | |
for (var i = 0, len = kvDocsRes.rows.length; i < len; i++) { | |
var row = kvDocsRes.rows[i]; | |
var doc = row.doc; | |
if (!doc) { // deleted | |
continue; | |
} | |
kvDocs.push(doc); | |
oldKeysMap[doc._id] = true; | |
doc._deleted = !indexableKeysToKeyValues[doc._id]; | |
if (!doc._deleted) { | |
var keyValue = indexableKeysToKeyValues[doc._id]; | |
if ('value' in keyValue) { | |
doc.value = keyValue.value; | |
} | |
} | |
} | |
var newKeys = Object.keys(indexableKeysToKeyValues); | |
newKeys.forEach(function (key) { | |
if (!oldKeysMap[key]) { | |
// new doc | |
var kvDoc = { | |
_id: key | |
}; | |
var keyValue = indexableKeysToKeyValues[key]; | |
if ('value' in keyValue) { | |
kvDoc.value = keyValue.value; | |
} | |
kvDocs.push(kvDoc); | |
} | |
}); | |
metaDoc.keys = utils.uniq(newKeys.concat(metaDoc.keys)); | |
kvDocs.push(metaDoc); | |
return kvDocs; | |
} | |
return getMetaDoc().then(function (metaDoc) { | |
return getKeyValueDocs(metaDoc).then(function (kvDocsRes) { | |
return processKvDocs(metaDoc, kvDocsRes); | |
}); | |
}); | |
} | |
// updates all emitted key/value docs and metaDocs in the mrview database | |
// for the given batch of documents from the source database | |
function saveKeyValues(view, docIdsToChangesAndEmits, seq) { | |
var seqDocId = '_local/lastSeq'; | |
return view.db.get(seqDocId)[ | |
"catch"](defaultsTo({_id: seqDocId, seq: 0})) | |
.then(function (lastSeqDoc) { | |
var docIds = Object.keys(docIdsToChangesAndEmits); | |
return Promise.all(docIds.map(function (docId) { | |
return getDocsToPersist(docId, view, docIdsToChangesAndEmits); | |
})).then(function (listOfDocsToPersist) { | |
var docsToPersist = utils.flatten(listOfDocsToPersist); | |
lastSeqDoc.seq = seq; | |
docsToPersist.push(lastSeqDoc); | |
// write all docs in a single operation, update the seq once | |
return view.db.bulkDocs({docs : docsToPersist}); | |
}); | |
}); | |
} | |
function getQueue(view) { | |
var viewName = typeof view === 'string' ? view : view.name; | |
var queue = persistentQueues[viewName]; | |
if (!queue) { | |
queue = persistentQueues[viewName] = new TaskQueue(); | |
} | |
return queue; | |
} | |
function updateView(view) { | |
return utils.sequentialize(getQueue(view), function () { | |
return updateViewInQueue(view); | |
})(); | |
} | |
function updateViewInQueue(view) { | |
// bind the emit function once | |
var mapResults; | |
var doc; | |
function emit(key, value) { | |
var output = {id: doc._id, key: normalizeKey(key)}; | |
// Don't explicitly store the value unless it's defined and non-null. | |
// This saves on storage space, because often people don't use it. | |
if (typeof value !== 'undefined' && value !== null) { | |
output.value = normalizeKey(value); | |
} | |
mapResults.push(output); | |
} | |
var mapFun; | |
// for temp_views one can use emit(doc, emit), see #38 | |
if (typeof view.mapFun === "function" && view.mapFun.length === 2) { | |
var origMap = view.mapFun; | |
mapFun = function (doc) { | |
return origMap(doc, emit); | |
}; | |
} else { | |
mapFun = evalFunc(view.mapFun.toString(), emit, sum, log, Array.isArray, JSON.parse); | |
} | |
var currentSeq = view.seq || 0; | |
function processChange(docIdsToChangesAndEmits, seq) { | |
return function () { | |
return saveKeyValues(view, docIdsToChangesAndEmits, seq); | |
}; | |
} | |
var queue = new TaskQueue(); | |
// TODO(neojski): https://github.com/daleharvey/pouchdb/issues/1521 | |
return new Promise(function (resolve, reject) { | |
function complete() { | |
queue.finish().then(function () { | |
view.seq = currentSeq; | |
resolve(); | |
}); | |
} | |
function processNextBatch() { | |
view.sourceDB.changes({ | |
conflicts: true, | |
include_docs: true, | |
style: 'all_docs', | |
since: currentSeq, | |
limit: CHANGES_BATCH_SIZE | |
}).on('complete', function (response) { | |
var results = response.results; | |
if (!results.length) { | |
return complete(); | |
} | |
var docIdsToChangesAndEmits = {}; | |
for (var i = 0, l = results.length; i < l; i++) { | |
var change = results[i]; | |
if (change.doc._id[0] !== '_') { | |
mapResults = []; | |
doc = change.doc; | |
if (!doc._deleted) { | |
tryCode(view.sourceDB, mapFun, [doc]); | |
} | |
mapResults.sort(sortByKeyThenValue); | |
var indexableKeysToKeyValues = {}; | |
var lastKey; | |
for (var j = 0, jl = mapResults.length; j < jl; j++) { | |
var obj = mapResults[j]; | |
var complexKey = [obj.key, obj.id]; | |
if (collate(obj.key, lastKey) === 0) { | |
complexKey.push(j); // dup key+id, so make it unique | |
} | |
var indexableKey = toIndexableString(complexKey); | |
indexableKeysToKeyValues[indexableKey] = obj; | |
lastKey = obj.key; | |
} | |
docIdsToChangesAndEmits[change.doc._id] = { | |
indexableKeysToKeyValues: indexableKeysToKeyValues, | |
changes: change.changes | |
}; | |
} | |
currentSeq = change.seq; | |
} | |
queue.add(processChange(docIdsToChangesAndEmits, currentSeq)); | |
if (results.length < CHANGES_BATCH_SIZE) { | |
return complete(); | |
} | |
return processNextBatch(); | |
}).on('error', onError); | |
/* istanbul ignore next */ | |
function onError(err) { | |
reject(err); | |
} | |
} | |
processNextBatch(); | |
}); | |
} | |
function reduceView(view, results, options) { | |
if (options.group_level === 0) { | |
delete options.group_level; | |
} | |
var shouldGroup = options.group || options.group_level; | |
var reduceFun; | |
if (builtInReduce[view.reduceFun]) { | |
reduceFun = builtInReduce[view.reduceFun]; | |
} else { | |
reduceFun = evalFunc( | |
view.reduceFun.toString(), null, sum, log, Array.isArray, JSON.parse); | |
} | |
var groups = []; | |
var lvl = options.group_level; | |
results.forEach(function (e) { | |
var last = groups[groups.length - 1]; | |
var key = shouldGroup ? e.key : null; | |
// only set group_level for array keys | |
if (shouldGroup && Array.isArray(key) && typeof lvl === 'number') { | |
key = key.length > lvl ? key.slice(0, lvl) : key; | |
} | |
if (last && collate(last.key[0][0], key) === 0) { | |
last.key.push([key, e.id]); | |
last.value.push(e.value); | |
return; | |
} | |
groups.push({key: [ | |
[key, e.id] | |
], value: [e.value]}); | |
}); | |
for (var i = 0, len = groups.length; i < len; i++) { | |
var e = groups[i]; | |
var reduceTry = tryCode(view.sourceDB, reduceFun, [e.key, e.value, false]); | |
if (reduceTry.error && reduceTry.error instanceof BuiltInError) { | |
// CouchDB returns an error if a built-in errors out | |
throw reduceTry.error; | |
} | |
// CouchDB just sets the value to null if a non-built-in errors out | |
e.value = reduceTry.error ? null : reduceTry.output; | |
e.key = e.key[0][0]; | |
} | |
// no total_rows/offset when reducing | |
return {rows: sliceResults(groups, options.limit, options.skip)}; | |
} | |
function queryView(view, opts) { | |
return utils.sequentialize(getQueue(view), function () { | |
return queryViewInQueue(view, opts); | |
})(); | |
} | |
function queryViewInQueue(view, opts) { | |
var totalRows; | |
var shouldReduce = view.reduceFun && opts.reduce !== false; | |
var skip = opts.skip || 0; | |
if (typeof opts.keys !== 'undefined' && !opts.keys.length) { | |
// equivalent query | |
opts.limit = 0; | |
delete opts.keys; | |
} | |
function fetchFromView(viewOpts) { | |
viewOpts.include_docs = true; | |
return view.db.allDocs(viewOpts).then(function (res) { | |
totalRows = res.total_rows; | |
return res.rows.map(function (result) { | |
// implicit migration - in older versions of PouchDB, | |
// we explicitly stored the doc as {id: ..., key: ..., value: ...} | |
// this is tested in a migration test | |
/* istanbul ignore next */ | |
if ('value' in result.doc && typeof result.doc.value === 'object' && | |
result.doc.value !== null) { | |
var keys = Object.keys(result.doc.value).sort(); | |
// this detection method is not perfect, but it's unlikely the user | |
// emitted a value which was an object with these 3 exact keys | |
var expectedKeys = ['id', 'key', 'value']; | |
if (!(keys < expectedKeys || keys > expectedKeys)) { | |
return result.doc.value; | |
} | |
} | |
var parsedKeyAndDocId = pouchCollate.parseIndexableString(result.doc._id); | |
return { | |
key: parsedKeyAndDocId[0], | |
id: parsedKeyAndDocId[1], | |
value: ('value' in result.doc ? result.doc.value : null) | |
}; | |
}); | |
}); | |
} | |
function onMapResultsReady(rows) { | |
var finalResults; | |
if (shouldReduce) { | |
finalResults = reduceView(view, rows, opts); | |
} else { | |
finalResults = { | |
total_rows: totalRows, | |
offset: skip, | |
rows: rows | |
}; | |
} | |
if (opts.include_docs) { | |
var docIds = utils.uniq(rows.map(rowToDocId)); | |
return view.sourceDB.allDocs({ | |
keys: docIds, | |
include_docs: true, | |
conflicts: opts.conflicts, | |
attachments: opts.attachments | |
}).then(function (allDocsRes) { | |
var docIdsToDocs = {}; | |
allDocsRes.rows.forEach(function (row) { | |
if (row.doc) { | |
docIdsToDocs['$' + row.id] = row.doc; | |
} | |
}); | |
rows.forEach(function (row) { | |
var docId = rowToDocId(row); | |
var doc = docIdsToDocs['$' + docId]; | |
if (doc) { | |
row.doc = doc; | |
} | |
}); | |
return finalResults; | |
}); | |
} else { | |
return finalResults; | |
} | |
} | |
var flatten = function (array) { | |
return array.reduce(function (prev, cur) { | |
return prev.concat(cur); | |
}); | |
}; | |
if (typeof opts.keys !== 'undefined') { | |
var keys = opts.keys; | |
var fetchPromises = keys.map(function (key) { | |
var viewOpts = { | |
startkey : toIndexableString([key]), | |
endkey : toIndexableString([key, {}]) | |
}; | |
return fetchFromView(viewOpts); | |
}); | |
return Promise.all(fetchPromises).then(flatten).then(onMapResultsReady); | |
} else { // normal query, no 'keys' | |
var viewOpts = { | |
descending : opts.descending | |
}; | |
if (typeof opts.startkey !== 'undefined') { | |
viewOpts.startkey = opts.descending ? | |
toIndexableString([opts.startkey, {}]) : | |
toIndexableString([opts.startkey]); | |
} | |
if (typeof opts.endkey !== 'undefined') { | |
var inclusiveEnd = opts.inclusive_end !== false; | |
if (opts.descending) { | |
inclusiveEnd = !inclusiveEnd; | |
} | |
viewOpts.endkey = toIndexableString(inclusiveEnd ? [opts.endkey, {}] : [opts.endkey]); | |
} | |
if (typeof opts.key !== 'undefined') { | |
var keyStart = toIndexableString([opts.key]); | |
var keyEnd = toIndexableString([opts.key, {}]); | |
if (viewOpts.descending) { | |
viewOpts.endkey = keyStart; | |
viewOpts.startkey = keyEnd; | |
} else { | |
viewOpts.startkey = keyStart; | |
viewOpts.endkey = keyEnd; | |
} | |
} | |
if (!shouldReduce) { | |
if (typeof opts.limit === 'number') { | |
viewOpts.limit = opts.limit; | |
} | |
viewOpts.skip = skip; | |
} | |
return fetchFromView(viewOpts).then(onMapResultsReady); | |
} | |
} | |
function httpViewCleanup(db) { | |
return db.request({ | |
method: 'POST', | |
url: '_view_cleanup' | |
}); | |
} | |
function localViewCleanup(db) { | |
return db.get('_local/mrviews').then(function (metaDoc) { | |
var docsToViews = {}; | |
Object.keys(metaDoc.views).forEach(function (fullViewName) { | |
var parts = parseViewName(fullViewName); | |
var designDocName = '_design/' + parts[0]; | |
var viewName = parts[1]; | |
docsToViews[designDocName] = docsToViews[designDocName] || {}; | |
docsToViews[designDocName][viewName] = true; | |
}); | |
var opts = { | |
keys : Object.keys(docsToViews), | |
include_docs : true | |
}; | |
return db.allDocs(opts).then(function (res) { | |
var viewsToStatus = {}; | |
res.rows.forEach(function (row) { | |
var ddocName = row.key.substring(8); | |
Object.keys(docsToViews[row.key]).forEach(function (viewName) { | |
var fullViewName = ddocName + '/' + viewName; | |
/* istanbul ignore if */ | |
if (!metaDoc.views[fullViewName]) { | |
// new format, without slashes, to support PouchDB 2.2.0 | |
// migration test in pouchdb's browser.migration.js verifies this | |
fullViewName = viewName; | |
} | |
var viewDBNames = Object.keys(metaDoc.views[fullViewName]); | |
// design doc deleted, or view function nonexistent | |
var statusIsGood = row.doc && row.doc.views && row.doc.views[viewName]; | |
viewDBNames.forEach(function (viewDBName) { | |
viewsToStatus[viewDBName] = viewsToStatus[viewDBName] || statusIsGood; | |
}); | |
}); | |
}); | |
var dbsToDelete = Object.keys(viewsToStatus).filter(function (viewDBName) { | |
return !viewsToStatus[viewDBName]; | |
}); | |
var destroyPromises = dbsToDelete.map(function (viewDBName) { | |
return utils.sequentialize(getQueue(viewDBName), function () { | |
return new db.constructor(viewDBName, db.__opts).destroy(); | |
})(); | |
}); | |
return Promise.all(destroyPromises).then(function () { | |
return {ok: true}; | |
}); | |
}); | |
}, defaultsTo({ok: true})); | |
} | |
exports.viewCleanup = utils.callbackify(function () { | |
var db = this; | |
if (db.type() === 'http') { | |
return httpViewCleanup(db); | |
} | |
return localViewCleanup(db); | |
}); | |
function queryPromised(db, fun, opts) { | |
if (db.type() === 'http') { | |
return httpQuery(db, fun, opts); | |
} | |
if (typeof fun !== 'string') { | |
// temp_view | |
checkQueryParseError(opts, fun); | |
var createViewOpts = { | |
db : db, | |
viewName : 'temp_view/temp_view', | |
map : fun.map, | |
reduce : fun.reduce, | |
temporary : true | |
}; | |
tempViewQueue.add(function () { | |
return createView(createViewOpts).then(function (view) { | |
function cleanup() { | |
return view.db.destroy(); | |
} | |
return utils.fin(updateView(view).then(function () { | |
return queryView(view, opts); | |
}), cleanup); | |
}); | |
}); | |
return tempViewQueue.finish(); | |
} else { | |
// persistent view | |
var fullViewName = fun; | |
var parts = parseViewName(fullViewName); | |
var designDocName = parts[0]; | |
var viewName = parts[1]; | |
return db.get('_design/' + designDocName).then(function (doc) { | |
var fun = doc.views && doc.views[viewName]; | |
if (!fun || typeof fun.map !== 'string') { | |
throw new NotFoundError('ddoc ' + designDocName + ' has no view named ' + | |
viewName); | |
} | |
checkQueryParseError(opts, fun); | |
var createViewOpts = { | |
db : db, | |
viewName : fullViewName, | |
map : fun.map, | |
reduce : fun.reduce | |
}; | |
return createView(createViewOpts).then(function (view) { | |
if (opts.stale === 'ok' || opts.stale === 'update_after') { | |
if (opts.stale === 'update_after') { | |
process.nextTick(function () { | |
updateView(view); | |
}); | |
} | |
return queryView(view, opts); | |
} else { // stale not ok | |
return updateView(view).then(function () { | |
return queryView(view, opts); | |
}); | |
} | |
}); | |
}); | |
} | |
} | |
exports.query = function (fun, opts, callback) { | |
if (typeof opts === 'function') { | |
callback = opts; | |
opts = {}; | |
} | |
opts = utils.extend(true, {}, opts); | |
if (typeof fun === 'function') { | |
fun = {map : fun}; | |
} | |
var db = this; | |
var promise = Promise.resolve().then(function () { | |
return queryPromised(db, fun, opts); | |
}); | |
utils.promisedCallback(promise, callback); | |
return promise; | |
}; | |
function QueryParseError(message) { | |
this.status = 400; | |
this.name = 'query_parse_error'; | |
this.message = message; | |
this.error = true; | |
try { | |
Error.captureStackTrace(this, QueryParseError); | |
} catch (e) {} | |
} | |
utils.inherits(QueryParseError, Error); | |
function NotFoundError(message) { | |
this.status = 404; | |
this.name = 'not_found'; | |
this.message = message; | |
this.error = true; | |
try { | |
Error.captureStackTrace(this, NotFoundError); | |
} catch (e) {} | |
} | |
utils.inherits(NotFoundError, Error); | |
function BuiltInError(message) { | |
this.status = 500; | |
this.name = 'invalid_value'; | |
this.message = message; | |
this.error = true; | |
try { | |
Error.captureStackTrace(this, BuiltInError); | |
} catch (e) {} | |
} | |
utils.inherits(BuiltInError, Error); | |
}).call(this,require('_process')) | |
},{"./create-view":148,"./evalfunc":149,"./taskqueue":151,"./utils":153,"_process":81,"pouchdb-collate":144}],151:[function(require,module,exports){ | |
'use strict'; | |
/* | |
* Simple task queue to sequentialize actions. Assumes callbacks will eventually fire (once). | |
*/ | |
var Promise = require('./utils').Promise; | |
function TaskQueue() { | |
this.promise = new Promise(function (fulfill) {fulfill(); }); | |
} | |
TaskQueue.prototype.add = function (promiseFactory) { | |
this.promise = this.promise["catch"](function () { | |
// just recover | |
}).then(function () { | |
return promiseFactory(); | |
}); | |
return this.promise; | |
}; | |
TaskQueue.prototype.finish = function () { | |
return this.promise; | |
}; | |
module.exports = TaskQueue; | |
},{"./utils":153}],152:[function(require,module,exports){ | |
'use strict'; | |
var upsert = require('pouchdb-upsert').upsert; | |
module.exports = function (db, doc, diffFun) { | |
return upsert.apply(db, [doc, diffFun]); | |
}; | |
},{"pouchdb-upsert":154}],153:[function(require,module,exports){ | |
(function (process,global){ | |
'use strict'; | |
/* istanbul ignore if */ | |
if (typeof global.Promise === 'function') { | |
exports.Promise = global.Promise; | |
} else { | |
exports.Promise = require('lie'); | |
} | |
exports.inherits = require('inherits'); | |
exports.extend = require('pouchdb-extend'); | |
var argsarray = require('argsarray'); | |
exports.promisedCallback = function (promise, callback) { | |
if (callback) { | |
promise.then(function (res) { | |
process.nextTick(function () { | |
callback(null, res); | |
}); | |
}, function (reason) { | |
process.nextTick(function () { | |
callback(reason); | |
}); | |
}); | |
} | |
return promise; | |
}; | |
exports.callbackify = function (fun) { | |
return argsarray(function (args) { | |
var cb = args.pop(); | |
var promise = fun.apply(this, args); | |
if (typeof cb === 'function') { | |
exports.promisedCallback(promise, cb); | |
} | |
return promise; | |
}); | |
}; | |
// Promise finally util similar to Q.finally | |
exports.fin = function (promise, cb) { | |
return promise.then(function (res) { | |
var promise2 = cb(); | |
if (typeof promise2.then === 'function') { | |
return promise2.then(function () { | |
return res; | |
}); | |
} | |
return res; | |
}, function (reason) { | |
var promise2 = cb(); | |
if (typeof promise2.then === 'function') { | |
return promise2.then(function () { | |
throw reason; | |
}); | |
} | |
throw reason; | |
}); | |
}; | |
exports.sequentialize = function (queue, promiseFactory) { | |
return function () { | |
var args = arguments; | |
var that = this; | |
return queue.add(function () { | |
return promiseFactory.apply(that, args); | |
}); | |
}; | |
}; | |
exports.flatten = function (arrs) { | |
var res = []; | |
for (var i = 0, len = arrs.length; i < len; i++) { | |
res = res.concat(arrs[i]); | |
} | |
return res; | |
}; | |
// uniq an array of strings, order not guaranteed | |
// similar to underscore/lodash _.uniq | |
exports.uniq = function (arr) { | |
var map = {}; | |
for (var i = 0, len = arr.length; i < len; i++) { | |
map['$' + arr[i]] = true; | |
} | |
var keys = Object.keys(map); | |
var output = new Array(keys.length); | |
for (i = 0, len = keys.length; i < len; i++) { | |
output[i] = keys[i].substring(1); | |
} | |
return output; | |
}; | |
var crypto = require('crypto'); | |
var Md5 = require('spark-md5'); | |
exports.MD5 = function (string) { | |
/* istanbul ignore else */ | |
if (!process.browser) { | |
return crypto.createHash('md5').update(string).digest('hex'); | |
} else { | |
return Md5.hash(string); | |
} | |
}; | |
}).call(this,require('_process'),typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"_process":81,"argsarray":121,"crypto":79,"inherits":125,"lie":129,"pouchdb-extend":147,"spark-md5":155}],154:[function(require,module,exports){ | |
(function (global){ | |
'use strict'; | |
var PouchPromise; | |
/* istanbul ignore next */ | |
if (typeof window !== 'undefined' && window.PouchDB) { | |
PouchPromise = window.PouchDB.utils.Promise; | |
} else { | |
PouchPromise = typeof global.Promise === 'function' ? global.Promise : require('lie'); | |
} | |
// this is essentially the "update sugar" function from daleharvey/pouchdb#1388 | |
// the diffFun tells us what delta to apply to the doc. it either returns | |
// the doc, or false if it doesn't need to do an update after all | |
function upsertInner(db, docId, diffFun) { | |
return new PouchPromise(function (fulfill, reject) { | |
if (typeof docId !== 'string') { | |
return reject(new Error('doc id is required')); | |
} | |
db.get(docId, function (err, doc) { | |
if (err) { | |
/* istanbul ignore next */ | |
if (err.status !== 404) { | |
return reject(err); | |
} | |
doc = {}; | |
} | |
// the user might change the _rev, so save it for posterity | |
var docRev = doc._rev; | |
var newDoc = diffFun(doc); | |
if (!newDoc) { | |
// if the diffFun returns falsy, we short-circuit as | |
// an optimization | |
return fulfill({updated: false, rev: docRev}); | |
} | |
// users aren't allowed to modify these values, | |
// so reset them here | |
newDoc._id = docId; | |
newDoc._rev = docRev; | |
fulfill(tryAndPut(db, newDoc, diffFun)); | |
}); | |
}); | |
} | |
function tryAndPut(db, doc, diffFun) { | |
return db.put(doc).then(function (res) { | |
return { | |
updated: true, | |
rev: res.rev | |
}; | |
}, function (err) { | |
/* istanbul ignore next */ | |
if (err.status !== 409) { | |
throw err; | |
} | |
return upsertInner(db, doc._id, diffFun); | |
}); | |
} | |
exports.upsert = function upsert(docId, diffFun, cb) { | |
var db = this; | |
var promise = upsertInner(db, docId, diffFun); | |
if (typeof cb !== 'function') { | |
return promise; | |
} | |
promise.then(function (resp) { | |
cb(null, resp); | |
}, cb); | |
}; | |
exports.putIfNotExists = function putIfNotExists(docId, doc, cb) { | |
var db = this; | |
if (typeof docId !== 'string') { | |
cb = doc; | |
doc = docId; | |
docId = doc._id; | |
} | |
var diffFun = function (existingDoc) { | |
if (existingDoc._rev) { | |
return false; // do nothing | |
} | |
return doc; | |
}; | |
var promise = upsertInner(db, docId, diffFun); | |
if (typeof cb !== 'function') { | |
return promise; | |
} | |
promise.then(function (resp) { | |
cb(null, resp); | |
}, cb); | |
}; | |
/* istanbul ignore next */ | |
if (typeof window !== 'undefined' && window.PouchDB) { | |
window.PouchDB.plugin(exports); | |
} | |
}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) | |
},{"lie":129}],155:[function(require,module,exports){ | |
/*jshint bitwise:false*/ | |
/*global unescape*/ | |
(function (factory) { | |
if (typeof exports === 'object') { | |
// Node/CommonJS | |
module.exports = factory(); | |
} else if (typeof define === 'function' && define.amd) { | |
// AMD | |
define(factory); | |
} else { | |
// Browser globals (with support for web workers) | |
var glob; | |
try { | |
glob = window; | |
} catch (e) { | |
glob = self; | |
} | |
glob.SparkMD5 = factory(); | |
} | |
}(function (undefined) { | |
'use strict'; | |
//////////////////////////////////////////////////////////////////////////// | |
/* | |
* Fastest md5 implementation around (JKM md5) | |
* Credits: Joseph Myers | |
* | |
* @see http://www.myersdaily.org/joseph/javascript/md5-text.html | |
* @see http://jsperf.com/md5-shootout/7 | |
*/ | |
/* this function is much faster, | |
so if possible we use it. Some IEs | |
are the only ones I know of that | |
need the idiotic second function, | |
generated by an if clause. */ | |
var add32 = function (a, b) { | |
return (a + b) & 0xFFFFFFFF; | |
}, | |
cmn = function (q, a, b, x, s, t) { | |
a = add32(add32(a, q), add32(x, t)); | |
return add32((a << s) | (a >>> (32 - s)), b); | |
}, | |
ff = function (a, b, c, d, x, s, t) { | |
return cmn((b & c) | ((~b) & d), a, b, x, s, t); | |
}, | |
gg = function (a, b, c, d, x, s, t) { | |
return cmn((b & d) | (c & (~d)), a, b, x, s, t); | |
}, | |
hh = function (a, b, c, d, x, s, t) { | |
return cmn(b ^ c ^ d, a, b, x, s, t); | |
}, | |
ii = function (a, b, c, d, x, s, t) { | |
return cmn(c ^ (b | (~d)), a, b, x, s, t); | |
}, | |
md5cycle = function (x, k) { | |
var a = x[0], | |
b = x[1], | |
c = x[2], | |
d = x[3]; | |
a = ff(a, b, c, d, k[0], 7, -680876936); | |
d = ff(d, a, b, c, k[1], 12, -389564586); | |
c = ff(c, d, a, b, k[2], 17, 606105819); | |
b = ff(b, c, d, a, k[3], 22, -1044525330); | |
a = ff(a, b, c, d, k[4], 7, -176418897); | |
d = ff(d, a, b, c, k[5], 12, 1200080426); | |
c = ff(c, d, a, b, k[6], 17, -1473231341); | |
b = ff(b, c, d, a, k[7], 22, -45705983); | |
a = ff(a, b, c, d, k[8], 7, 1770035416); | |
d = ff(d, a, b, c, k[9], 12, -1958414417); | |
c = ff(c, d, a, b, k[10], 17, -42063); | |
b = ff(b, c, d, a, k[11], 22, -1990404162); | |
a = ff(a, b, c, d, k[12], 7, 1804603682); | |
d = ff(d, a, b, c, k[13], 12, -40341101); | |
c = ff(c, d, a, b, k[14], 17, -1502002290); | |
b = ff(b, c, d, a, k[15], 22, 1236535329); | |
a = gg(a, b, c, d, k[1], 5, -165796510); | |
d = gg(d, a, b, c, k[6], 9, -1069501632); | |
c = gg(c, d, a, b, k[11], 14, 643717713); | |
b = gg(b, c, d, a, k[0], 20, -373897302); | |
a = gg(a, b, c, d, k[5], 5, -701558691); | |
d = gg(d, a, b, c, k[10], 9, 38016083); | |
c = gg(c, d, a, b, k[15], 14, -660478335); | |
b = gg(b, c, d, a, k[4], 20, -405537848); | |
a = gg(a, b, c, d, k[9], 5, 568446438); | |
d = gg(d, a, b, c, k[14], 9, -1019803690); | |
c = gg(c, d, a, b, k[3], 14, -187363961); | |
b = gg(b, c, d, a, k[8], 20, 1163531501); | |
a = gg(a, b, c, d, k[13], 5, -1444681467); | |
d = gg(d, a, b, c, k[2], 9, -51403784); | |
c = gg(c, d, a, b, k[7], 14, 1735328473); | |
b = gg(b, c, d, a, k[12], 20, -1926607734); | |
a = hh(a, b, c, d, k[5], 4, -378558); | |
d = hh(d, a, b, c, k[8], 11, -2022574463); | |
c = hh(c, d, a, b, k[11], 16, 1839030562); | |
b = hh(b, c, d, a, k[14], 23, -35309556); | |
a = hh(a, b, c, d, k[1], 4, -1530992060); | |
d = hh(d, a, b, c, k[4], 11, 1272893353); | |
c = hh(c, d, a, b, k[7], 16, -155497632); | |
b = hh(b, c, d, a, k[10], 23, -1094730640); | |
a = hh(a, b, c, d, k[13], 4, 681279174); | |
d = hh(d, a, b, c, k[0], 11, -358537222); | |
c = hh(c, d, a, b, k[3], 16, -722521979); | |
b = hh(b, c, d, a, k[6], 23, 76029189); | |
a = hh(a, b, c, d, k[9], 4, -640364487); | |
d = hh(d, a, b, c, k[12], 11, -421815835); | |
c = hh(c, d, a, b, k[15], 16, 530742520); | |
b = hh(b, c, d, a, k[2], 23, -995338651); | |
a = ii(a, b, c, d, k[0], 6, -198630844); | |
d = ii(d, a, b, c, k[7], 10, 1126891415); | |
c = ii(c, d, a, b, k[14], 15, -1416354905); | |
b = ii(b, c, d, a, k[5], 21, -57434055); | |
a = ii(a, b, c, d, k[12], 6, 1700485571); | |
d = ii(d, a, b, c, k[3], 10, -1894986606); | |
c = ii(c, d, a, b, k[10], 15, -1051523); | |
b = ii(b, c, d, a, k[1], 21, -2054922799); | |
a = ii(a, b, c, d, k[8], 6, 1873313359); | |
d = ii(d, a, b, c, k[15], 10, -30611744); | |
c = ii(c, d, a, b, k[6], 15, -1560198380); | |
b = ii(b, c, d, a, k[13], 21, 1309151649); | |
a = ii(a, b, c, d, k[4], 6, -145523070); | |
d = ii(d, a, b, c, k[11], 10, -1120210379); | |
c = ii(c, d, a, b, k[2], 15, 718787259); | |
b = ii(b, c, d, a, k[9], 21, -343485551); | |
x[0] = add32(a, x[0]); | |
x[1] = add32(b, x[1]); | |
x[2] = add32(c, x[2]); | |
x[3] = add32(d, x[3]); | |
}, | |
/* there needs to be support for Unicode here, | |
* unless we pretend that we can redefine the MD-5 | |
* algorithm for multi-byte characters (perhaps | |
* by adding every four 16-bit characters and | |
* shortening the sum to 32 bits). Otherwise | |
* I suggest performing MD-5 as if every character | |
* was two bytes--e.g., 0040 0025 = @%--but then | |
* how will an ordinary MD-5 sum be matched? | |
* There is no way to standardize text to something | |
* like UTF-8 before transformation; speed cost is | |
* utterly prohibitive. The JavaScript standard | |
* itself needs to look at this: it should start | |
* providing access to strings as preformed UTF-8 | |
* 8-bit unsigned value arrays. | |
*/ | |
md5blk = function (s) { | |
var md5blks = [], | |
i; /* Andy King said do it this way. */ | |
for (i = 0; i < 64; i += 4) { | |
md5blks[i >> 2] = s.charCodeAt(i) + (s.charCodeAt(i + 1) << 8) + (s.charCodeAt(i + 2) << 16) + (s.charCodeAt(i + 3) << 24); | |
} | |
return md5blks; | |
}, | |
md5blk_array = function (a) { | |
var md5blks = [], | |
i; /* Andy King said do it this way. */ | |
for (i = 0; i < 64; i += 4) { | |
md5blks[i >> 2] = a[i] + (a[i + 1] << 8) + (a[i + 2] << 16) + (a[i + 3] << 24); | |
} | |
return md5blks; | |
}, | |
md51 = function (s) { | |
var n = s.length, | |
state = [1732584193, -271733879, -1732584194, 271733878], | |
i, | |
length, | |
tail, | |
tmp, | |
lo, | |
hi; | |
for (i = 64; i <= n; i += 64) { | |
md5cycle(state, md5blk(s.substring(i - 64, i))); | |
} | |
s = s.substring(i - 64); | |
length = s.length; | |
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | |
for (i = 0; i < length; i += 1) { | |
tail[i >> 2] |= s.charCodeAt(i) << ((i % 4) << 3); | |
} | |
tail[i >> 2] |= 0x80 << ((i % 4) << 3); | |
if (i > 55) { | |
md5cycle(state, tail); | |
for (i = 0; i < 16; i += 1) { | |
tail[i] = 0; | |
} | |
} | |
// Beware that the final length might not fit in 32 bits so we take care of that | |
tmp = n * 8; | |
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | |
lo = parseInt(tmp[2], 16); | |
hi = parseInt(tmp[1], 16) || 0; | |
tail[14] = lo; | |
tail[15] = hi; | |
md5cycle(state, tail); | |
return state; | |
}, | |
md51_array = function (a) { | |
var n = a.length, | |
state = [1732584193, -271733879, -1732584194, 271733878], | |
i, | |
length, | |
tail, | |
tmp, | |
lo, | |
hi; | |
for (i = 64; i <= n; i += 64) { | |
md5cycle(state, md5blk_array(a.subarray(i - 64, i))); | |
} | |
// Not sure if it is a bug, however IE10 will always produce a sub array of length 1 | |
// containing the last element of the parent array if the sub array specified starts | |
// beyond the length of the parent array - weird. | |
// https://connect.microsoft.com/IE/feedback/details/771452/typed-array-subarray-issue | |
a = (i - 64) < n ? a.subarray(i - 64) : new Uint8Array(0); | |
length = a.length; | |
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; | |
for (i = 0; i < length; i += 1) { | |
tail[i >> 2] |= a[i] << ((i % 4) << 3); | |
} | |
tail[i >> 2] |= 0x80 << ((i % 4) << 3); | |
if (i > 55) { | |
md5cycle(state, tail); | |
for (i = 0; i < 16; i += 1) { | |
tail[i] = 0; | |
} | |
} | |
// Beware that the final length might not fit in 32 bits so we take care of that | |
tmp = n * 8; | |
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | |
lo = parseInt(tmp[2], 16); | |
hi = parseInt(tmp[1], 16) || 0; | |
tail[14] = lo; | |
tail[15] = hi; | |
md5cycle(state, tail); | |
return state; | |
}, | |
hex_chr = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'], | |
rhex = function (n) { | |
var s = '', | |
j; | |
for (j = 0; j < 4; j += 1) { | |
s += hex_chr[(n >> (j * 8 + 4)) & 0x0F] + hex_chr[(n >> (j * 8)) & 0x0F]; | |
} | |
return s; | |
}, | |
hex = function (x) { | |
var i; | |
for (i = 0; i < x.length; i += 1) { | |
x[i] = rhex(x[i]); | |
} | |
return x.join(''); | |
}, | |
md5 = function (s) { | |
return hex(md51(s)); | |
}, | |
//////////////////////////////////////////////////////////////////////////// | |
/** | |
* SparkMD5 OOP implementation. | |
* | |
* Use this class to perform an incremental md5, otherwise use the | |
* static methods instead. | |
*/ | |
SparkMD5 = function () { | |
// call reset to init the instance | |
this.reset(); | |
}; | |
// In some cases the fast add32 function cannot be used.. | |
if (md5('hello') !== '5d41402abc4b2a76b9719d911017c592') { | |
add32 = function (x, y) { | |
var lsw = (x & 0xFFFF) + (y & 0xFFFF), | |
msw = (x >> 16) + (y >> 16) + (lsw >> 16); | |
return (msw << 16) | (lsw & 0xFFFF); | |
}; | |
} | |
/** | |
* Appends a string. | |
* A conversion will be applied if an utf8 string is detected. | |
* | |
* @param {String} str The string to be appended | |
* | |
* @return {SparkMD5} The instance itself | |
*/ | |
SparkMD5.prototype.append = function (str) { | |
// converts the string to utf8 bytes if necessary | |
if (/[\u0080-\uFFFF]/.test(str)) { | |
str = unescape(encodeURIComponent(str)); | |
} | |
// then append as binary | |
this.appendBinary(str); | |
return this; | |
}; | |
/** | |
* Appends a binary string. | |
* | |
* @param {String} contents The binary string to be appended | |
* | |
* @return {SparkMD5} The instance itself | |
*/ | |
SparkMD5.prototype.appendBinary = function (contents) { | |
this._buff += contents; | |
this._length += contents.length; | |
var length = this._buff.length, | |
i; | |
for (i = 64; i <= length; i += 64) { | |
md5cycle(this._state, md5blk(this._buff.substring(i - 64, i))); | |
} | |
this._buff = this._buff.substr(i - 64); | |
return this; | |
}; | |
/** | |
* Finishes the incremental computation, reseting the internal state and | |
* returning the result. | |
* Use the raw parameter to obtain the raw result instead of the hex one. | |
* | |
* @param {Boolean} raw True to get the raw result, false to get the hex result | |
* | |
* @return {String|Array} The result | |
*/ | |
SparkMD5.prototype.end = function (raw) { | |
var buff = this._buff, | |
length = buff.length, | |
i, | |
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], | |
ret; | |
for (i = 0; i < length; i += 1) { | |
tail[i >> 2] |= buff.charCodeAt(i) << ((i % 4) << 3); | |
} | |
this._finish(tail, length); | |
ret = !!raw ? this._state : hex(this._state); | |
this.reset(); | |
return ret; | |
}; | |
/** | |
* Finish the final calculation based on the tail. | |
* | |
* @param {Array} tail The tail (will be modified) | |
* @param {Number} length The length of the remaining buffer | |
*/ | |
SparkMD5.prototype._finish = function (tail, length) { | |
var i = length, | |
tmp, | |
lo, | |
hi; | |
tail[i >> 2] |= 0x80 << ((i % 4) << 3); | |
if (i > 55) { | |
md5cycle(this._state, tail); | |
for (i = 0; i < 16; i += 1) { | |
tail[i] = 0; | |
} | |
} | |
// Do the final computation based on the tail and length | |
// Beware that the final length may not fit in 32 bits so we take care of that | |
tmp = this._length * 8; | |
tmp = tmp.toString(16).match(/(.*?)(.{0,8})$/); | |
lo = parseInt(tmp[2], 16); | |
hi = parseInt(tmp[1], 16) || 0; | |
tail[14] = lo; | |
tail[15] = hi; | |
md5cycle(this._state, tail); | |
}; | |
/** | |
* Resets the internal state of the computation. | |
* | |
* @return {SparkMD5} The instance itself | |
*/ | |
SparkMD5.prototype.reset = function () { | |
this._buff = ""; | |
this._length = 0; | |
this._state = [1732584193, -271733879, -1732584194, 271733878]; | |
return this; | |
}; | |
/** | |
* Releases memory used by the incremental buffer and other aditional | |
* resources. If you plan to use the instance again, use reset instead. | |
*/ | |
SparkMD5.prototype.destroy = function () { | |
delete this._state; | |
delete this._buff; | |
delete this._length; | |
}; | |
/** | |
* Performs the md5 hash on a string. | |
* A conversion will be applied if utf8 string is detected. | |
* | |
* @param {String} str The string | |
* @param {Boolean} raw True to get the raw result, false to get the hex result | |
* | |
* @return {String|Array} The result | |
*/ | |
SparkMD5.hash = function (str, raw) { | |
// converts the string to utf8 bytes if necessary | |
if (/[\u0080-\uFFFF]/.test(str)) { | |
str = unescape(encodeURIComponent(str)); | |
} | |
var hash = md51(str); | |
return !!raw ? hash : hex(hash); | |
}; | |
/** | |
* Performs the md5 hash on a binary string. | |
* | |
* @param {String} content The binary string | |
* @param {Boolean} raw True to get the raw result, false to get the hex result | |
* | |
* @return {String|Array} The result | |
*/ | |
SparkMD5.hashBinary = function (content, raw) { | |
var hash = md51(content); | |
return !!raw ? hash : hex(hash); | |
}; | |
/** | |
* SparkMD5 OOP implementation for array buffers. | |
* | |
* Use this class to perform an incremental md5 ONLY for array buffers. | |
*/ | |
SparkMD5.ArrayBuffer = function () { | |
// call reset to init the instance | |
this.reset(); | |
}; | |
//////////////////////////////////////////////////////////////////////////// | |
/** | |
* Appends an array buffer. | |
* | |
* @param {ArrayBuffer} arr The array to be appended | |
* | |
* @return {SparkMD5.ArrayBuffer} The instance itself | |
*/ | |
SparkMD5.ArrayBuffer.prototype.append = function (arr) { | |
// TODO: we could avoid the concatenation here but the algorithm would be more complex | |
// if you find yourself needing extra performance, please make a PR. | |
var buff = this._concatArrayBuffer(this._buff, arr), | |
length = buff.length, | |
i; | |
this._length += arr.byteLength; | |
for (i = 64; i <= length; i += 64) { | |
md5cycle(this._state, md5blk_array(buff.subarray(i - 64, i))); | |
} | |
// Avoids IE10 weirdness (documented above) | |
this._buff = (i - 64) < length ? buff.subarray(i - 64) : new Uint8Array(0); | |
return this; | |
}; | |
/** | |
* Finishes the incremental computation, reseting the internal state and | |
* returning the result. | |
* Use the raw parameter to obtain the raw result instead of the hex one. | |
* | |
* @param {Boolean} raw True to get the raw result, false to get the hex result | |
* | |
* @return {String|Array} The result | |
*/ | |
SparkMD5.ArrayBuffer.prototype.end = function (raw) { | |
var buff = this._buff, | |
length = buff.length, | |
tail = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], | |
i, | |
ret; | |
for (i = 0; i < length; i += 1) { | |
tail[i >> 2] |= buff[i] << ((i % 4) << 3); | |
} | |
this._finish(tail, length); | |
ret = !!raw ? this._state : hex(this._state); | |
this.reset(); | |
return ret; | |
}; | |
SparkMD5.ArrayBuffer.prototype._finish = SparkMD5.prototype._finish; | |
/** | |
* Resets the internal state of the computation. | |
* | |
* @return {SparkMD5.ArrayBuffer} The instance itself | |
*/ | |
SparkMD5.ArrayBuffer.prototype.reset = function () { | |
this._buff = new Uint8Array(0); | |
this._length = 0; | |
this._state = [1732584193, -271733879, -1732584194, 271733878]; | |
return this; | |
}; | |
/** | |
* Releases memory used by the incremental buffer and other aditional | |
* resources. If you plan to use the instance again, use reset instead. | |
*/ | |
SparkMD5.ArrayBuffer.prototype.destroy = SparkMD5.prototype.destroy; | |
/** | |
* Concats two array buffers, returning a new one. | |
* | |
* @param {ArrayBuffer} first The first array buffer | |
* @param {ArrayBuffer} second The second array buffer | |
* | |
* @return {ArrayBuffer} The new array buffer | |
*/ | |
SparkMD5.ArrayBuffer.prototype._concatArrayBuffer = function (first, second) { | |
var firstLength = first.length, | |
result = new Uint8Array(firstLength + second.byteLength); | |
result.set(first); | |
result.set(new Uint8Array(second), firstLength); | |
return result; | |
}; | |
/** | |
* Performs the md5 hash on an array buffer. | |
* | |
* @param {ArrayBuffer} arr The array buffer | |
* @param {Boolean} raw True to get the raw result, false to get the hex result | |
* | |
* @return {String|Array} The result | |
*/ | |
SparkMD5.ArrayBuffer.hash = function (arr, raw) { | |
var hash = md51_array(new Uint8Array(arr)); | |
return !!raw ? hash : hex(hash); | |
}; | |
return SparkMD5; | |
})); | |
},{}],156:[function(require,module,exports){ | |
'use strict'; | |
/** | |
* Stringify/parse functions that don't operate | |
* recursively, so they avoid call stack exceeded | |
* errors. | |
*/ | |
exports.stringify = function stringify(input) { | |
var queue = []; | |
queue.push({obj: input}); | |
var res = ''; | |
var next, obj, prefix, val, i, arrayPrefix, keys, k, key, value, objPrefix; | |
while ((next = queue.pop())) { | |
obj = next.obj; | |
prefix = next.prefix || ''; | |
val = next.val || ''; | |
res += prefix; | |
if (val) { | |
res += val; | |
} else if (typeof obj !== 'object') { | |
res += typeof obj === 'undefined' ? null : JSON.stringify(obj); | |
} else if (obj === null) { | |
res += 'null'; | |
} else if (Array.isArray(obj)) { | |
queue.push({val: ']'}); | |
for (i = obj.length - 1; i >= 0; i--) { | |
arrayPrefix = i === 0 ? '' : ','; | |
queue.push({obj: obj[i], prefix: arrayPrefix}); | |
} | |
queue.push({val: '['}); | |
} else { // object | |
keys = []; | |
for (k in obj) { | |
if (obj.hasOwnProperty(k)) { | |
keys.push(k); | |
} | |
} | |
queue.push({val: '}'}); | |
for (i = keys.length - 1; i >= 0; i--) { | |
key = keys[i]; | |
value = obj[key]; | |
objPrefix = (i > 0 ? ',' : ''); | |
objPrefix += JSON.stringify(key) + ':'; | |
queue.push({obj: value, prefix: objPrefix}); | |
} | |
queue.push({val: '{'}); | |
} | |
} | |
return res; | |
}; | |
// Convenience function for the parse function. | |
// This pop function is basically copied from | |
// pouchCollate.parseIndexableString | |
function pop(obj, stack, metaStack) { | |
var lastMetaElement = metaStack[metaStack.length - 1]; | |
if (obj === lastMetaElement.element) { | |
// popping a meta-element, e.g. an object whose value is another object | |
metaStack.pop(); | |
lastMetaElement = metaStack[metaStack.length - 1]; | |
} | |
var element = lastMetaElement.element; | |
var lastElementIndex = lastMetaElement.index; | |
if (Array.isArray(element)) { | |
element.push(obj); | |
} else if (lastElementIndex === stack.length - 2) { // obj with key+value | |
var key = stack.pop(); | |
element[key] = obj; | |
} else { | |
stack.push(obj); // obj with key only | |
} | |
} | |
exports.parse = function (str) { | |
var stack = []; | |
var metaStack = []; // stack for arrays and objects | |
var i = 0; | |
var collationIndex,parsedNum,numChar; | |
var parsedString,lastCh,numConsecutiveSlashes,ch; | |
var arrayElement, objElement; | |
while (true) { | |
collationIndex = str[i++]; | |
if (collationIndex === '}' || | |
collationIndex === ']' || | |
typeof collationIndex === 'undefined') { | |
if (stack.length === 1) { | |
return stack.pop(); | |
} else { | |
pop(stack.pop(), stack, metaStack); | |
continue; | |
} | |
} | |
switch (collationIndex) { | |
case ' ': | |
case '\t': | |
case '\n': | |
case ':': | |
case ',': | |
break; | |
case 'n': | |
i += 3; // 'ull' | |
pop(null, stack, metaStack); | |
break; | |
case 't': | |
i += 3; // 'rue' | |
pop(true, stack, metaStack); | |
break; | |
case 'f': | |
i += 4; // 'alse' | |
pop(false, stack, metaStack); | |
break; | |
case '0': | |
case '1': | |
case '2': | |
case '3': | |
case '4': | |
case '5': | |
case '6': | |
case '7': | |
case '8': | |
case '9': | |
case '-': | |
parsedNum = ''; | |
i--; | |
while (true) { | |
numChar = str[i++]; | |
if (/[\d\.\-e\+]/.test(numChar)) { | |
parsedNum += numChar; | |
} else { | |
i--; | |
break; | |
} | |
} | |
pop(parseFloat(parsedNum), stack, metaStack); | |
break; | |
case '"': | |
parsedString = ''; | |
lastCh = void 0; | |
numConsecutiveSlashes = 0; | |
while (true) { | |
ch = str[i++]; | |
if (ch !== '"' || (lastCh === '\\' && | |
numConsecutiveSlashes % 2 === 1)) { | |
parsedString += ch; | |
lastCh = ch; | |
if (lastCh === '\\') { | |
numConsecutiveSlashes++; | |
} else { | |
numConsecutiveSlashes = 0; | |
} | |
} else { | |
break; | |
} | |
} | |
pop(JSON.parse('"' + parsedString + '"'), stack, metaStack); | |
break; | |
case '[': | |
arrayElement = { element: [], index: stack.length }; | |
stack.push(arrayElement.element); | |
metaStack.push(arrayElement); | |
break; | |
case '{': | |
objElement = { element: {}, index: stack.length }; | |
stack.push(objElement.element); | |
metaStack.push(objElement); | |
break; | |
default: | |
throw new Error( | |
'unexpectedly reached end of input: ' + collationIndex); | |
} | |
} | |
}; | |
},{}],157:[function(require,module,exports){ | |
(function (global, factory) { | |
if (typeof define === "function" && define.amd) { | |
define(["exports", "./record", "./list", "./typed"], factory); | |
} else if (typeof exports !== "undefined") { | |
factory(exports, require("./record"), require("./list"), require("./typed")); | |
} else { | |
var mod = { | |
exports: {} | |
}; | |
factory(mod.exports, global.record, global.list, global.typed); | |
global.index = mod.exports; | |
} | |
})(this, function (exports, _record, _list, _typed) { | |
"use strict"; | |
Object.defineProperty(exports, "Record", { | |
enumerable: true, | |
get: function get() { | |
return _record.Record; | |
} | |
}); | |
Object.defineProperty(exports, "List", { | |
enumerable: true, | |
get: function get() { | |
return _list.List; | |
} | |
}); | |
Object.defineProperty(exports, "Typed", { | |
enumerable: true, | |
get: function get() { | |
return _typed.Typed; | |
} | |
}); | |
Object.defineProperty(exports, "typeOf", { | |
enumerable: true, | |
get: function get() { | |
return _typed.typeOf; | |
} | |
}); | |
Object.defineProperty(exports, "Type", { | |
enumerable: true, | |
get: function get() { | |
return _typed.Type; | |
} | |
}); | |
Object.defineProperty(exports, "Any", { | |
enumerable: true, | |
get: function get() { | |
return _typed.Any; | |
} | |
}); | |
Object.defineProperty(exports, "Union", { | |
enumerable: true, | |
get: function get() { | |
return _typed.Union; | |
} | |
}); | |
Object.defineProperty(exports, "Maybe", { | |
enumerable: true, | |
get: function get() { | |
return _typed.Maybe; | |
} | |
}); | |
}); | |
},{"./list":158,"./record":159,"./typed":160}],158:[function(require,module,exports){ | |
(function (global, factory) { | |
if (typeof define === 'function' && define.amd) { | |
define(['exports', './typed', 'immutable'], factory); | |
} else if (typeof exports !== 'undefined') { | |
factory(exports, require('./typed'), require('immutable')); | |
} else { | |
var mod = { | |
exports: {} | |
}; | |
factory(mod.exports, global.typed, global.Immutable); | |
global.list = mod.exports; | |
} | |
})(this, function (exports, _typed, _immutable) { | |
'use strict'; | |
var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i['return']) _i['return'](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError('Invalid attempt to destructure non-iterable instance'); } }; })(); | |
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); | |
var _get = function get(_x2, _x3, _x4) { var _again = true; _function: while (_again) { var object = _x2, property = _x3, receiver = _x4; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x2 = parent; _x3 = property; _x4 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } }; | |
function _defaults(obj, defaults) { var keys = Object.getOwnPropertyNames(defaults); for (var i = 0; i < keys.length; i++) { var key = keys[i]; var value = Object.getOwnPropertyDescriptor(defaults, key); if (value && value.configurable && obj[key] === undefined) { Object.defineProperty(obj, key, value); } } return obj; } | |
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } | |
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } } | |
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) _defaults(subClass, superClass); } | |
var ImmutableList = _immutable.List; | |
var Indexed = _immutable.Iterable.Indexed; | |
var $store = _typed.Typed.store; | |
var $type = _typed.Typed.type; | |
var $read = _typed.Typed.read; | |
var $step = _typed.Typed.step; | |
var $init = _typed.Typed.init; | |
var $result = _typed.Typed.result; | |
var $label = _typed.Typed.label; | |
var $typeName = _typed.Typed.typeName; | |
var $empty = _typed.Typed.empty; | |
var change = function change(list, f) { | |
var store = f(list[$store]); | |
if (store === list[$store]) { | |
return list; | |
} else { | |
var result = list.__ownerID ? list : (0, _typed.construct)(list); | |
result[$store] = store; | |
result.size = store.size; | |
return result; | |
} | |
}; | |
var _clear = function _clear(target) { | |
return target.clear(); | |
}; | |
var _pop = function _pop(target) { | |
return target.pop(); | |
}; | |
var _shift = function _shift(target) { | |
return target.shift(); | |
}; | |
var TypeInferer = (function (_Type) { | |
function TypeInferer() { | |
_classCallCheck(this, TypeInferer); | |
_get(Object.getPrototypeOf(TypeInferer.prototype), 'constructor', this).apply(this, arguments); | |
} | |
_inherits(TypeInferer, _Type); | |
_createClass(TypeInferer, [{ | |
key: _typed.Typed.typeName, | |
value: function value() { | |
return 'TypeInferer'; | |
} | |
}, { | |
key: _typed.Typed.read, | |
value: function value(_value) { | |
// typeOf usually creates type for the value with that | |
// value being a default. For type inference we should | |
// actually use a base type instead of type with default | |
// there for we use prototype of the constructor. | |
var type = (0, _typed.typeOf)(_value).constructor.prototype; | |
this.type = this.type ? (0, _typed.Union)(this.type, type) : type; | |
return _value; | |
} | |
}]); | |
return TypeInferer; | |
})(_typed.Type); | |
function BaseImmutableList() {} | |
BaseImmutableList.prototype = ImmutableList.prototype; | |
var TypeInferedList = (function (_BaseImmutableList) { | |
_inherits(TypeInferedList, _BaseImmutableList); | |
_createClass(TypeInferedList, null, [{ | |
key: 'from', | |
value: function from(list) { | |
var result = (0, _typed.construct)(this.prototype); | |
result[$store] = list[$store]; | |
result.size = list.size; | |
return result; | |
} | |
}]); | |
function TypeInferedList(value) { | |
_classCallCheck(this, TypeInferedList); | |
_get(Object.getPrototypeOf(TypeInferedList.prototype), 'constructor', this).call(this); | |
return TypeInferedList.prototype[$read](value); | |
} | |
_createClass(TypeInferedList, [{ | |
key: _typed.Typed.init, | |
value: function value() { | |
var result = (0, _typed.construct)(this).asMutable(); | |
result[$type] = new TypeInferer(); | |
return result; | |
} | |
}, { | |
key: _typed.Typed.result, | |
value: function value(result) { | |
var list = result.asImmutable(); | |
list[$type] = result[$type].type; | |
return list; | |
} | |
}, { | |
key: _typed.Typed.read, | |
value: function value(input) { | |
var Type = this.constructor; | |
if (input === null || input === void 0) { | |
if (!this[$empty]) { | |
var result = (0, _typed.construct)(this); | |
result[$store] = ImmutableList(); | |
result.size = 0; | |
this[$empty] = result; | |
} | |
return this[$empty]; | |
} | |
if (input instanceof Type && input && input.constructor === Type) { | |
return input; | |
} | |
var source = Indexed(input); | |
var isEmpty = source.size === 0; | |
if (isEmpty && this[$empty]) { | |
return this[$empty]; | |
} | |
var list = this[$init](); | |
list.size = source.size; | |
source.forEach(function (value, index) { | |
list.set(index, value); | |
}); | |
list = this[$result](list); | |
if (isEmpty) { | |
this[$empty] = list; | |
} | |
return list; | |
} | |
}, { | |
key: _typed.Typed.step, | |
value: function value(result, _ref) { | |
var _ref2 = _slicedToArray(_ref, 2); | |
var key = _ref2[0]; | |
var _value2 = _ref2[1]; | |
return change(result, function () { | |
var store = arguments.length <= 0 || arguments[0] === undefined ? ImmutableList() : arguments[0]; | |
return store.set(key, _value2); | |
}); | |
} | |
}, { | |
key: _typed.Typed.typeName, | |
value: function value() { | |
return this[$label] || 'Typed.List(' + this[$type][$typeName]() + ')'; | |
} | |
}, { | |
key: 'toString', | |
value: function toString() { | |
return this.__toString(this[$typeName]() + '([', '])'); | |
} | |
}, { | |
key: 'has', | |
value: function has(key) { | |
return this[$store].has(key); | |
} | |
}, { | |
key: 'get', | |
value: function get(index, notSetValue) { | |
return this[$store] ? this[$store].get(index, notSetValue) : notSetValue; | |
} | |
}, { | |
key: 'clear', | |
value: function clear() { | |
if (this.__ownerID) { | |
return change(this, _clear); | |
} | |
return this[$empty] || this[$read](); | |
} | |
}, { | |
key: 'remove', | |
value: function remove(index) { | |
return change(this, function (store) { | |
return store && store.remove(index); | |
}); | |
} | |
}, { | |
key: 'set', | |
value: function set(index, value) { | |
if (index > this.size) { | |
throw TypeError('Index "' + index + '" is out of bound'); | |
} | |
var result = this[$type][$read](value); | |
if (result instanceof TypeError) { | |
throw TypeError('Invalid value: ' + result.message); | |
} | |
return this[$step](this, [index, result]); | |
} | |
}, { | |
key: 'push', | |
value: function push() { | |
var type = this[$type]; | |
var items = []; | |
for (var _len = arguments.length, values = Array(_len), _key = 0; _key < _len; _key++) { | |
values[_key] = arguments[_key]; | |
} | |
var count = values.length; | |
var index = 0; | |
while (index < count) { | |
var value = values[index]; | |
var result = type[$read](value); | |
if (result instanceof TypeError) { | |
throw TypeError('Invalid value: ' + result.message); | |
} | |
items.push(result); | |
index = index + 1; | |
} | |
return change(this, function (store) { | |
return store ? store.push.apply(store, items) : ImmutableList.apply(undefined, items); | |
}); | |
} | |
}, { | |
key: 'pop', | |
value: function pop() { | |
return change(this, _pop); | |
} | |
}, { | |
key: 'unshift', | |
value: function unshift() { | |
var type = this[$type]; | |
var items = []; | |
for (var _len2 = arguments.length, values = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { | |
values[_key2] = arguments[_key2]; | |
} | |
var count = values.length; | |
var index = 0; | |
while (index < count) { | |
var value = values[index]; | |
var result = type[$read](value); | |
if (result instanceof TypeError) { | |
throw TypeError('Invalid value: ' + result.message); | |
} | |
items.push(result); | |
index = index + 1; | |
} | |
return change(this, function (store) { | |
return store ? store.unshift.apply(store, items) : ImmutableList.apply(undefined, items); | |
}); | |
} | |
}, { | |
key: 'shift', | |
value: function shift() { | |
return change(this, _shift); | |
} | |
}, { | |
key: 'setSize', | |
value: function setSize(size) { | |
if (size > this.size) { | |
throw TypeError('setSize may only downsize'); | |
} | |
return change(this, function (store) { | |
return store.setSize(size); | |
}); | |
} | |
}, { | |
key: 'slice', | |
value: function slice(begin, end) { | |
return change(this, function (store) { | |
return store && store.slice(begin, end); | |
}); | |
} | |
}, { | |
key: 'wasAltered', | |
value: function wasAltered() { | |
return this[$store].wasAltered(); | |
} | |
}, { | |
key: '__ensureOwner', | |
value: function __ensureOwner(ownerID) { | |
var result = this.__ownerID === ownerID ? this : !ownerID ? this : (0, _typed.construct)(this); | |
result.__ownerID = ownerID; | |
result[$store] = this[$store] ? this[$store].__ensureOwner(ownerID) : ImmutableList().__ensureOwner(ownerID); | |
result.size = result[$store].size; | |
return result; | |
} | |
}, { | |
key: '__iterator', | |
value: function __iterator(type, reverse) { | |
var _this = this; | |
return Indexed(this[$store]).map(function (_, key) { | |
return _this.get(key); | |
}).__iterator(type, reverse); | |
} | |
}, { | |
key: '__iterate', | |
value: function __iterate(f, reverse) { | |
var _this2 = this; | |
return Indexed(this[$store]).map(function (_, key) { | |
return _this2.get(key); | |
}).__iterate(f, reverse); | |
} | |
}]); | |
return TypeInferedList; | |
})(BaseImmutableList); | |
TypeInferedList.prototype[_typed.Typed.DELETE] = TypeInferedList.prototype.remove; | |
var BaseTypeInferedList = function BaseTypeInferedList() {}; | |
BaseTypeInferedList.prototype = TypeInferedList.prototype; | |
var TypedList = (function (_BaseTypeInferedList) { | |
function TypedList() { | |
_classCallCheck(this, TypedList); | |
_get(Object.getPrototypeOf(TypedList.prototype), 'constructor', this).call(this); | |
} | |
_inherits(TypedList, _BaseTypeInferedList); | |
_createClass(TypedList, [{ | |
key: _typed.Typed.init, | |
value: function value() { | |
return (0, _typed.construct)(this).asMutable(); | |
} | |
}, { | |
key: _typed.Typed.result, | |
value: function value(result) { | |
return result.asImmutable(); | |
} | |
}, { | |
key: 'map', | |
value: function map(mapper, context) { | |
if (this.size === 0) { | |
return this; | |
} else { | |
var result = TypeInferedList.from(this).map(mapper, context); | |
if (this[$store] === result[$store]) { | |
return this; | |
} | |
if (result[$type] === this[$type]) { | |
var list = (0, _typed.construct)(this); | |
list[$store] = result[$store]; | |
list.size = result.size; | |
return list; | |
} else { | |
return result; | |
} | |
} | |
} | |
}]); | |
return TypedList; | |
})(BaseTypeInferedList); | |
var List = function List(descriptor, label) { | |
var _Object$create; | |
if (descriptor === void 0) { | |
throw TypeError('Typed.List must be passed a type descriptor'); | |
} | |
if (descriptor === _typed.Any) { | |
return _immutable.List; | |
} | |
var type = (0, _typed.typeOf)(descriptor); | |
if (type === _typed.Any) { | |
throw TypeError('Typed.List was passed an invalid type descriptor: ${descriptor}'); | |
} | |
var ListType = function ListType(value) { | |
var isListType = this instanceof ListType; | |
var Type = isListType ? this.constructor : ListType; | |
if (value instanceof Type) { | |
return value; | |
} | |
var result = Type.prototype[$read](value); | |
if (result instanceof TypeError) { | |
throw result; | |
} | |
// `list.map(f)` will in fact cause `list.constructor(items)` to be | |
// invoked there for we need to check if `this[$store]` was | |
// assigned to know if it's that or if it's a `new ListType()` call. | |
if (isListType && !this[$store]) { | |
this[$store] = result[$store]; | |
this.size = result.size; | |
} else { | |
return result; | |
} | |
return this; | |
}; | |
ListType.of = ImmutableList.of; | |
ListType.prototype = Object.create(ListPrototype, (_Object$create = { | |
constructor: { value: ListType } | |
}, _defineProperty(_Object$create, $type, { value: type }), _defineProperty(_Object$create, $label, { value: label }), _Object$create)); | |
return ListType; | |
}; | |
exports.List = List; | |
List.Type = TypedList; | |
List.prototype = TypedList.prototype; | |
var ListPrototype = TypedList.prototype; | |
}); | |
},{"./typed":160,"immutable":82}],159:[function(require,module,exports){ | |
(function (global, factory) { | |
if (typeof define === "function" && define.amd) { | |
define(["exports", "./typed", "immutable"], factory); | |
} else if (typeof exports !== "undefined") { | |
factory(exports, require("./typed"), require("immutable")); | |
} else { | |
var mod = { | |
exports: {} | |
}; | |
factory(mod.exports, global.typed, global.immutable); | |
global.record = mod.exports; | |
} | |
})(this, function (exports, _typed, _immutable) { | |
"use strict"; | |
var _slicedToArray = (function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; })(); | |
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); | |
var _get = function get(_x, _x2, _x3) { var _again = true; _function: while (_again) { var object = _x, property = _x2, receiver = _x3; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x = parent; _x2 = property; _x3 = receiver; _again = true; continue _function; } } else if ("value" in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } }; | |
function _defaults(obj, defaults) { var keys = Object.getOwnPropertyNames(defaults); for (var i = 0; i < keys.length; i++) { var key = keys[i]; var value = Object.getOwnPropertyDescriptor(defaults, key); if (value && value.configurable && obj[key] === undefined) { Object.defineProperty(obj, key, value); } } return obj; } | |
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } | |
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } | |
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) _defaults(subClass, superClass); } | |
var Keyed = _immutable.Iterable.Keyed; | |
var Getter = function Getter(key) { | |
return function () { | |
return this.get(key); | |
}; | |
}; | |
var Setter = function Setter(key) { | |
return function (value) { | |
if (!this.__ownerID) { | |
throw TypeError("Cannot set on an immutable record."); | |
} | |
this.set(key, value); | |
}; | |
}; | |
var $store = _typed.Typed.store; | |
var $type = _typed.Typed.type; | |
var $step = _typed.Typed.step; | |
var $init = _typed.Typed.init; | |
var $result = _typed.Typed.result; | |
var $read = _typed.Typed.read; | |
var $label = _typed.Typed.label; | |
var $empty = _typed.Typed.empty; | |
var $typeName = _typed.Typed.typeName; | |
var $typeSignature = _typed.Typed.typeSignature; | |
var IterableKeyedBase = function IterableKeyedBase() {}; | |
IterableKeyedBase.prototype = _immutable.Iterable.Keyed.prototype; | |
var TypedRecord = (function (_IterableKeyedBase) { | |
function TypedRecord() { | |
_classCallCheck(this, TypedRecord); | |
_get(Object.getPrototypeOf(TypedRecord.prototype), "constructor", this).call(this); | |
} | |
_inherits(TypedRecord, _IterableKeyedBase); | |
_createClass(TypedRecord, [{ | |
key: _typed.Typed.init, | |
value: function value() { | |
return (0, _typed.construct)(this).asMutable(); | |
} | |
}, { | |
key: _typed.Typed.result, | |
value: function value(result) { | |
return result.asImmutable(); | |
} | |
}, { | |
key: _typed.Typed.read, | |
value: function value(structure) { | |
var Type = this.constructor; | |
if (structure instanceof Type && structure && structure.constructor === Type) { | |
return structure; | |
} | |
if (structure === null || structure && typeof structure !== "object") { | |
return TypeError("Invalid data structure \"" + structure + "\" was passed to " + this[$typeName]()); | |
} | |
var seq = (0, _immutable.Seq)(structure); | |
var type = this[$type]; | |
var isEmpty = seq.size === 0; | |
if (isEmpty && this[$empty]) { | |
return this[$empty]; | |
} | |
var record = undefined; | |
for (var key in type) { | |
var fieldType = type[key]; | |
var value = seq.has(key) ? seq.get(key) : this.get(key); | |
var result = fieldType[$read](value); | |
if (result instanceof TypeError) { | |
return TypeError("Invalid value for \"" + key + "\" field:\n " + result.message); | |
} | |
record = this[$step](record || this[$init](), [key, result]); | |
} | |
record = this[$result](record); | |
if (isEmpty) { | |
this[$empty] = record; | |
} | |
return record; | |
} | |
}, { | |
key: _typed.Typed.step, | |
value: function value(result, _ref) { | |
var _ref2 = _slicedToArray(_ref, 2); | |
var key = _ref2[0]; | |
var _value = _ref2[1]; | |
var store = result[$store] ? result[$store].set(key, _value) : new _immutable.Map([[key, _value]]); | |
if (result[$store] === store) { | |
return result; | |
} | |
var record = result.__ownerID ? result : (0, _typed.construct)(result); | |
record[$store] = store; | |
return record; | |
} | |
}, { | |
key: _typed.Typed.typeSignature, | |
value: function value() { | |
var type = this[$type]; | |
var body = []; | |
for (var key in type) { | |
body.push(key + ": " + type[key][$typeName]()); | |
} | |
return "Typed.Record({" + body.join(", ") + "})"; | |
} | |
}, { | |
key: _typed.Typed.typeName, | |
value: function value() { | |
return this[$label] || this[$typeSignature](); | |
} | |
}, { | |
key: "toString", | |
value: function toString() { | |
return this.__toString(this[$typeName]() + "({", "})"); | |
} | |
}, { | |
key: "has", | |
value: function has(key) { | |
return !!this[$type][key]; | |
} | |
}, { | |
key: "get", | |
value: function get(key, defaultValue) { | |
return !this[$type][key] ? defaultValue : !this[$store] ? defaultValue : this[$store].get(key, defaultValue); | |
} | |
}, { | |
key: "clear", | |
value: function clear() { | |
if (this.__ownerID) { | |
this[$store] && this[$store].clear(); | |
return this; | |
} | |
return this[$empty] || (this[$empty] = new this.constructor()); | |
} | |
}, { | |
key: "remove", | |
value: function remove(key) { | |
return this[$type][key] ? this.set(key, void 0) : this; | |
} | |
}, { | |
key: "set", | |
value: function set(key, value) { | |
var fieldType = this[$type][key]; | |
if (!fieldType) { | |
throw TypeError("Cannot set unknown field \"" + key + "\" on \"" + this[$typeName]() + "\""); | |
} | |
var result = fieldType[$read](value); | |
if (result instanceof TypeError) { | |
throw TypeError("Invalid value for " + key + " field: " + result.message); | |
} | |
return this[$step](this, [key, result]); | |
} | |
}, { | |
key: "__iterator", | |
value: function __iterator(type, reverse) { | |
var _this = this; | |
return Keyed(this[$type]).map(function (_, key) { | |
return _this.get(key); | |
}).__iterator(type, reverse); | |
} | |
}, { | |
key: "__iterate", | |
value: function __iterate(f, reverse) { | |
var _this2 = this; | |
return Keyed(this[$type]).map(function (_, key) { | |
return _this2.get(key); | |
}).__iterate(f, reverse); | |
} | |
}, { | |
key: "__ensureOwner", | |
value: function __ensureOwner(ownerID) { | |
if (ownerID === this.__ownerID) { | |
return this; | |
} | |
var store = this[$store] && this[$store].__ensureOwner(ownerID); | |
var result = !ownerID ? this : (0, _typed.construct)(this); | |
result.__ownerID = ownerID; | |
result[$store] = store; | |
return result; | |
} | |
}, { | |
key: "wasAltered", | |
value: function wasAltered() { | |
return this[$store].wasAltered(); | |
} | |
}]); | |
return TypedRecord; | |
})(IterableKeyedBase); | |
var Record = function Record(descriptor, label) { | |
if (descriptor && typeof descriptor === "object") { | |
var type = Object.create(null); | |
var _keys = Object.keys(descriptor); | |
var size = _keys.length; | |
if (size > 0) { | |
var _properties; | |
var _ret = (function () { | |
var properties = (_properties = { | |
size: { value: size } | |
}, _defineProperty(_properties, $type, { value: type }), _defineProperty(_properties, $label, { value: label }), _properties); | |
var index = 0; | |
while (index < size) { | |
var key = _keys[index]; | |
var fieldType = (0, _typed.typeOf)(descriptor[key]); | |
if (fieldType) { | |
type[key] = fieldType; | |
properties[key] = { get: Getter(key), set: Setter(key), enumerable: true }; | |
} else { | |
throw TypeError("Invalid field descriptor provided for a \"" + key + "\" field"); | |
} | |
index = index + 1; | |
} | |
var RecordType = function RecordType(structure) { | |
var isNew = this instanceof RecordType; | |
var constructor = isNew ? this.constructor : RecordType; | |
if (structure instanceof constructor) { | |
return structure; | |
} | |
var type = constructor.prototype; | |
var result = type[$read](structure); | |
if (result instanceof TypeError) { | |
throw result; | |
} | |
if (isNew) { | |
this[$store] = result[$store]; | |
} else { | |
return result; | |
} | |
}; | |
properties.constructor = { value: RecordType }; | |
RecordType.prototype = Object.create(RecordPrototype, properties); | |
var prototype = RecordType.prototype; | |
return { | |
v: RecordType | |
}; | |
})(); | |
if (typeof _ret === "object") return _ret.v; | |
} else { | |
throw TypeError("Typed.Record descriptor must define at least on field"); | |
} | |
} else { | |
throw TypeError("Typed.Record must be passed a descriptor of fields"); | |
} | |
}; | |
exports.Record = Record; | |
Record.Type = TypedRecord; | |
Record.prototype = TypedRecord.prototype; | |
var RecordPrototype = TypedRecord.prototype; | |
RecordPrototype[_typed.Typed.DELETE] = RecordPrototype.remove; | |
// Large part of the Record API is implemented by Immutabel.Map | |
// and is just copied over. | |
RecordPrototype.deleteIn = _immutable.Map.prototype.deleteIn; | |
RecordPrototype.removeIn = _immutable.Map.prototype.removeIn; | |
RecordPrototype.merge = _immutable.Map.prototype.merge; | |
RecordPrototype.mergeWith = _immutable.Map.prototype.mergeWith; | |
RecordPrototype.mergeIn = _immutable.Map.prototype.mergeIn; | |
RecordPrototype.mergeDeep = _immutable.Map.prototype.mergeDeep; | |
RecordPrototype.mergeDeepWith = _immutable.Map.prototype.mergeDeepWith; | |
RecordPrototype.mergeDeepIn = _immutable.Map.prototype.mergeDeepIn; | |
RecordPrototype.setIn = _immutable.Map.prototype.setIn; | |
RecordPrototype.update = _immutable.Map.prototype.update; | |
RecordPrototype.updateIn = _immutable.Map.prototype.updateIn; | |
RecordPrototype.withMutations = _immutable.Map.prototype.withMutations; | |
RecordPrototype.asMutable = _immutable.Map.prototype.asMutable; | |
RecordPrototype.asImmutable = _immutable.Map.prototype.asImmutable; | |
// Large chuck of API inherited from Iterable does not makes | |
// much sense in the context of records so we undefine it. | |
RecordPrototype.map = void 0; | |
RecordPrototype.filter = void 0; | |
RecordPrototype.filterNot = void 0; | |
RecordPrototype.flip = void 0; | |
RecordPrototype.mapKeys = void 0; | |
RecordPrototype.mapEntries = void 0; | |
RecordPrototype.sort = void 0; | |
RecordPrototype.sortBy = void 0; | |
RecordPrototype.reverse = void 0; | |
RecordPrototype.slice = void 0; | |
RecordPrototype.butLast = void 0; | |
RecordPrototype.flatMap = void 0; | |
RecordPrototype.flatten = void 0; | |
RecordPrototype.rest = void 0; | |
RecordPrototype.skip = void 0; | |
RecordPrototype.skipLast = void 0; | |
RecordPrototype.skipWhile = void 0; | |
RecordPrototype.skipUntil = void 0; | |
RecordPrototype.sortBy = void 0; | |
RecordPrototype.take = void 0; | |
RecordPrototype.takeLast = void 0; | |
RecordPrototype.takeWhile = void 0; | |
RecordPrototype.takeUntil = void 0; | |
}); | |
},{"./typed":160,"immutable":82}],160:[function(require,module,exports){ | |
(function (global, factory) { | |
if (typeof define === 'function' && define.amd) { | |
define(['exports', 'immutable'], factory); | |
} else if (typeof exports !== 'undefined') { | |
factory(exports, require('immutable')); | |
} else { | |
var mod = { | |
exports: {} | |
}; | |
factory(mod.exports, global.Immutable); | |
global.typed = mod.exports; | |
} | |
})(this, function (exports, _immutable) { | |
'use strict'; | |
var _createClass = (function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ('value' in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; })(); | |
var _get = function get(_x3, _x4, _x5) { var _again = true; _function: while (_again) { var object = _x3, property = _x4, receiver = _x5; desc = parent = getter = undefined; _again = false; if (object === null) object = Function.prototype; var desc = Object.getOwnPropertyDescriptor(object, property); if (desc === undefined) { var parent = Object.getPrototypeOf(object); if (parent === null) { return undefined; } else { _x3 = parent; _x4 = property; _x5 = receiver; _again = true; continue _function; } } else if ('value' in desc) { return desc.value; } else { var getter = desc.get; if (getter === undefined) { return undefined; } return getter.call(receiver); } } }; | |
function _defaults(obj, defaults) { var keys = Object.getOwnPropertyNames(defaults); for (var i = 0; i < keys.length; i++) { var key = keys[i]; var value = Object.getOwnPropertyDescriptor(defaults, key); if (value && value.configurable && obj[key] === undefined) { Object.defineProperty(obj, key, value); } } return obj; } | |
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError('Cannot call a class as a function'); } } | |
function _inherits(subClass, superClass) { if (typeof superClass !== 'function' && superClass !== null) { throw new TypeError('Super expression must either be null or a function, not ' + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) _defaults(subClass, superClass); } | |
if (typeof Symbol === 'undefined') { | |
var Symbol = function Symbol(hint) { | |
return '@@' + hint; | |
}; | |
Symbol['for'] = Symbol; | |
} | |
function Construct() {} | |
var construct = function construct(value) { | |
Construct.prototype = value.constructor.prototype; | |
return new Construct(); | |
}; | |
exports.construct = construct; | |
var $type = Symbol['for']('typed/type'); | |
var $store = Symbol['for']('typed/store'); | |
var $empty = Symbol['for']('typed/empty'); | |
var $maybe = Symbol['for']('typed/type/maybe'); | |
var $default = Symbol['for']('typed/type/default'); | |
var $label = Symbol['for']('typed/type/label'); | |
var $init = Symbol['for']('transducer/init'); | |
var $result = Symbol['for']('transducer/result'); | |
var $step = Symbol['for']('transducer/step'); | |
var $read = Symbol['for']('typed/type/read'); | |
var $parse = Symbol['for']('typed/type/parse'); | |
var $typeName = Symbol('typed/type/name'); | |
var $typeSignature = Symbol('typed/type/signature'); | |
var Typed = function Typed(label, parse, defaultValue) { | |
var ValueType = (function (_Type) { | |
function ValueType(defaultValue) { | |
_classCallCheck(this, ValueType); | |
_get(Object.getPrototypeOf(ValueType.prototype), 'constructor', this).call(this); | |
this[$default] = defaultValue; | |
} | |
_inherits(ValueType, _Type); | |
return ValueType; | |
})(Type); | |
var prototype = ValueType.prototype; | |
prototype[$default] = defaultValue; | |
prototype[$parse] = parse; | |
prototype[$label] = label; | |
var TypedValue = function TypedValue(defaultValue) { | |
return defaultValue === void 0 ? prototype : new ValueType(defaultValue); | |
}; | |
TypedValue.prototype = prototype; | |
return TypedValue; | |
}; | |
exports.Typed = Typed; | |
Typed.label = $label; | |
Typed.defaultValue = $default; | |
Typed.read = $read; | |
Typed.typeName = $typeName; | |
Typed.typeSignature = $typeSignature; | |
Typed.type = $type; | |
Typed.store = $store; | |
Typed.init = $init; | |
Typed.result = $result; | |
Typed.step = $step; | |
Typed.DELETE = 'delete'; | |
Typed.empty = $empty; | |
var typeName = function typeName(type) { | |
return type[$typeName](); | |
}; | |
var typeSignature = function typeSignature(type) { | |
return type[$typeSignature](); | |
}; | |
var Type = (function () { | |
function Type() { | |
_classCallCheck(this, Type); | |
} | |
_createClass(Type, [{ | |
key: Typed.read, | |
value: function value() { | |
var _value = arguments.length <= 0 || arguments[0] === undefined ? this[$default] : arguments[0]; | |
return this[$parse](_value); | |
} | |
}, { | |
key: Typed.parse, | |
value: function value(_value2) { | |
throw TypeError('Type implementation must implement "[read.symbol]" method'); | |
} | |
}, { | |
key: Typed.typeName, | |
value: function value() { | |
var label = this[$label]; | |
var defaultValue = this[$default]; | |
return defaultValue === void 0 ? label : label + '(' + JSON.stringify(defaultValue) + ')'; | |
} | |
}]); | |
return Type; | |
})(); | |
exports.Type = Type; | |
var ObjectPrototype = Object.prototype; | |
// Returns `true` if given `x` is a JS array. | |
var isArray = Array.isArray || function (x) { | |
return ObjectPrototype.toString.call(x) === '[object Array]'; | |
}; | |
// Returns `true` if given `x` is a regular expression. | |
var isRegExp = function isRegExp(x) { | |
return ObjectPrototype.toString.call(x) === '[object RegExp]'; | |
}; | |
var typeOf = function typeOf(x) { | |
var type = arguments.length <= 1 || arguments[1] === undefined ? typeof x : arguments[1]; | |
return (function () { | |
return x === void 0 ? x : x === null ? x : x[$read] ? x : x.prototype && x.prototype[$read] ? x.prototype : type === 'number' ? new Typed.Number(x) : type === 'string' ? new Typed.String(x) : type === 'boolean' ? new Typed.Boolean(x) : type === 'symbol' ? new Typed.Symbol(x) : isArray(x) ? Typed.Array(x) : isRegExp(x) ? new Typed.RegExp(x) : x === String ? Typed.String.prototype : x === Number ? Typed.Number.prototype : x === Boolean ? Typed.Boolean.prototype : x === RegExp ? Typed.RegExp.prototype : x === Array ? Typed.Array.prototype : x === Symbol ? Typed.Symbol.prototype : x === Date ? Typed.Date.prototype : Any; | |
})(); | |
}; | |
exports.typeOf = typeOf; | |
var Any = Typed('Any', function (value) { | |
return value; | |
})(); | |
exports.Any = Any; | |
Typed.Any = Any; | |
Typed.Number = Typed('Number', function (value) { | |
return typeof value === 'number' ? value : TypeError('"' + value + '" is not a number'); | |
}); | |
Typed.String = Typed('String', function (value) { | |
return typeof value === 'string' ? value : TypeError('"' + value + '" is not a string'); | |
}); | |
Typed.Symbol = Typed('Symbol', function (value) { | |
return typeof value === 'symbol' ? value : TypeError('"' + value + '" is not a symbol'); | |
}); | |
Typed.Array = Typed('Array', function (value) { | |
return isArray(value) ? value : TypeError('"' + value + '" is not an array'); | |
}); | |
Typed.RegExp = Typed('RegExp', function (value) { | |
return value instanceof RegExp ? value : TypeError('"' + value + '" is not a regexp'); | |
}); | |
Typed.Boolean = Typed('Boolean', function (value) { | |
return value === true ? true : value === false ? false : TypeError('"' + value + '" is not a boolean'); | |
}); | |
var MaybeType = (function (_Type2) { | |
function MaybeType(type) { | |
_classCallCheck(this, MaybeType); | |
_get(Object.getPrototypeOf(MaybeType.prototype), 'constructor', this).call(this); | |
this[$type] = type; | |
} | |
_inherits(MaybeType, _Type2); | |
_createClass(MaybeType, [{ | |
key: Typed.typeName, | |
value: function value() { | |
return 'Maybe(' + this[$type][$typeName]() + ')'; | |
} | |
}, { | |
key: Typed.read, | |
value: function value(_value3) { | |
var result = _value3 == null ? null : this[$type][$read](_value3); | |
return !(result instanceof TypeError) ? result : TypeError('"' + _value3 + '" is not nully nor it is of ' + this[$type][$typeName]() + ' type'); | |
} | |
}]); | |
return MaybeType; | |
})(Type); | |
var Maybe = function Maybe(Type) { | |
var type = typeOf(Type); | |
if (type === Any) { | |
throw TypeError(Type + ' is not a valid type'); | |
} | |
return type[$maybe] || (type[$maybe] = new MaybeType(type)); | |
}; | |
exports.Maybe = Maybe; | |
Maybe.Type = MaybeType; | |
var UnionType = (function (_Type3) { | |
function UnionType(variants) { | |
_classCallCheck(this, UnionType); | |
_get(Object.getPrototypeOf(UnionType.prototype), 'constructor', this).call(this); | |
this[$type] = variants; | |
} | |
_inherits(UnionType, _Type3); | |
_createClass(UnionType, [{ | |
key: Typed.typeName, | |
value: function value() { | |
return 'Union(' + this[$type].map(typeName).join(', ') + ')'; | |
} | |
}, { | |
key: Typed.read, | |
value: function value(_value4) { | |
var variants = this[$type]; | |
var count = variants.length; | |
var index = 0; | |
while (index < count) { | |
var variant = variants[index]; | |
if (_value4 instanceof variant.constructor) { | |
return _value4; | |
} | |
index = index + 1; | |
} | |
index = 0; | |
while (index < count) { | |
var result = variants[index][$read](_value4); | |
if (!(result instanceof TypeError)) { | |
return result; | |
} | |
index = index + 1; | |
} | |
return TypeError('"' + _value4 + '" does not satisfy ' + this[$typeName]() + ' type'); | |
} | |
}]); | |
return UnionType; | |
})(Type); | |
// Returns `xs` excluding any values that are included in `ys`. | |
var subtract = function subtract(xs, ys) { | |
return xs.filter(function (x) { | |
return ys.indexOf(x) < 0; | |
}); | |
}; | |
// Returns array including all values from `xs` and all values from | |
// `ys` that aren't already included in `xs`. It will also attempt | |
// to return either `xs` or `ys` if one of them is a superset of other. | |
// return `xs` or `ys` if | |
var union = function union(xs, ys) { | |
// xs can be superset only if it contains more items then | |
// ys. If that's a case find items in ys that arent included | |
// in xs. If such items do not exist return back `xs` otherwise | |
// return concatination of xs with those items. | |
// those items | |
if (xs.length > ys.length) { | |
var diff = subtract(ys, xs); | |
return diff.length === 0 ? xs : xs.concat(diff); | |
} | |
// if number of items in xs is not greater than number of items in ys | |
// then either xs is either subset or equal of `ys`. There for we find | |
// ys that are not included in `xs` if such items aren't found ys is | |
// either superset or equal so just return ys otherwise return concatination | |
// of those items with `ys`. | |
else { | |
var diff = subtract(xs, ys); | |
return diff.length === 0 ? ys : diff.concat(ys); | |
} | |
}; | |
var Union = function Union() { | |
for (var _len = arguments.length, Types = Array(_len), _key = 0; _key < _len; _key++) { | |
Types[_key] = arguments[_key]; | |
} | |
var count = Types.length; | |
if (count === 0) { | |
throw TypeError('Union must be of at at least one type'); | |
} | |
var variants = null; | |
var type = null; | |
var index = 0; | |
while (index < count) { | |
var variant = typeOf(Types[index]); | |
// If there is `Any` present than union is also `Any`. | |
if (variant === Any) { | |
return Any; | |
} | |
// If this is the first type we met than we assume it's the | |
// one that satisfies all types. | |
if (!variants) { | |
type = variant; | |
variants = type instanceof UnionType ? type[$type] : [variant]; | |
} else if (variants.indexOf(variant) < 0) { | |
// If current reader is of union type | |
if (variant instanceof UnionType) { | |
var variantUnion = union(variants, variant[$type]); | |
// If `reader.readers` matches union of readers, then | |
// current reader is a superset so we use it as a type | |
// that satisfies all types. | |
if (variantUnion === variant[$type]) { | |
type = variant; | |
variants = variantUnion; | |
} | |
// If current readers is not the union than it does not | |
// satisfy currenty reader. There for we update readers | |
// and unset a type. | |
else if (variantUnion !== variants) { | |
type = null; | |
variants = variantUnion; | |
} | |
} else { | |
type = null; | |
variants.push(variant); | |
} | |
} | |
index = index + 1; | |
} | |
return type ? type : new UnionType(variants); | |
}; | |
exports.Union = Union; | |
Union.Type = UnionType; | |
Typed.Number.Range = function (from, to, defaultValue) { | |
if (to === undefined) to = +Infinity; | |
return Typed('Typed.Number.Range(' + from + '..' + to + ')', function (value) { | |
if (typeof value !== 'number') { | |
return TypeError('"' + value + '" is not a number'); | |
} | |
if (!(value >= from && value <= to)) { | |
return TypeError('"' + value + '" isn\'t in the range of ' + from + '..' + to); | |
} | |
return value; | |
}, defaultValue); | |
}; | |
}); | |
},{"immutable":82}],161:[function(require,module,exports){ | |
/* This Source Code Form is subject to the terms of the Mozilla Public | |
* License, v. 2.0. If a copy of the MPL was not distributed with this | |
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ | |
'use strict'; | |
var _defineProperty = require('babel-runtime/helpers/define-property')['default']; | |
var _createClass = require('babel-runtime/helpers/create-class')['default']; | |
var _classCallCheck = require('babel-runtime/helpers/class-call-check')['default']; | |
var _regeneratorRuntime = require('babel-runtime/regenerator')['default']; | |
var _Object$assign = require('babel-runtime/core-js/object/assign')['default']; | |
var _Promise = require('babel-runtime/core-js/promise')['default']; | |
var _Record, _Record2, _Record3, _Record4, _Record5; | |
var PouchDB = require('pouchdb'); | |
var _require = require('../lang/task'); | |
var spawn = _require.spawn; | |
var async = _require.async; | |
var schedule = _require.schedule; | |
var _require2 = require('../lang/functional'); | |
var identity = _require2.identity; | |
var _require3 = require('typed-immutable'); | |
// PouchDB has a sepcial field `_id` for identifing records | |
// and `_rev` for identifiying revisitions. We will refer to | |
// those properties as `[PouchDB.id]` & `[PouchDB.revision]` | |
// instead. | |
var Record = _require3.Record; | |
var List = _require3.List; | |
var Maybe = _require3.Maybe; | |
var Any = _require3.Any; | |
var Union = _require3.Union; | |
PouchDB.id = "_id"; | |
PouchDB.revision = "_rev"; | |
// Type aliases that may be enhanced further sometime later. | |
var ID = String; | |
var Revision = String; | |
var TimeStamp = Number; | |
var Type = String; | |
var URI = String; | |
var TagName = String; | |
var Blob = Any; | |
// Helper function to convert string to a ArrayBuffer instance. | |
var stringToBuffer = function stringToBuffer(string) { | |
return new TextEncoder().encode(string); | |
}; | |
// Helper function to create cryptographic hash of the content. | |
var sha = function sha(string) { | |
return crypto.subtle.digest("SHA-256", stringToBuffer(string)).then(btoa); | |
}; | |
var stub = function stub(record) { | |
return record.constructor.Stub(record); | |
}; | |
var hash = function hash(record) { | |
return record.constructor.hash(record); | |
}; | |
var read = async(_regeneratorRuntime.mark(function callee$0$0(db, record) { | |
var data; | |
return _regeneratorRuntime.wrap(function callee$0$0$(context$1$0) { | |
while (1) switch (context$1$0.prev = context$1$0.next) { | |
case 0: | |
context$1$0.prev = 0; | |
context$1$0.next = 3; | |
return db.get(record[PouchDB.id]); | |
case 3: | |
data = context$1$0.sent; | |
return context$1$0.abrupt('return', new record.constructor(data)); | |
case 7: | |
context$1$0.prev = 7; | |
context$1$0.t0 = context$1$0['catch'](0); | |
if (!(context$1$0.t0.status !== 404)) { | |
context$1$0.next = 11; | |
break; | |
} | |
throw context$1$0.t0; | |
case 11: | |
return context$1$0.abrupt('return', record); | |
case 12: | |
case 'end': | |
return context$1$0.stop(); | |
} | |
}, callee$0$0, this, [[0, 7]]); | |
})); | |
var upsert = _regeneratorRuntime.mark(function upsert(db, record, change) { | |
var current; | |
return _regeneratorRuntime.wrap(function upsert$(context$1$0) { | |
while (1) switch (context$1$0.prev = context$1$0.next) { | |
case 0: | |
if (!true) { | |
context$1$0.next = 14; | |
break; | |
} | |
context$1$0.next = 3; | |
return read(db, record); | |
case 3: | |
current = context$1$0.sent; | |
context$1$0.prev = 4; | |
return context$1$0.abrupt('return', db.put(change(current).toJSON())); | |
case 8: | |
context$1$0.prev = 8; | |
context$1$0.t0 = context$1$0['catch'](4); | |
if (!(context$1$0.t0.status !== 409)) { | |
context$1$0.next = 12; | |
break; | |
} | |
throw context$1$0.t0; | |
case 12: | |
context$1$0.next = 0; | |
break; | |
case 14: | |
case 'end': | |
return context$1$0.stop(); | |
} | |
}, upsert, this, [[4, 8]]); | |
}); | |
var push = function push(x) { | |
return function (xs) { | |
return xs.push(x); | |
}; | |
}; | |
var remove = function remove(x) { | |
return function (xs) { | |
return xs.remove(x); | |
}; | |
}; | |
var exclude = function exclude(x) { | |
return function (xs) { | |
var index = xs.indexOf(x); | |
return index < 0 ? xs : index == 0 ? xs.rest() : index + 1 == xs.size ? xs.butLast() : xs.take(index).concat(xs.skip(index + 1)); | |
}; | |
}; | |
var include = function include(x) { | |
return function (xs) { | |
return xs.indexOf(x) < 0 ? xs.push(x) : xs; | |
}; | |
}; | |
/* | |
## Interests API | |
### Pages | |
Pages store contains records of pages that have being visited by a user. | |
Pages store contains records of the following schema (which could be extended | |
in the future). | |
{ | |
_id: "Page/http://learnyouahaskell.com/introduction#about-this-tutorial", | |
uri: "http://learnyouahaskell.com/introduction#about-this-tutorial", | |
title: "Introduction - Learn You a Haskell for Great Good!", | |
backgroundColor: "rgb(255, 255, 255)", | |
visits: [ | |
{ | |
start: 1421434329682266, | |
end: 1421434484899, | |
device: "Desktop" | |
} | |
], | |
tags: [ | |
"haskell", | |
"functional" | |
] | |
} | |
*/ | |
var Visit = Record({ | |
type: Type('Visit'), | |
start: TimeStamp, | |
end: Maybe(TimeStamp), | |
id: String, // This is a web view ID in our case. | |
device: String('Desktop') | |
}); | |
var Page = Record((_Record = {}, _defineProperty(_Record, PouchDB.id, ID), _defineProperty(_Record, PouchDB.revision, Maybe(Revision)), _defineProperty(_Record, 'type', Type('Page')), _defineProperty(_Record, 'uri', URI), _defineProperty(_Record, 'title', Maybe(String)), _defineProperty(_Record, 'visits', List(Visit)), _defineProperty(_Record, 'tags', List(TagName)), _defineProperty(_Record, 'icon', Maybe(URI)), _defineProperty(_Record, 'image', Blob), _Record)); | |
Page.frequency = function (_ref) { | |
var visits = _ref.visits; | |
return visits.count(); | |
}; | |
Page.from = function (_ref2) { | |
var _Page; | |
var uri = _ref2.uri; | |
var title = _ref2.title; | |
return Page((_Page = {}, _defineProperty(_Page, PouchDB.id, 'Page/' + uri), _defineProperty(_Page, 'uri', uri), _defineProperty(_Page, 'title', title), _Page)); | |
}; | |
Page.beginVisit = function (_ref3) { | |
var time = _ref3.time; | |
var id = _ref3.id; | |
var device = _ref3.device; | |
return function (page) { | |
return page.update('visits', push(Visit({ start: time, id: id, device: device }))); | |
}; | |
}; | |
Page.endVisit = function (_ref4) { | |
var id = _ref4.id; | |
var time = _ref4.time; | |
return function (page) { | |
var index = page.visits.findIndex(function (visit) { | |
return visit.id === id; | |
}); | |
return index < 0 ? page : page.setIn(['visits', index, 'end'], time); | |
}; | |
}; | |
/** | |
### Quotes | |
Quotes store contains records of quotes that have being created by a user that | |
have a following structure. | |
{ | |
_id: "quote/W29iamVjdCBBcnJheUJ1ZmZlcl0=", | |
content: `If you say that <span class="fixed">a</span> is 5, you can't say it's something else later because you just said it was 5. What are you, some kind of liar? So in purely functional languages, a function has no side-effects. The only thing a function can do is calculate something and return it as a result.` | |
uri: "http://learnyouahaskell.com/introduction#about-this-tutorial", | |
tags: ["functional"] | |
} | |
**/ | |
var Quote = Record((_Record2 = {}, _defineProperty(_Record2, PouchDB.id, ID), _defineProperty(_Record2, PouchDB.revision, Maybe(Revision)), _defineProperty(_Record2, 'type', Type('Quote')), _defineProperty(_Record2, 'uri', URI), _defineProperty(_Record2, 'content', String), _defineProperty(_Record2, 'tags', List(TagName)), _Record2)); | |
Quote.construct = async(_regeneratorRuntime.mark(function callee$0$0(_ref5) { | |
var _Quote; | |
var uri = _ref5.uri; | |
var content = _ref5.content; | |
var hash; | |
return _regeneratorRuntime.wrap(function callee$0$0$(context$1$0) { | |
while (1) switch (context$1$0.prev = context$1$0.next) { | |
case 0: | |
context$1$0.next = 2; | |
return sha(content); | |
case 2: | |
hash = context$1$0.sent; | |
return context$1$0.abrupt('return', Quote((_Quote = {}, _defineProperty(_Quote, PouchDB.id, 'Quote/' + uri + '/' + hash), _defineProperty(_Quote, 'uri', uri), _defineProperty(_Quote, 'content', content), _Quote))); | |
case 4: | |
case 'end': | |
return context$1$0.stop(); | |
} | |
}, callee$0$0, this); | |
})); | |
/** | |
{ | |
_id: "tag/haskell", | |
description: "Haskell programming language", | |
name: "haskell", | |
items: [ | |
"Quote/W29iamVjdCBBcnJheUJ1ZmZlcl0=" | |
"Page/http://learnyouahaskell.com" | |
] | |
} | |
**/ | |
var Tag = Record((_Record3 = {}, _defineProperty(_Record3, PouchDB.id, ID), _defineProperty(_Record3, PouchDB._rev, Maybe(Revision)), _defineProperty(_Record3, 'type', Type("Tag")), _defineProperty(_Record3, 'name', TagName), _defineProperty(_Record3, 'items', List(ID)), _Record3)); | |
Tag.add = function (tag, item) { | |
return tag.update('items', include(item[PouchDB.id])); | |
}; | |
Tag.remove = function (tag, item) { | |
return tag.update('items', exclude(item[PouchDB.id])); | |
}; | |
Tag.tag = function (item, tagName) { | |
return item.update('tags', include(tagName)); | |
}; | |
Tag.untag = function (item, tagName) { | |
return item.update('tags', exclude(tagName)); | |
}; | |
var TopPages = Record((_Record4 = {}, _defineProperty(_Record4, PouchDB.id, ID('TopPages')), _defineProperty(_Record4, PouchDB.revision, Maybe(Revision)), _defineProperty(_Record4, 'type', Type('TopPages')), _defineProperty(_Record4, 'pages', List(Page)), _Record4)); | |
TopPages.sample = function (page, limit) { | |
return function (top) { | |
return top.update('pages', function (pages) { | |
var index = pages.findIndex(function (x) { | |
return x[PouchDB.id] === page[PouchDB.id]; | |
}); | |
return pages.set(index < 0 ? pages.size : index, page).sortBy(Page.frequency).take(limit); | |
}); | |
}; | |
}; | |
var PopularSitesImported = Record((_Record5 = {}, _defineProperty(_Record5, PouchDB.id, ID('PopularSitesImported')), _defineProperty(_Record5, PouchDB.revision, Maybe(Revision)), _defineProperty(_Record5, 'value', Boolean(false)), _Record5)); | |
// History | |
var History = (function () { | |
_createClass(History, null, [{ | |
key: 'defaults', | |
value: function defaults() { | |
return { | |
name: 'history', | |
topPageLimit: 6, | |
address: null | |
}; | |
} | |
}]); | |
function History() { | |
var options = arguments.length <= 0 || arguments[0] === undefined ? {} : arguments[0]; | |
_classCallCheck(this, History); | |
this.onPageChange = this.onPageChange.bind(this); | |
this.options = _Object$assign(History.defaults(), options); | |
this.db = new PouchDB(this.options); | |
this.trackTopPages(); | |
this.importPopularSites(); | |
} | |
_createClass(History, [{ | |
key: 'importPopularSites', | |
value: function importPopularSites() { | |
spawn.call(this, _regeneratorRuntime.mark(function callee$2$0() { | |
var imported, request, sites, tasks; | |
return _regeneratorRuntime.wrap(function callee$2$0$(context$3$0) { | |
var _this = this; | |
while (1) switch (context$3$0.prev = context$3$0.next) { | |
case 0: | |
context$3$0.next = 2; | |
return read(this.db, PopularSitesImported()); | |
case 2: | |
imported = context$3$0.sent; | |
if (imported.value) { | |
context$3$0.next = 15; | |
break; | |
} | |
context$3$0.next = 6; | |
return fetch('src/alexa.json'); | |
case 6: | |
request = context$3$0.sent; | |
context$3$0.next = 9; | |
return request.json(); | |
case 9: | |
sites = context$3$0.sent; | |
tasks = sites.map(function (site) { | |
return _this.edit(Page.from({ uri: 'http://' + site + '/', title: site }), identity); | |
}); | |
context$3$0.next = 13; | |
return _Promise.all(tasks); | |
case 13: | |
context$3$0.next = 15; | |
return this.edit(imported, function (record) { | |
return record.set('value', true); | |
}); | |
case 15: | |
case 'end': | |
return context$3$0.stop(); | |
} | |
}, callee$2$0, this); | |
})); | |
} | |
}, { | |
key: 'trackTopPages', | |
value: function trackTopPages() { | |
this.pagesChangeFeed = this.db.changes({ | |
since: "now", | |
live: true, | |
filter: function filter(_ref6) { | |
var id = _ref6[PouchDB.id]; | |
return id.startsWith("Page/"); | |
}, | |
include_docs: true | |
}); | |
this.pagesChangeFeed.on("change", this.onPageChange); | |
} | |
// Edits are scheduled by a record id to avoid obvious conflicts with | |
// in the same node transations. | |
}, { | |
key: 'edit', | |
value: function edit(record, change) { | |
return schedule(record[PouchDB.id], upsert, this.db, record, change); | |
} | |
}, { | |
key: 'clear', | |
value: function clear() { | |
return this.db.destroy(); | |
} | |
}, { | |
key: 'query', | |
value: function query(_ref7) { | |
var type = _ref7.type; | |
var docs = _ref7.docs; | |
return this.db.allDocs({ | |
include_docs: docs, | |
startkey: type && type + '/', | |
endkey: type && type + '/' | |
}); | |
} | |
}, { | |
key: 'onPageChange', | |
value: function onPageChange(_ref8) { | |
var doc = _ref8.doc; | |
var page = Page(doc); | |
this.edit(TopPages(), TopPages.sample(page, this.options.topPageLimit)); | |
if (this.options.address) { | |
this.options.address.send(page); | |
} | |
} | |
}]); | |
return History; | |
})(); | |
; | |
exports.History = History; | |
exports.Page = Page; | |
exports.Tag = Tag; | |
exports.TopPages = TopPages; | |
exports.Quote = Quote; | |
// Would be better if it was Blob as well. | |
},{"../lang/functional":162,"../lang/task":163,"babel-runtime/core-js/object/assign":2,"babel-runtime/core-js/promise":5,"babel-runtime/helpers/class-call-check":8,"babel-runtime/helpers/create-class":9,"babel-runtime/helpers/define-property":10,"babel-runtime/regenerator":77,"pouchdb":113,"typed-immutable":157}],162:[function(require,module,exports){ | |
/* This Source Code Form is subject to the terms of the Mozilla Public | |
* License, v. 2.0. If a copy of the MPL was not distributed with this | |
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ | |
'use strict'; | |
var _toConsumableArray = require('babel-runtime/helpers/to-consumable-array')['default']; | |
var compose = function compose() { | |
for (var _len = arguments.length, lambdas = Array(_len), _key = 0; _key < _len; _key++) { | |
lambdas[_key] = arguments[_key]; | |
} | |
/** | |
Returns the composition of a list of functions, where each function | |
consumes the return value of the function that follows. In math | |
terms, composing the functions `f()`, `g()`, and `h()` produces | |
`f(g(h()))`. | |
Usage: | |
var square = function(x) { return x * x } | |
var increment = function(x) { return x + 1 } | |
var f1 = compose(increment, square) | |
f1(5) // => 26 | |
var f2 = compose(square, increment) | |
f2(5) // => 36 | |
**/ | |
return function () { | |
var index = lambdas.length; | |
var result = lambdas[--index].apply(lambdas, arguments); | |
while (--index >= 0) { | |
result = lambdas[index](result); | |
} | |
return result; | |
}; | |
}; | |
var partial = function partial(lambda) { | |
for (var _len2 = arguments.length, curried = Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) { | |
curried[_key2 - 1] = arguments[_key2]; | |
} | |
return function () { | |
for (var _len3 = arguments.length, passed = Array(_len3), _key3 = 0; _key3 < _len3; _key3++) { | |
passed[_key3] = arguments[_key3]; | |
} | |
return lambda.apply(undefined, curried.concat(passed)); | |
}; | |
}; | |
var curry = function curry(lambda, arity, curried) { | |
if (arity === undefined) arity = lambda.length; | |
return (function () { | |
/** | |
Returns function with implicit currying, which will continue currying until | |
expected number of argument is collected. Expected number of arguments is | |
determined by `lambda.length` unless it's 0. In later case function will be | |
assumed to be variadic and will be curried until invoked with `0` arguments. | |
Optionally `arity` of curried arguments can be overridden via second `arity` | |
argument. | |
## Examples | |
var sum = curry(function(a, b) { | |
return a + b | |
}) | |
console.log(sum(2, 2)) // 4 | |
console.log(sum(2)(4)) // 6 | |
var sum = curry(function() { | |
return Array.prototype.reduce.call(arguments, function(sum, number) { | |
return sum + number | |
}, 0) | |
}) | |
console.log(sum(2, 2)()) // 4 | |
console.log(sum(2, 4, 5)(-3)(1)()) // 9 | |
**/ | |
return function () { | |
for (var _len4 = arguments.length, passed = Array(_len4), _key4 = 0; _key4 < _len4; _key4++) { | |
passed[_key4] = arguments[_key4]; | |
} | |
var args = curried ? [].concat(_toConsumableArray(curried), passed) : passed; | |
return args.length >= arity ? lambda.apply(undefined, _toConsumableArray(args)) : curry(lambda, arity, args); | |
}; | |
})(); | |
}; | |
var arity = function arity(n, f) { | |
return function () { | |
for (var _len5 = arguments.length, params = Array(_len5), _key5 = 0; _key5 < _len5; _key5++) { | |
params[_key5] = arguments[_key5]; | |
} | |
return f.apply(undefined, _toConsumableArray(params.slice(0, n))); | |
}; | |
}; | |
var invokerFrom = function invokerFrom(name, object) { | |
return function () { | |
return object[name].apply(object, arguments); | |
}; | |
}; | |
var invokerOf = function invokerOf(name) { | |
return function (object) { | |
for (var _len6 = arguments.length, args = Array(_len6 > 1 ? _len6 - 1 : 0), _key6 = 1; _key6 < _len6; _key6++) { | |
args[_key6 - 1] = arguments[_key6]; | |
} | |
return object[name].apply(object, args); | |
}; | |
}; | |
var identity = function identity(value) { | |
return value; | |
}; | |
var constant = function constant(value) { | |
return function () { | |
return value; | |
}; | |
}; | |
var True = constant(true); | |
var False = constant(false); | |
var not = function not(f) { | |
return function () { | |
return !f.apply(undefined, arguments); | |
}; | |
}; | |
var and = function and() { | |
for (var _len7 = arguments.length, ps = Array(_len7), _key7 = 0; _key7 < _len7; _key7++) { | |
ps[_key7] = arguments[_key7]; | |
} | |
return function () { | |
for (var _len8 = arguments.length, params = Array(_len8), _key8 = 0; _key8 < _len8; _key8++) { | |
params[_key8] = arguments[_key8]; | |
} | |
return ps.every(function (p) { | |
return p.apply(undefined, params); | |
}); | |
}; | |
}; | |
var or = function or() { | |
for (var _len9 = arguments.length, ps = Array(_len9), _key9 = 0; _key9 < _len9; _key9++) { | |
ps[_key9] = arguments[_key9]; | |
} | |
return function () { | |
for (var _len10 = arguments.length, params = Array(_len10), _key10 = 0; _key10 < _len10; _key10++) { | |
params[_key10] = arguments[_key10]; | |
} | |
return ps.some(function (p) { | |
return p.apply(undefined, params); | |
}); | |
}; | |
}; | |
var scheduler = function scheduler(task) { | |
var isScheduled = false; | |
var end = function end() { | |
return isScheduled = false; | |
}; | |
var schedule = function schedule() { | |
if (!isScheduled) { | |
task(end); | |
} | |
}; | |
}; | |
var throttle = function throttle(f, wait) { | |
var options = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2]; | |
var args = null; | |
var result = null; | |
var timeout = null; | |
var previous = 0; | |
var leading = options.leading; | |
var trailing = options.trailing; | |
var later = function later() { | |
previous = leading === false ? 0 : Date.now(); | |
timeout = null; | |
result = f.apply(undefined, _toConsumableArray(args)); | |
args = null; | |
}; | |
return function () { | |
for (var _len11 = arguments.length, params = Array(_len11), _key11 = 0; _key11 < _len11; _key11++) { | |
params[_key11] = arguments[_key11]; | |
} | |
var now = Date.now(); | |
if (!previous && leading === false) previous = now; | |
var remaining = wait - (now - previous); | |
args = params; | |
if (remaining <= 0) { | |
clearTimeout(timeout); | |
timeout = null; | |
previous = now; | |
result = f.apply(undefined, _toConsumableArray(args)); | |
args = null; | |
} else if (!timeout && trailing !== false) { | |
timeout = setTimeout(later, remaining); | |
} | |
return result; | |
}; | |
}; | |
var debounce = function debounce(f, wait, immediate) { | |
var timeout = null; | |
var args = null; | |
var timestamp = null; | |
var result = null; | |
var later = function later() { | |
for (var _len12 = arguments.length, args = Array(_len12), _key12 = 0; _key12 < _len12; _key12++) { | |
args[_key12] = arguments[_key12]; | |
} | |
var last = Date.now() - timestamp; | |
if (last < wait && last >= 0) { | |
timeout = setTimeout(later, wait - last); | |
} else { | |
timeout = null; | |
if (!immediate) { | |
result = f.apply(undefined, _toConsumableArray(args)); | |
if (!timeout) args = null; | |
} | |
} | |
}; | |
return function () { | |
for (var _len13 = arguments.length, params = Array(_len13), _key13 = 0; _key13 < _len13; _key13++) { | |
params[_key13] = arguments[_key13]; | |
} | |
args = params; | |
timestamp = Date.now(); | |
var callNow = immediate && !timeout; | |
if (!timeout) timeout = setTimeout(later, wait); | |
if (callNow) { | |
result = f.apply(undefined, _toConsumableArray(args)); | |
args = null; | |
} | |
return result; | |
}; | |
}; | |
exports.compose = compose; | |
exports.partial = partial; | |
exports.curry = curry; | |
exports.arity = arity; | |
exports.invokerFrom = invokerFrom; | |
exports.invokerOf = invokerOf; | |
exports.identity = identity; | |
exports.constant = constant; | |
exports.True = True; | |
exports.False = False; | |
exports.not = not; | |
exports.and = and; | |
exports.or = or; | |
exports.throttle = throttle; | |
exports.debounce = debounce; | |
},{"babel-runtime/helpers/to-consumable-array":11}],163:[function(require,module,exports){ | |
/* This Source Code Form is subject to the terms of the Mozilla Public | |
* License, v. 2.0. If a copy of the MPL was not distributed with this | |
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ | |
'use strict'; | |
// Utility function that can be used to resume a generator | |
// in a given mode (`next` or `throw`). Result is a function | |
// that takes a `value` and resumes `task` in a curried `mode` | |
// with a given `value`. | |
var _Promise = require("babel-runtime/core-js/promise")["default"]; | |
var _regeneratorRuntime = require("babel-runtime/regenerator")["default"]; | |
var _Object$create = require("babel-runtime/core-js/object/create")["default"]; | |
var resume = function resume(task) { | |
var mode = arguments.length <= 1 || arguments[1] === undefined ? "next" : arguments[1]; | |
return function (value) { | |
try { | |
return task[mode](value); | |
} catch (error) { | |
return { error: error }; | |
} | |
}; | |
}; | |
// Utility function takes `routine` generator and arguments | |
// that will be start / pass to a generator. Result of calling | |
// spawn is a promise that is resolved to a return value of | |
// the give generator. spawn will pause / resume generator on | |
// `yield`. If yield value is a promise generator is resumed | |
// with a value that promise is resolved to or if promise is | |
// rejected then generator will be resumed with an exception | |
// that will be rejection reason. If exception is thrown / not | |
// handled in generator body then returned promise will be | |
// rejected with a given exception. | |
var spawn = function spawn(task) { | |
var _this = this; | |
for (var _len = arguments.length, params = Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) { | |
params[_key - 1] = arguments[_key]; | |
} | |
return new _Promise(function (resolve, reject) { | |
// start a task by passing arguments to generator, note if generator | |
// throws right away it will just reject outer promise. | |
var routine = task.call.apply(task, [_this].concat(params)); | |
// Create a task resuming functions that resume a generator to capture | |
// value it yeilds / returns or an error it throws. `raise` is used to | |
// remuse task with a exception and `next` is used to resume it with a | |
// `value`. | |
var raise = resume(routine, "throw"); | |
var next = resume(routine, "next"); | |
// step function takes result captured via one of the resumer functions | |
// and either completes result promise of the task with it (rejects if | |
// exception was captured or resolves with value if generator returned) | |
// or suspends generator until yield value is resolved / rejected and | |
// then resumes it with resolution value / rejecetion reason. | |
var step = function step(_ref) { | |
var done = _ref.done; | |
var error = _ref.error; | |
var value = _ref.value; | |
// If error was captured reject promise. | |
if (error) { | |
reject(error); | |
} | |
// If generator is done resolve with a completion value. | |
else if (done) { | |
resolve(value); | |
} | |
// Otherwise wrap yield value with promise to wait for tick even | |
// if it was not already a promise & resume generator with either | |
// reseming funciton and caputre results which then are cycled back | |
// onto next step. | |
else { | |
_Promise.resolve(value).then(next, raise).then(step); | |
} | |
}; | |
// Resume generator initially with no value and pass on to next step. | |
step(next()); | |
}); | |
}; | |
exports.spawn = spawn; | |
// Async decorator function takes let you define ES7 like async | |
// function (see http://jakearchibald.com/2014/es7-async-functions/) | |
// but desugared using generators. `async` must be invoked with a | |
// generator function & it will return back pseudo async function. | |
// Returned funciton when invoked returns promise that will be resolved | |
// to a return value of the decorated generator. Generator can yield | |
// promises in which case it's going to be resued with a result of the | |
// promise or exception will be thrown into generator if promise is | |
// rejected. If exception is throw / not caught in generator body | |
// then returned promise will be rejected with that promise. | |
var async = function async(task) { | |
return function () { | |
for (var _len2 = arguments.length, params = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) { | |
params[_key2] = arguments[_key2]; | |
} | |
return spawn.call.apply(spawn, [this, task].concat(params)); | |
}; | |
}; | |
exports.async = async; | |
// Scheduler can be used to queue up tasks to run them in order they | |
// were scheduled but only after task with mathing id is complete. | |
var schedule = function schedule(id, task) { | |
for (var _len3 = arguments.length, params = Array(_len3 > 2 ? _len3 - 2 : 0), _key3 = 2; _key3 < _len3; _key3++) { | |
params[_key3 - 2] = arguments[_key3]; | |
} | |
var pending = schedule.d[id]; | |
return schedule.d[id] = spawn(_regeneratorRuntime.mark(function callee$1$0() { | |
return _regeneratorRuntime.wrap(function callee$1$0$(context$2$0) { | |
while (1) switch (context$2$0.prev = context$2$0.next) { | |
case 0: | |
context$2$0.prev = 0; | |
context$2$0.next = 3; | |
return pending; | |
case 3: | |
context$2$0.prev = 3; | |
return context$2$0.abrupt("return", spawn.apply(undefined, [task].concat(params))); | |
case 6: | |
case "end": | |
return context$2$0.stop(); | |
} | |
}, callee$1$0, this, [[0,, 3, 6]]); | |
})); | |
}; | |
// Use `null` prototype to avoid object as a hash map pitfalls | |
// see following post for more details: | |
// http://www.2ality.com/2012/01/objects-as-maps.html | |
schedule.d = _Object$create(null); | |
exports.schedule = schedule; | |
// wait for the scheduled task with matching id to complete before | |
// spawning new task. | |
// spawn a task regardless if previous task completed with error | |
// or success. Note we do not catch error here to let it propagate | |
// and make devtools handle it more properly. | |
},{"babel-runtime/core-js/object/create":3,"babel-runtime/core-js/promise":5,"babel-runtime/regenerator":77}],164:[function(require,module,exports){ | |
/* This Source Code Form is subject to the terms of the Mozilla Public | |
* License, v. 2.0. If a copy of the MPL was not distributed with this | |
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ | |
'use strict'; | |
var _regeneratorRuntime = require('babel-runtime/regenerator')['default']; | |
var _require = require('../common/history'); | |
var Page = _require.Page; | |
var History = _require.History; | |
var _require2 = require('../lang/task'); | |
// Calculates the score for use in suggestions from | |
// a result array `match` of `RegExp#exec`. | |
var async = _require2.async; | |
var score = function score(pattern) { | |
var input = arguments.length <= 1 || arguments[1] === undefined ? '' : arguments[1]; | |
var base = arguments.length <= 2 || arguments[2] === undefined ? 0.3 : arguments[2]; | |
var length = arguments.length <= 3 || arguments[3] === undefined ? 0.25 : arguments[3]; | |
var index = 1 - base - length; | |
var text = String(input); | |
var count = text.length; | |
var match = pattern.exec(text); | |
return !match ? -1 : base + length * Math.sqrt(match[0].length / count) + index * (1 - match.index / count); | |
}; | |
var Pattern = function Pattern(input) { | |
var flags = arguments.length <= 1 || arguments[1] === undefined ? "i" : arguments[1]; | |
try { | |
return RegExp(input, flags); | |
} catch (error) { | |
if (error instanceof SyntaxError) { | |
return RegExp(pattern.escape(input), flags); | |
} | |
throw error; | |
} | |
}; | |
Pattern.escape = function (input) { | |
return input.replace(/[\.\?\*\+\^\$\|\(\)\{\[\]\\]/g, '\\$&'); | |
}; | |
var pageSearch = async(_regeneratorRuntime.mark(function callee$0$0(db, _ref) { | |
var id = _ref.id; | |
var input = _ref.input; | |
var limit = _ref.limit; | |
var _ref2, rows, query, matches; | |
return _regeneratorRuntime.wrap(function callee$0$0$(context$1$0) { | |
while (1) switch (context$1$0.prev = context$1$0.next) { | |
case 0: | |
context$1$0.next = 2; | |
return db.query({ docs: true, type: 'Page' }); | |
case 2: | |
_ref2 = context$1$0.sent; | |
rows = _ref2.rows; | |
query = Pattern(input.split(/\s+/g).join('[\\s\\S]+') + '|' + input.split(/\s+/g).join('|')); | |
matches = rows.map(function (_ref3) { | |
var page = _ref3.doc; | |
// frequency score is ranked from 0-1 not based on quality of | |
// match but solely on how often this page has been visited in the | |
// past. | |
var frequencyScore = 1 - 0.7 / (1 + page.visits.length); | |
// Title and uri are scored based of input length & match length | |
// and match index. | |
var titleScore = score(query, page.title); | |
var uriScore = score(query, page.uri); | |
// Store each score just for debuging purposes. | |
page.frequencyScore = frequencyScore; | |
page.titleScore = titleScore; | |
page.uriScore = uriScore; | |
// Total score is ranked form `-1` to `1`. Score is devided into | |
// 15 slots and individual field get's different weight based of | |
// portion it can contribute to of over score. No match on individual | |
// field has a negative impact (again besed on it's weight) on actual | |
// score. Assigned weight will likely need some tuning right now | |
// frequencey of visits has a largest wegiht (almost half but less than | |
// half so that no match will still exclude the result). Title has higher | |
// weight than uri as search engines tend to add search term in terms of | |
// query arguments (probably would make sense to score query arguments & | |
// uri hash separately so they weight less, althouh since scoring is length | |
// and index based match in query already get's scored less). | |
page.score = frequencyScore * 7 / 15 + titleScore * 5 / 15 + uriScore * 3 / 15; | |
return page; | |
}).filter(function (page) { | |
return page.score > 0 && page.title; | |
}) | |
// order by score. | |
.sort(function (a, b) { | |
return a.score > b.score ? -1 : a.score < b.score ? 1 : 0; | |
}).slice(0, limit); | |
return context$1$0.abrupt('return', { | |
type: 'PageResult', | |
action: { id: id, results: matches } | |
}); | |
case 7: | |
case 'end': | |
return context$1$0.stop(); | |
} | |
}, callee$0$0, this); | |
})); | |
var history = new History({ trackTopPages: true }); | |
onmessage = function (_ref4) { | |
var _ref4$data = _ref4.data; | |
var type = _ref4$data.type; | |
var id = _ref4$data.id; | |
var action = _ref4$data.action; | |
if (type === 'LoadEnded') { | |
history.edit(Page.from({ uri: action.uri }), Page.beginVisit({ id: id, time: action.timeStamp })); | |
} | |
if (type === 'LocationChanged') { | |
history.edit(Page.from({ uri: action.uri }), Page.endVisit({ id: id, time: action.timeStamp })); | |
} | |
if (type === 'TitleChanged') { | |
history.edit(Page.from({ uri: action.uri }), function (page) { | |
return page.set('title', action.title); | |
}); | |
} | |
if (type === 'ThumbnailChanged') { | |
history.edit(Page.from({ uri: action.uri }), function (page) { | |
return page.set('image', action.image); | |
}); | |
} | |
if (type === 'IconChanged') { | |
history.edit(Page.from({ uri: action.uri }), function (page) { | |
return page.set('icon', action.icon); | |
}); | |
} | |
if (type === 'PageQuery') { | |
pageSearch(history, action).then(postMessage); | |
} | |
}; | |
// Build a query patter from all words and individual words, note that | |
// scoring will take into consideration the length of the match so if we match | |
// multiple words that gets larger score then if we matched just one. | |
},{"../common/history":161,"../lang/task":163,"babel-runtime/regenerator":77}]},{},[164]) | |
//# sourceMappingURL=../service/history-worker.js.map | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
ERROR:js::rust: Error at http://localhost:6060/dist/service/history-worker.js:838: mutating the [[Prototype]] of an object will cause your code to run very slowly; instead create the object with the correct initial [[Prototype]] value using Object.create | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:js::rust: ...err! | |
evaluate_script failed | |
ERROR:js::rust: Error at http://localhost:6060/dist/service/history-worker.js:21786: TypeError: this.pagesChangeFeed.on is not a function | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: bad prototype | |
DEBUG:script::dom::bindings::conversions: plain old dom object | |
DEBUG:script::dom::bindings::conversions: good prototype | |
DEBUG:compositing::compositor: shutting down the constellation for WindowEvent::Quit | |
thread '<main>' panicked at 'called `Result::unwrap()` on an `Err` value: "SendError(..)"', src/libcore/result.rs:732 | |
stack backtrace: | |
1: 0x110b803b5 - sys::backtrace::write::h2513b694e23623efOvs | |
2: 0x110b839f0 - panicking::on_panic::h399650fb5f386215wWw | |
3: 0x110b707f2 - rt::unwind::begin_unwind_inner::h208861166cac0468HEw | |
4: 0x110b70e6c - rt::unwind::begin_unwind_fmt::h2b843ddd6c3f7d25NDw | |
5: 0x110b833bc - rust_begin_unwind | |
6: 0x110ba97b5 - panicking::panic_fmt::h18e94a0fe8bc1992MhC | |
7: 0x10e5369c7 - result::Result<T, E>::unwrap::h11977409905354948680 | |
8: 0x10e5acf0f - compositor::IOCompositor<Window>::handle_window_message::h3443521244016924919 | |
9: 0x10e5397b9 - compositor::IOCompositor<Window>.CompositorEventListener::handle_events::h14913581015608926147 | |
10: 0x10e688b79 - Browser::handle_events::hdef276b03a454cb00ca | |
11: 0x10e4d9615 - main::h56287c74df46b5aeraa | |
12: 0x110b832ef - __rust_try_inner | |
13: 0x110b8332a - __rust_try | |
14: 0x110b8463a - rt::lang_start::h62f9a1aa1fee3678sRw | |
15: 0x10e5b39ae - main | |
Servo exited with return value 101 |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment