Created October 27, 2008 20:51
diff --git a/merb-core/Rakefile b/merb-core/Rakefile
index 1dbabaa..448e3c1 100644
--- a/merb-core/Rakefile
+++ b/merb-core/Rakefile
@@ -230,6 +230,11 @@ end
 setup_specs("mri", "spec")
 setup_specs("jruby", "jruby -S spec")
+task "specs:core_ext" do
+  require "lib/merb-core/test/run_specs"
+  run_specs("spec/public/core_ext/*_spec.rb", "spec", "-c -f o")
+end
+
 task "specs" => ["specs:mri"]
 task "specs:private" => ["specs:mri:private"]
 task "specs:public" => ["specs:mri:public"]
diff --git a/merb-core/lib/merb-core.rb b/merb-core/lib/merb-core.rb
index 3eb41a1..f0a6e8d 100644
--- a/merb-core/lib/merb-core.rb
+++ b/merb-core/lib/merb-core.rb
@@ -402,7 +402,12 @@ module Merb
     Merb.logger.fatal!
     print_colorized_backtrace(e) if e && Merb::Config[:verbose]
-    exit(1)
+
+    if Merb::Config[:show_ugly_backtraces]
+      raise e
+    else
+      exit(1)
+    end
   end
   # Print a colorized backtrace to the merb logger.
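
With :show_ugly_backtraces set, a fatal boot failure now re-raises the original exception (surfacing the full, unfiltered Ruby backtrace) instead of exiting with status 1. The option name comes from the diff; where it gets set is up to the application. A hedged example using Merb's usual config block, assuming config/init.rb as the location:

# Illustrative only -- set the flag wherever Merb::Config is configured for your app.
Merb::Config.use do |c|
  c[:show_ugly_backtraces] = true   # re-raise fatal boot errors instead of exit(1)
  c[:verbose]              = true   # still print the colorized backtrace first
end
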
diff --git a/merb-core/lib/merb-core/core_ext/kernel.rb b/merb-core/lib/merb-core/core_ext/kernel.rb
index 0853a0d..932bed2 100644
--- a/merb-core/lib/merb-core/core_ext/kernel.rb
+++ b/merb-core/lib/merb-core/core_ext/kernel.rb
@@ -18,20 +18,16 @@ module Kernel
   #
   # @api private
   def track_dependency(name, *ver, &blk)
-    ver.pop if ver.last.is_a?(Hash) && ver.last.empty?
-    dep = Gem::Dependency.new(name, ver.empty? ? nil : ver)
-    dep.require_block = blk
-    dep.require_as = (ver.last.is_a?(Hash) && ver.last[:require_as]) || name
+    options = ver.pop if ver.last.is_a?(Hash)
+    new_dep = Gem::Dependency.new(name, ver.empty? ? nil : ver)
+    new_dep.require_block = blk
+    new_dep.require_as = (options && options[:require_as]) || name
-    existing = Merb::BootLoader::Dependencies.dependencies.find { |d| d.name == dep.name }
-    if existing
-      index = Merb::BootLoader::Dependencies.dependencies.index(existing)
-      Merb::BootLoader::Dependencies.dependencies.delete(existing)
-      Merb::BootLoader::Dependencies.dependencies.insert(index, dep)
-    else
-      Merb::BootLoader::Dependencies.dependencies << dep
-    end
-    return dep
+    deps = Merb::BootLoader::Dependencies.dependencies
+
+    deps.reject! {|current| current.name == new_dep.name }
+    deps << new_dep
+    new_dep
   end
   # Loads the given string as a gem. Execution is deferred until
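
The rewritten track_dependency replaces the find/index/delete/insert bookkeeping with a simpler "last declaration wins" rule: any previously registered dependency with the same name is dropped and the new one is appended. One behavioural difference worth noting: the old code kept a re-declared dependency at its original position, whereas reject!/<< moves it to the end of the list. A standalone illustration of the new semantics, using a plain Struct instead of Gem::Dependency:

# Illustration of the new dedup behaviour only -- not Merb code.
Dep = Struct.new(:name, :version)

deps = [Dep.new("json_pure", ">= 1.1.3"), Dep.new("mime-types", nil)]

new_dep = Dep.new("json_pure", ">= 1.1.0")
deps.reject! { |current| current.name == new_dep.name }  # drop any earlier declaration
deps << new_dep                                          # re-append at the end

deps.map { |d| d.name }  # => ["mime-types", "json_pure"] -- note the changed ordering
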
diff --git a/merb-core/spec/public/core_ext/fixtures/bad_require_gem/Rakefile b/merb-core/spec/public/core_ext/fixtures/bad_require_gem/Rakefile
new file mode 100644
index 0000000..d861b1c
--- /dev/null
+++ b/merb-core/spec/public/core_ext/fixtures/bad_require_gem/Rakefile
@@ -0,0 +1,42 @@
+require 'rubygems'
+require 'rake/gempackagetask'
+require 'rubygems/specification'
+require 'date'
+
+GEM = "bad_require_gem"
+GEM_VERSION = "0.0.1"
+AUTHOR = "Your Name"
+EMAIL = "Your Email"
+HOMEPAGE = "http://example.com"
+SUMMARY = "A gem that provides..."
+
+spec = Gem::Specification.new do |s|
+  s.name = GEM
+  s.version = GEM_VERSION
+  s.platform = Gem::Platform::RUBY
+  s.has_rdoc = true
+  s.summary = SUMMARY
+  s.description = s.summary
+  s.author = AUTHOR
+  s.email = EMAIL
+  s.homepage = HOMEPAGE
+
+  s.require_path = 'lib'
+  s.files = %w(Rakefile) + Dir.glob("{lib}/**/*")
+end
+
+Rake::GemPackageTask.new(spec) do |pkg|
+  pkg.gem_spec = spec
+end
+
+desc "install the gem locally"
+task :install => [:package] do
+  sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}}
+end
+
+desc "create a gemspec file"
+task :make_spec do
+  File.open("#{GEM}.gemspec", "w") do |file|
+    file.puts spec.to_ruby
+  end
+end
\ No newline at end of file
diff --git a/merb-core/spec/public/core_ext/fixtures/bad_require_gem/lib/BadRequireGem.rb b/merb-core/spec/public/core_ext/fixtures/bad_require_gem/lib/BadRequireGem.rb
new file mode 100644
index 0000000..80643a5
--- /dev/null
+++ b/merb-core/spec/public/core_ext/fixtures/bad_require_gem/lib/BadRequireGem.rb
@@ -0,0 +1,6 @@
+module Merb
+  module SpecFixture
+    class BadRequireGem
+    end
+  end
+end
\ No newline at end of file
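
This fixture gem is deliberately "bad": it is named bad_require_gem, but its only file is lib/BadRequireGem.rb, so a plain require of the gem name fails. It exists to exercise the :require_as handling in track_dependency above. The specs themselves are not part of this diff, but one would presumably declare the dependency along these lines:

# Hypothetical usage in a core_ext spec -- the real spec files are not shown here.
dependency "bad_require_gem", :require_as => "BadRequireGem"  # loads lib/BadRequireGem.rb
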
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/bin/edit_json.rb b/merb-core/spec/public/core_ext/fixtures/gems/bin/edit_json.rb | |
new file mode 100755 | |
index 0000000..f64ac30 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/bin/edit_json.rb | |
@@ -0,0 +1,19 @@ | |
+#!/System/Library/Frameworks/Ruby.framework/Versions/1.8/usr/bin/ruby | |
+# | |
+# This file was generated by RubyGems. | |
+# | |
+# The application 'json_pure' is installed as part of a gem, and | |
+# this file is here to facilitate running it. | |
+# | |
+ | |
+require 'rubygems' | |
+ | |
+version = ">= 0" | |
+ | |
+if ARGV.first =~ /^_(.*)_$/ and Gem::Version.correct? $1 then | |
+ version = $1 | |
+ ARGV.shift | |
+end | |
+ | |
+gem 'json_pure', version | |
+load 'edit_json.rb' | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/Rakefile b/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/Rakefile | |
new file mode 100644 | |
index 0000000..d861b1c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/Rakefile | |
@@ -0,0 +1,42 @@ | |
+require 'rubygems' | |
+require 'rake/gempackagetask' | |
+require 'rubygems/specification' | |
+require 'date' | |
+ | |
+GEM = "bad_require_gem" | |
+GEM_VERSION = "0.0.1" | |
+AUTHOR = "Your Name" | |
+EMAIL = "Your Email" | |
+HOMEPAGE = "http://example.com" | |
+SUMMARY = "A gem that provides..." | |
+ | |
+spec = Gem::Specification.new do |s| | |
+ s.name = GEM | |
+ s.version = GEM_VERSION | |
+ s.platform = Gem::Platform::RUBY | |
+ s.has_rdoc = true | |
+ s.summary = SUMMARY | |
+ s.description = s.summary | |
+ s.author = AUTHOR | |
+ s.email = EMAIL | |
+ s.homepage = HOMEPAGE | |
+ | |
+ s.require_path = 'lib' | |
+ s.files = %w(Rakefile) + Dir.glob("{lib}/**/*") | |
+end | |
+ | |
+Rake::GemPackageTask.new(spec) do |pkg| | |
+ pkg.gem_spec = spec | |
+end | |
+ | |
+desc "install the gem locally" | |
+task :install => [:package] do | |
+ sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}} | |
+end | |
+ | |
+desc "create a gemspec file" | |
+task :make_spec do | |
+ File.open("#{GEM}.gemspec", "w") do |file| | |
+ file.puts spec.to_ruby | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/lib/BadRequireGem.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/lib/BadRequireGem.rb | |
new file mode 100644 | |
index 0000000..80643a5 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/bad_require_gem-0.0.1/lib/BadRequireGem.rb | |
@@ -0,0 +1,6 @@ | |
+module Merb | |
+ module SpecFixture | |
+ class BadRequireGem | |
+ end | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/CHANGES b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/CHANGES | |
new file mode 100644 | |
index 0000000..b100211 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/CHANGES | |
@@ -0,0 +1,93 @@ | |
+2008-07-10 (1.1.3) | |
+ * Wesley Beary <[email protected]> reported a bug in json/add/core's DateTime | |
+ handling: If the nominator and denominator of the offset were divisible by | |
+ each other Ruby's Rational#to_s returns them as an integer not a fraction | |
+ with '/'. This caused a ZeroDivisionError during parsing. | |
+ * Use Date#start and DateTime#start instead of sg method, while | |
+ remaining backwards compatible. | |
+ * Supports ragel >= 6.0 now. | |
+ * Corrected some tests. | |
+ * Some minor changes. | |
+2007-11-27 (1.1.2) | |
+ * Remember default dir (last used directory) in editor. | |
+ * JSON::Editor.edit method added, the editor can now receive json texts from | |
+ the clipboard via C-v. | |
+ * Load json texts from an URL pasted via middle button press. | |
+ * Added :create_additions option to Parser. This makes it possible to disable | |
+ the creation of additions by force, in order to treat json texts as data | |
+ while having additions loaded. | |
+ * Jacob Maine <[email protected]> reported, that JSON(:foo) outputs a JSON | |
+ object if the rails addition is enabled, which is wrong. It now outputs a | |
+ JSON string "foo" instead, like suggested by Jacob Maine. | |
+ * Discovered a bug in the Ruby Bugs Tracker on rubyforge, that was reported | |
+ by John Evans [email protected]. He could produce a crash in the JSON | |
+ generator by returning something other than a String instance from a | |
+ to_json method. I now guard against this by doing a rather crude type | |
+ check, which raises an exception instead of crashing. | |
+2007-07-06 (1.1.1) | |
+ * Yui NARUSE <[email protected]> sent some patches to fix tests for Ruby | |
+ 1.9. I applied them and adapted some of them a bit to run both on 1.8 and | |
+ 1.9. | |
+ * Introduced a JSON.parse! method without depth checking for people who like | |
+ danger. | |
+ * Made generate and pretty_generate methods configurable by an options hash. | |
+ * Added :allow_nan option to parser and generator in order to handle NaN, | |
+ Infinity, and -Infinity correctly - if requested. Floats, which aren't numbers, | |
+ aren't valid JSON according to RFC4627, so by default an exception will be | |
+ raised if any of these symbols are encountered. Thanks to Andrea Censi | |
+ <[email protected]> for his hint about this. | |
+ * Fixed some more tests for Ruby 1.9. | |
+ * Implemented dump/load interface of Marshal as suggested in ruby-core:11405 | |
+ by murphy <[email protected]>. | |
+ * Implemented the max_nesting feature for generate methods, too. | |
+ * Added some implementations for ruby core's custom objects for | |
+ serialisation/deserialisation purposes. | |
+2007-05-21 (1.1.0) | |
+ * Implemented max_nesting feature for parser to avoid stack overflows for | |
+ data from untrusted sources. If you trust the source, you can disable it | |
+ with the option max_nesting => false. | |
+ * Piers Cawley <[email protected]> reported a bug, that not every | |
+ character can be escaped by ?\ as required by RFC4627. There's a | |
+ contradiction between David Crockford's JSON checker test vectors (in | |
+ tests/fixtures) and RFC4627, though. I decided to stick to the RFC, because | |
+ the JSON checker seems to be a bit older than the RFC. | |
+ * Extended license to Ruby License, which includes the GPL. | |
+ * Added keyboard shortcuts, and 'Open location' menu item to edit_json.rb. | |
+2007-05-09 (1.0.4) | |
+ * Applied a patch from Yui NARUSE <[email protected]> to make JSON compile | |
+ under Ruby 1.9. Thank you very much for mailing it to me! | |
+ * Made binary variants of JSON fail early, instead of falling back to the | |
+ pure version. This should avoid overshadowing of eventual problems while | |
+ loading of the binary. | |
+2007-03-24 (1.0.3) | |
+ * Improved performance of pure variant a bit. | |
+ * The ext variant of this release supports the mswin32 platform. Ugh! | |
+2007-03-24 (1.0.2) | |
+ * Ext Parser didn't parse 0e0 correctly into 0.0: Fixed! | |
+2007-03-24 (1.0.1) | |
+ * Forgot some object files in the build dir. I really like that - not! | |
+2007-03-24 (1.0.0) | |
+ * Added C implementations for the JSON generator and a ragel based JSON | |
+ parser in C. | |
+ * Much more tests, especially fixtures from json.org. | |
+ * Further improved conformance to RFC4627. | |
+2007-02-09 (0.4.3) | |
+ * Conform more to RFC4627 for JSON: This means JSON strings | |
+ now always must contain exactly one object "{ ... }" or array "[ ... ]" in | |
+ order to be parsed without raising an exception. The definition of what | |
+ constitutes a whitespace is narrower in JSON than in Ruby ([ \t\r\n]), and | |
+ there are differences in floats and integers (no octals or hexadecimals) as | |
+ well. | |
+ * Added aliases generate and pretty_generate of unparse and pretty_unparse. | |
+ * Fixed a test case. | |
+ * Catch an Iconv::InvalidEncoding exception, that seems to occur on some Sun | |
+ boxes with SunOS 5.8, if iconv doesn't support utf16 conversions. This was | |
+ reported by Andrew R Jackson <[email protected]>, thanks a bunch! | |
+2006-08-25 (0.4.2) | |
+ * Fixed a bug in handling solidi (/-characters), that was reported by | |
+ Kevin Gilpin <[email protected]>. | |
+2006-02-06 (0.4.1) | |
+ * Fixed a bug related to escaping with backslashes. Thanks for the report go | |
+ to Florian Munz <[email protected]>. | |
+2005-09-23 (0.4.0) | |
+ * Initial Rubyforge Version | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/GPL b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/GPL | |
new file mode 100644 | |
index 0000000..5b6e7c6 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/GPL | |
@@ -0,0 +1,340 @@ | |
+ GNU GENERAL PUBLIC LICENSE | |
+ Version 2, June 1991 | |
+ | |
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc. | |
+ 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | |
+ Everyone is permitted to copy and distribute verbatim copies | |
+ of this license document, but changing it is not allowed. | |
+ | |
+ Preamble | |
+ | |
+ The licenses for most software are designed to take away your | |
+freedom to share and change it. By contrast, the GNU General Public | |
+License is intended to guarantee your freedom to share and change free | |
+software--to make sure the software is free for all its users. This | |
+General Public License applies to most of the Free Software | |
+Foundation's software and to any other program whose authors commit to | |
+using it. (Some other Free Software Foundation software is covered by | |
+the GNU Library General Public License instead.) You can apply it to | |
+your programs, too. | |
+ | |
+ When we speak of free software, we are referring to freedom, not | |
+price. Our General Public Licenses are designed to make sure that you | |
+have the freedom to distribute copies of free software (and charge for | |
+this service if you wish), that you receive source code or can get it | |
+if you want it, that you can change the software or use pieces of it | |
+in new free programs; and that you know you can do these things. | |
+ | |
+ To protect your rights, we need to make restrictions that forbid | |
+anyone to deny you these rights or to ask you to surrender the rights. | |
+These restrictions translate to certain responsibilities for you if you | |
+distribute copies of the software, or if you modify it. | |
+ | |
+ For example, if you distribute copies of such a program, whether | |
+gratis or for a fee, you must give the recipients all the rights that | |
+you have. You must make sure that they, too, receive or can get the | |
+source code. And you must show them these terms so they know their | |
+rights. | |
+ | |
+ We protect your rights with two steps: (1) copyright the software, and | |
+(2) offer you this license which gives you legal permission to copy, | |
+distribute and/or modify the software. | |
+ | |
+ Also, for each author's protection and ours, we want to make certain | |
+that everyone understands that there is no warranty for this free | |
+software. If the software is modified by someone else and passed on, we | |
+want its recipients to know that what they have is not the original, so | |
+that any problems introduced by others will not reflect on the original | |
+authors' reputations. | |
+ | |
+ Finally, any free program is threatened constantly by software | |
+patents. We wish to avoid the danger that redistributors of a free | |
+program will individually obtain patent licenses, in effect making the | |
+program proprietary. To prevent this, we have made it clear that any | |
+patent must be licensed for everyone's free use or not licensed at all. | |
+ | |
+ The precise terms and conditions for copying, distribution and | |
+modification follow. | |
+ | |
+ GNU GENERAL PUBLIC LICENSE | |
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION | |
+ | |
+ 0. This License applies to any program or other work which contains | |
+a notice placed by the copyright holder saying it may be distributed | |
+under the terms of this General Public License. The "Program", below, | |
+refers to any such program or work, and a "work based on the Program" | |
+means either the Program or any derivative work under copyright law: | |
+that is to say, a work containing the Program or a portion of it, | |
+either verbatim or with modifications and/or translated into another | |
+language. (Hereinafter, translation is included without limitation in | |
+the term "modification".) Each licensee is addressed as "you". | |
+ | |
+Activities other than copying, distribution and modification are not | |
+covered by this License; they are outside its scope. The act of | |
+running the Program is not restricted, and the output from the Program | |
+is covered only if its contents constitute a work based on the | |
+Program (independent of having been made by running the Program). | |
+Whether that is true depends on what the Program does. | |
+ | |
+ 1. You may copy and distribute verbatim copies of the Program's | |
+source code as you receive it, in any medium, provided that you | |
+conspicuously and appropriately publish on each copy an appropriate | |
+copyright notice and disclaimer of warranty; keep intact all the | |
+notices that refer to this License and to the absence of any warranty; | |
+and give any other recipients of the Program a copy of this License | |
+along with the Program. | |
+ | |
+You may charge a fee for the physical act of transferring a copy, and | |
+you may at your option offer warranty protection in exchange for a fee. | |
+ | |
+ 2. You may modify your copy or copies of the Program or any portion | |
+of it, thus forming a work based on the Program, and copy and | |
+distribute such modifications or work under the terms of Section 1 | |
+above, provided that you also meet all of these conditions: | |
+ | |
+ a) You must cause the modified files to carry prominent notices | |
+ stating that you changed the files and the date of any change. | |
+ | |
+ b) You must cause any work that you distribute or publish, that in | |
+ whole or in part contains or is derived from the Program or any | |
+ part thereof, to be licensed as a whole at no charge to all third | |
+ parties under the terms of this License. | |
+ | |
+ c) If the modified program normally reads commands interactively | |
+ when run, you must cause it, when started running for such | |
+ interactive use in the most ordinary way, to print or display an | |
+ announcement including an appropriate copyright notice and a | |
+ notice that there is no warranty (or else, saying that you provide | |
+ a warranty) and that users may redistribute the program under | |
+ these conditions, and telling the user how to view a copy of this | |
+ License. (Exception: if the Program itself is interactive but | |
+ does not normally print such an announcement, your work based on | |
+ the Program is not required to print an announcement.) | |
+ | |
+These requirements apply to the modified work as a whole. If | |
+identifiable sections of that work are not derived from the Program, | |
+and can be reasonably considered independent and separate works in | |
+themselves, then this License, and its terms, do not apply to those | |
+sections when you distribute them as separate works. But when you | |
+distribute the same sections as part of a whole which is a work based | |
+on the Program, the distribution of the whole must be on the terms of | |
+this License, whose permissions for other licensees extend to the | |
+entire whole, and thus to each and every part regardless of who wrote it. | |
+ | |
+Thus, it is not the intent of this section to claim rights or contest | |
+your rights to work written entirely by you; rather, the intent is to | |
+exercise the right to control the distribution of derivative or | |
+collective works based on the Program. | |
+ | |
+In addition, mere aggregation of another work not based on the Program | |
+with the Program (or with a work based on the Program) on a volume of | |
+a storage or distribution medium does not bring the other work under | |
+the scope of this License. | |
+ | |
+ 3. You may copy and distribute the Program (or a work based on it, | |
+under Section 2) in object code or executable form under the terms of | |
+Sections 1 and 2 above provided that you also do one of the following: | |
+ | |
+ a) Accompany it with the complete corresponding machine-readable | |
+ source code, which must be distributed under the terms of Sections | |
+ 1 and 2 above on a medium customarily used for software interchange; or, | |
+ | |
+ b) Accompany it with a written offer, valid for at least three | |
+ years, to give any third party, for a charge no more than your | |
+ cost of physically performing source distribution, a complete | |
+ machine-readable copy of the corresponding source code, to be | |
+ distributed under the terms of Sections 1 and 2 above on a medium | |
+ customarily used for software interchange; or, | |
+ | |
+ c) Accompany it with the information you received as to the offer | |
+ to distribute corresponding source code. (This alternative is | |
+ allowed only for noncommercial distribution and only if you | |
+ received the program in object code or executable form with such | |
+ an offer, in accord with Subsection b above.) | |
+ | |
+The source code for a work means the preferred form of the work for | |
+making modifications to it. For an executable work, complete source | |
+code means all the source code for all modules it contains, plus any | |
+associated interface definition files, plus the scripts used to | |
+control compilation and installation of the executable. However, as a | |
+special exception, the source code distributed need not include | |
+anything that is normally distributed (in either source or binary | |
+form) with the major components (compiler, kernel, and so on) of the | |
+operating system on which the executable runs, unless that component | |
+itself accompanies the executable. | |
+ | |
+If distribution of executable or object code is made by offering | |
+access to copy from a designated place, then offering equivalent | |
+access to copy the source code from the same place counts as | |
+distribution of the source code, even though third parties are not | |
+compelled to copy the source along with the object code. | |
+ | |
+ 4. You may not copy, modify, sublicense, or distribute the Program | |
+except as expressly provided under this License. Any attempt | |
+otherwise to copy, modify, sublicense or distribute the Program is | |
+void, and will automatically terminate your rights under this License. | |
+However, parties who have received copies, or rights, from you under | |
+this License will not have their licenses terminated so long as such | |
+parties remain in full compliance. | |
+ | |
+ 5. You are not required to accept this License, since you have not | |
+signed it. However, nothing else grants you permission to modify or | |
+distribute the Program or its derivative works. These actions are | |
+prohibited by law if you do not accept this License. Therefore, by | |
+modifying or distributing the Program (or any work based on the | |
+Program), you indicate your acceptance of this License to do so, and | |
+all its terms and conditions for copying, distributing or modifying | |
+the Program or works based on it. | |
+ | |
+ 6. Each time you redistribute the Program (or any work based on the | |
+Program), the recipient automatically receives a license from the | |
+original licensor to copy, distribute or modify the Program subject to | |
+these terms and conditions. You may not impose any further | |
+restrictions on the recipients' exercise of the rights granted herein. | |
+You are not responsible for enforcing compliance by third parties to | |
+this License. | |
+ | |
+ 7. If, as a consequence of a court judgment or allegation of patent | |
+infringement or for any other reason (not limited to patent issues), | |
+conditions are imposed on you (whether by court order, agreement or | |
+otherwise) that contradict the conditions of this License, they do not | |
+excuse you from the conditions of this License. If you cannot | |
+distribute so as to satisfy simultaneously your obligations under this | |
+License and any other pertinent obligations, then as a consequence you | |
+may not distribute the Program at all. For example, if a patent | |
+license would not permit royalty-free redistribution of the Program by | |
+all those who receive copies directly or indirectly through you, then | |
+the only way you could satisfy both it and this License would be to | |
+refrain entirely from distribution of the Program. | |
+ | |
+If any portion of this section is held invalid or unenforceable under | |
+any particular circumstance, the balance of the section is intended to | |
+apply and the section as a whole is intended to apply in other | |
+circumstances. | |
+ | |
+It is not the purpose of this section to induce you to infringe any | |
+patents or other property right claims or to contest validity of any | |
+such claims; this section has the sole purpose of protecting the | |
+integrity of the free software distribution system, which is | |
+implemented by public license practices. Many people have made | |
+generous contributions to the wide range of software distributed | |
+through that system in reliance on consistent application of that | |
+system; it is up to the author/donor to decide if he or she is willing | |
+to distribute software through any other system and a licensee cannot | |
+impose that choice. | |
+ | |
+This section is intended to make thoroughly clear what is believed to | |
+be a consequence of the rest of this License. | |
+ | |
+ 8. If the distribution and/or use of the Program is restricted in | |
+certain countries either by patents or by copyrighted interfaces, the | |
+original copyright holder who places the Program under this License | |
+may add an explicit geographical distribution limitation excluding | |
+those countries, so that distribution is permitted only in or among | |
+countries not thus excluded. In such case, this License incorporates | |
+the limitation as if written in the body of this License. | |
+ | |
+ 9. The Free Software Foundation may publish revised and/or new versions | |
+of the General Public License from time to time. Such new versions will | |
+be similar in spirit to the present version, but may differ in detail to | |
+address new problems or concerns. | |
+ | |
+Each version is given a distinguishing version number. If the Program | |
+specifies a version number of this License which applies to it and "any | |
+later version", you have the option of following the terms and conditions | |
+either of that version or of any later version published by the Free | |
+Software Foundation. If the Program does not specify a version number of | |
+this License, you may choose any version ever published by the Free Software | |
+Foundation. | |
+ | |
+ 10. If you wish to incorporate parts of the Program into other free | |
+programs whose distribution conditions are different, write to the author | |
+to ask for permission. For software which is copyrighted by the Free | |
+Software Foundation, write to the Free Software Foundation; we sometimes | |
+make exceptions for this. Our decision will be guided by the two goals | |
+of preserving the free status of all derivatives of our free software and | |
+of promoting the sharing and reuse of software generally. | |
+ | |
+ NO WARRANTY | |
+ | |
+ 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY | |
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN | |
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES | |
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED | |
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF | |
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS | |
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE | |
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, | |
+REPAIR OR CORRECTION. | |
+ | |
+ 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING | |
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR | |
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, | |
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING | |
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED | |
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY | |
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER | |
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE | |
+POSSIBILITY OF SUCH DAMAGES. | |
+ | |
+ END OF TERMS AND CONDITIONS | |
+ | |
+ How to Apply These Terms to Your New Programs | |
+ | |
+ If you develop a new program, and you want it to be of the greatest | |
+possible use to the public, the best way to achieve this is to make it | |
+free software which everyone can redistribute and change under these terms. | |
+ | |
+ To do so, attach the following notices to the program. It is safest | |
+to attach them to the start of each source file to most effectively | |
+convey the exclusion of warranty; and each file should have at least | |
+the "copyright" line and a pointer to where the full notice is found. | |
+ | |
+ <one line to give the program's name and a brief idea of what it does.> | |
+ Copyright (C) <year> <name of author> | |
+ | |
+ This program is free software; you can redistribute it and/or modify | |
+ it under the terms of the GNU General Public License as published by | |
+ the Free Software Foundation; either version 2 of the License, or | |
+ (at your option) any later version. | |
+ | |
+ This program is distributed in the hope that it will be useful, | |
+ but WITHOUT ANY WARRANTY; without even the implied warranty of | |
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
+ GNU General Public License for more details. | |
+ | |
+ You should have received a copy of the GNU General Public License | |
+ along with this program; if not, write to the Free Software | |
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | |
+ | |
+ | |
+Also add information on how to contact you by electronic and paper mail. | |
+ | |
+If the program is interactive, make it output a short notice like this | |
+when it starts in an interactive mode: | |
+ | |
+ Gnomovision version 69, Copyright (C) year name of author | |
+ Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. | |
+ This is free software, and you are welcome to redistribute it | |
+ under certain conditions; type `show c' for details. | |
+ | |
+The hypothetical commands `show w' and `show c' should show the appropriate | |
+parts of the General Public License. Of course, the commands you use may | |
+be called something other than `show w' and `show c'; they could even be | |
+mouse-clicks or menu items--whatever suits your program. | |
+ | |
+You should also get your employer (if you work as a programmer) or your | |
+school, if any, to sign a "copyright disclaimer" for the program, if | |
+necessary. Here is a sample; alter the names: | |
+ | |
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the program | |
+ `Gnomovision' (which makes passes at compilers) written by James Hacker. | |
+ | |
+ <signature of Ty Coon>, 1 April 1989 | |
+ Ty Coon, President of Vice | |
+ | |
+This General Public License does not permit incorporating your program into | |
+proprietary programs. If your program is a subroutine library, you may | |
+consider it more useful to permit linking proprietary applications with the | |
+library. If this is what you want to do, use the GNU Library General | |
+Public License instead of this License. | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/README b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/README | |
new file mode 100644 | |
index 0000000..9b4f3be | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/README | |
@@ -0,0 +1,78 @@ | |
+Dependencies for Building | |
+========================= | |
+ | |
+- You need rake to build the extensions and install them. | |
+ | |
+ You can get it from rubyforge: | |
+ http://rubyforge.org/projects/rake | |
+ | |
+ or just type | |
+ | |
+ # gem install rake | |
+ | |
+ for the installation via rubygems. | |
+ | |
+- If you want to rebuild the parser.c file or draw nice graphviz images of the | |
+ state machines, you need ragel from: | |
+ http://www.cs.queensu.ca/~thurston/ragel | |
+ | |
+Installation | |
+============ | |
+ | |
+It's recommended to use the extension variant of JSON, because it's quite a bit | |
+faster than the pure ruby variant. If you cannot build it on your system, you | |
+can settle for the latter. | |
+ | |
+Just type into the command line as root: | |
+ | |
+# rake install | |
+ | |
+The above command will build the extensions and install them on your system. | |
+ | |
+# rake install_pure | |
+ | |
+or | |
+ | |
+# ruby install.rb | |
+ | |
+will just install the pure ruby implementation of JSON. | |
+ | |
+If you use Rubygems you can type | |
+ | |
+# gem install json | |
+ | |
+instead, to install the newest JSON version. | |
+ | |
+There is also a pure ruby json only variant of the gem, that can be installed | |
+with: | |
+ | |
+# gem install json_pure | |
+ | |
+Testing and Examples | |
+==================== | |
+ | |
+To run the tests type: | |
+ | |
+$ rake test_ext | |
+ | |
+This will build the extensions first and then test them. | |
+ | |
+$ rake test_pure | |
+ | |
+This will test the pure ruby extensions. | |
+ | |
+There is also a small example in tools/server.rb if you want to see, how | |
+receiving a JSON object from a webrick server in your browser with the | |
+javasript prototype library (http://www.prototypejs.org) works. | |
+ | |
+Author | |
+====== | |
+ | |
+Florian Frank <[email protected]> | |
+ | |
+License | |
+======= | |
+ | |
+Ruby License, see the RUBY file included in the source distribution. The Ruby | |
+License includes the GNU General Public License (GPL), Version 2, so see the | |
+file GPL as well. | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/RUBY b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/RUBY | |
new file mode 100644 | |
index 0000000..4a99f6f | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/RUBY | |
@@ -0,0 +1,58 @@ | |
+Ruby is copyrighted free software by Yukihiro Matsumoto <[email protected]>. | |
+You can redistribute it and/or modify it under either the terms of the GPL | |
+(see COPYING.txt file), or the conditions below: | |
+ | |
+ 1. You may make and give away verbatim copies of the source form of the | |
+ software without restriction, provided that you duplicate all of the | |
+ original copyright notices and associated disclaimers. | |
+ | |
+ 2. You may modify your copy of the software in any way, provided that | |
+ you do at least ONE of the following: | |
+ | |
+ a) place your modifications in the Public Domain or otherwise | |
+ make them Freely Available, such as by posting said | |
+ modifications to Usenet or an equivalent medium, or by allowing | |
+ the author to include your modifications in the software. | |
+ | |
+ b) use the modified software only within your corporation or | |
+ organization. | |
+ | |
+ c) rename any non-standard executables so the names do not conflict | |
+ with standard executables, which must also be provided. | |
+ | |
+ d) make other distribution arrangements with the author. | |
+ | |
+ 3. You may distribute the software in object code or executable | |
+ form, provided that you do at least ONE of the following: | |
+ | |
+ a) distribute the executables and library files of the software, | |
+ together with instructions (in the manual page or equivalent) | |
+ on where to get the original distribution. | |
+ | |
+ b) accompany the distribution with the machine-readable source of | |
+ the software. | |
+ | |
+ c) give non-standard executables non-standard names, with | |
+ instructions on where to get the original software distribution. | |
+ | |
+ d) make other distribution arrangements with the author. | |
+ | |
+ 4. You may modify and include the part of the software into any other | |
+ software (possibly commercial). But some files in the distribution | |
+ are not written by the author, so that they are not under this terms. | |
+ | |
+ They are gc.c(partly), utils.c(partly), regex.[ch], st.[ch] and some | |
+ files under the ./missing directory. See each file for the copying | |
+ condition. | |
+ | |
+ 5. The scripts and library files supplied as input to or produced as | |
+ output from the software do not automatically fall under the | |
+ copyright of the software, but belong to whomever generated them, | |
+ and may be sold commercially, and may be aggregated with this | |
+ software. | |
+ | |
+ 6. THIS SOFTWARE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR | |
+ IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED | |
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
+ PURPOSE. | |
+ | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/Rakefile b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/Rakefile | |
new file mode 100644 | |
index 0000000..b66de5b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/Rakefile | |
@@ -0,0 +1,309 @@ | |
+begin | |
+ require 'rake/gempackagetask' | |
+rescue LoadError | |
+end | |
+require 'rake/clean' | |
+ | |
+require 'rbconfig' | |
+include Config | |
+ | |
+ON_WINDOWS = RUBY_PLATFORM =~ /mswin32/i | |
+PKG_NAME = 'json' | |
+PKG_VERSION = File.read('VERSION').chomp | |
+PKG_FILES = FileList["**/*"].exclude(/CVS|pkg|coverage|Makefile/).exclude(/\.(so|bundle|o|#{CONFIG['DLEXT']})$/) | |
+EXT_ROOT_DIR = 'ext/json/ext' | |
+EXT_PARSER_DIR = "#{EXT_ROOT_DIR}/parser" | |
+EXT_PARSER_DL = "#{EXT_ROOT_DIR}/parser.#{CONFIG['DLEXT']}" | |
+EXT_PARSER_SRC = "#{EXT_PARSER_DIR}/parser.c" | |
+PKG_FILES << EXT_PARSER_SRC | |
+EXT_GENERATOR_DIR = "#{EXT_ROOT_DIR}/generator" | |
+EXT_GENERATOR_DL = "#{EXT_ROOT_DIR}/generator.#{CONFIG['DLEXT']}" | |
+EXT_GENERATOR_SRC = "#{EXT_GENERATOR_DIR}/generator.c" | |
+RAGEL_CODEGEN = %w[rlcodegen rlgen-cd].find { |c| system(c, '-v') } | |
+RAGEL_DOTGEN = %w[rlgen-dot rlgen-cd].find { |c| system(c, '-v') } | |
+RAGEL_PATH = "#{EXT_PARSER_DIR}/parser.rl" | |
+CLEAN.include 'doc', 'coverage', FileList['diagrams/*.*'], | |
+ FileList["ext/**/*.{so,bundle,#{CONFIG['DLEXT']},o,obj,pdb,lib,manifest,exp,def}"], | |
+ FileList["ext/**/Makefile"] | |
+ | |
+ | |
+desc "Installing library (pure)" | |
+task :install_pure => :version do | |
+ ruby 'install.rb' | |
+end | |
+ | |
+task :install_ext_really do | |
+ sitearchdir = CONFIG["sitearchdir"] | |
+ cd 'ext' do | |
+ for file in Dir["json/ext/*.#{CONFIG['DLEXT']}"] | |
+ d = File.join(sitearchdir, file) | |
+ mkdir_p File.dirname(d) | |
+ install(file, d) | |
+ end | |
+ end | |
+end | |
+ | |
+desc "Installing library (extension)" | |
+task :install_ext => [ :compile, :install_pure, :install_ext_really ] | |
+ | |
+task :install => :install_ext | |
+ | |
+desc "Compiling extension" | |
+task :compile => [ EXT_PARSER_DL, EXT_GENERATOR_DL ] | |
+ | |
+file EXT_PARSER_DL => EXT_PARSER_SRC do | |
+ cd EXT_PARSER_DIR do | |
+ ruby 'extconf.rb' | |
+ if ON_WINDOWS | |
+ sh 'nmake' | |
+ sh "mt -manifest parser.#{CONFIG['DLEXT']}.manifest -outputresource:parser.#{CONFIG['DLEXT']};2" | |
+ else | |
+ sh 'make' | |
+ end | |
+ end | |
+ cp "#{EXT_PARSER_DIR}/parser.#{CONFIG['DLEXT']}", EXT_ROOT_DIR | |
+end | |
+ | |
+file EXT_GENERATOR_DL => EXT_GENERATOR_SRC do | |
+ cd EXT_GENERATOR_DIR do | |
+ ruby 'extconf.rb' | |
+ if ON_WINDOWS | |
+ sh 'nmake' | |
+ sh "mt -manifest generator.#{CONFIG['DLEXT']}.manifest -outputresource:generator.#{CONFIG['DLEXT']};2" | |
+ else | |
+ sh 'make' | |
+ end | |
+ end | |
+ cp "#{EXT_GENERATOR_DIR}/generator.#{CONFIG['DLEXT']}", EXT_ROOT_DIR | |
+end | |
+ | |
+desc "Generate parser with ragel" | |
+task :ragel => EXT_PARSER_SRC | |
+ | |
+task :ragel_clean do | |
+ rm_rf EXT_PARSER_SRC | |
+end | |
+ | |
+file EXT_PARSER_SRC => RAGEL_PATH do | |
+ cd EXT_PARSER_DIR do | |
+ sh "ragel -x parser.rl | #{RAGEL_CODEGEN} -G2" | |
+ end | |
+end | |
+ | |
+desc "Generate diagrams of ragel parser (ps)" | |
+task :ragel_dot_ps do | |
+ root = 'diagrams' | |
+ specs = [] | |
+ File.new(RAGEL_PATH).grep(/^\s*machine\s*(\S+);\s*$/) { specs << $1 } | |
+ for s in specs | |
+ sh "ragel -x #{RAGEL_PATH} -S#{s} | #{RAGEL_DOTGEN} -p|dot -Tps -o#{root}/#{s}.ps" | |
+ end | |
+end | |
+ | |
+desc "Generate diagrams of ragel parser (png)" | |
+task :ragel_dot_png do | |
+ root = 'diagrams' | |
+ specs = [] | |
+ File.new(RAGEL_PATH).grep(/^\s*machine\s*(\S+);\s*$/) { specs << $1 } | |
+ for s in specs | |
+ sh "ragel -x #{RAGEL_PATH} -S#{s} | #{RAGEL_DOTGEN} -p|dot -Tpng -o#{root}/#{s}.png" | |
+ end | |
+end | |
+ | |
+desc "Generate diagrams of ragel parser" | |
+task :ragel_dot => [ :ragel_dot_png, :ragel_dot_ps ] | |
+ | |
+desc "Testing library (pure ruby)" | |
+task :test_pure => :clean do | |
+ ruby '-v -I lib tests/runner.rb' | |
+end | |
+ | |
+desc "Testing library (extension)" | |
+task :test_ext => :compile do | |
+ ruby '-v -I ext:lib tests/runner.rb' | |
+end | |
+ | |
+desc "Benchmarking parser (pure)" | |
+task :benchmark_parser_pure do | |
+ ruby '-I lib benchmarks/benchmark_parser.rb pure' | |
+end | |
+ | |
+desc "Benchmarking generator (pure)" | |
+task :benchmark_generator_pure do | |
+ ruby '-I lib benchmarks/benchmark_generator.rb pure' | |
+ ruby 'benchmarks/benchmark_rails.rb' | |
+end | |
+ | |
+desc "Benchmarking library (pure)" | |
+task :benchmark_pure => [ :benchmark_parser_pure, :benchmark_generator_pure ] | |
+ | |
+desc "Benchmarking parser (extension)" | |
+task :benchmark_parser_ext => :compile do | |
+ ruby '-I ext:lib benchmarks/benchmark_parser.rb ext' | |
+end | |
+ | |
+desc "Benchmarking generator (extension)" | |
+task :benchmark_generator_ext => :compile do | |
+ ruby '-I ext:lib benchmarks/benchmark_generator.rb ext' | |
+ ruby 'benchmarks/benchmark_rails.rb' | |
+end | |
+ | |
+desc "Benchmarking library (extension)" | |
+task :benchmark_ext => [ :benchmark_parser_ext, :benchmark_generator_ext ] | |
+ | |
+task :benchmark do | |
+ puts "Benchmarking extension variant" | |
+ Rake::Task[:benchmark_ext].invoke | |
+ puts "Benchmarking pure variant" | |
+ Rake::Task[:benchmark_pure].invoke | |
+end | |
+ | |
+desc "Testing library with coverage" # XXX broken | |
+task :coverage do | |
+ system 'RUBYOPT="" rcov -x tests -Ilib tests/runner.rb' | |
+end | |
+ | |
+desc "Create RDOC documentation" | |
+task :doc => [ :version, EXT_PARSER_SRC ] do | |
+ sh "rdoc -m JSON -S -o doc lib/json.rb #{FileList['lib/json/**/*.rb']} #{EXT_PARSER_SRC} #{EXT_GENERATOR_SRC}" | |
+end | |
+ | |
+if defined? Gem | |
+ spec_pure = Gem::Specification.new do |s| | |
+ s.name = 'json_pure' | |
+ s.version = PKG_VERSION | |
+ s.summary = "A JSON implementation in Ruby" | |
+ s.description = "" | |
+ | |
+ s.files = PKG_FILES | |
+ | |
+ s.require_path = 'lib' | |
+ | |
+ s.bindir = "bin" | |
+ s.executables = ["edit_json.rb"] | |
+ s.default_executable = "edit_json.rb" | |
+ | |
+ s.has_rdoc = true | |
+ s.rdoc_options << | |
+ '--title' << 'JSON -- A JSON implemention' << | |
+ '--main' << 'JSON' << '--line-numbers' | |
+ s.test_files << 'tests/runner.rb' | |
+ | |
+ s.author = "Florian Frank" | |
+ s.email = "[email protected]" | |
+ s.homepage = "http://json.rubyforge.org" | |
+ s.rubyforge_project = "json" | |
+ end | |
+ | |
+ Rake::GemPackageTask.new(spec_pure) do |pkg| | |
+ pkg.need_tar = true | |
+ pkg.package_files += PKG_FILES | |
+ end | |
+ | |
+ spec_ext = Gem::Specification.new do |s| | |
+ s.name = 'json' | |
+ s.version = PKG_VERSION | |
+ s.summary = "A JSON implementation as a Ruby extension" | |
+ s.description = "" | |
+ | |
+ s.files = PKG_FILES | |
+ | |
+ s.extensions << | |
+ "#{EXT_PARSER_DIR}/extconf.rb" << | |
+ "#{EXT_GENERATOR_DIR}/extconf.rb" | |
+ | |
+ s.require_path = EXT_ROOT_DIR | |
+ s.require_paths << 'ext' | |
+ s.require_paths << 'lib' | |
+ | |
+ s.bindir = "bin" | |
+ s.executables = ["edit_json.rb"] | |
+ s.default_executable = "edit_json.rb" | |
+ | |
+ s.has_rdoc = true | |
+ s.rdoc_options << | |
+ '--title' << 'JSON -- A JSON implemention' << | |
+ '--main' << 'JSON' << '--line-numbers' | |
+ s.test_files << 'tests/runner.rb' | |
+ | |
+ s.author = "Florian Frank" | |
+ s.email = "[email protected]" | |
+ s.homepage = "http://json.rubyforge.org" | |
+ s.rubyforge_project = "json" | |
+ end | |
+ | |
+ Rake::GemPackageTask.new(spec_ext) do |pkg| | |
+ pkg.need_tar = true | |
+ pkg.package_files += PKG_FILES | |
+ end | |
+ | |
+ task :package_win => :compile do | |
+ mkdir_p 'pkg' | |
+ spec_win_ext = Gem::Specification.new do |s| | |
+ s.name = 'json' | |
+ s.platform = Gem::Platform::WIN32 | |
+ s.version = PKG_VERSION | |
+ s.summary = "A JSON implementation as a Ruby extension" | |
+ s.description = "" | |
+ | |
+ s.files = PKG_FILES.to_a << | |
+ "#{EXT_ROOT_DIR}/parser.#{CONFIG['DLEXT']}" << | |
+ "#{EXT_ROOT_DIR}/generator.#{CONFIG['DLEXT']}" | |
+ | |
+ s.require_path = EXT_ROOT_DIR | |
+ s.require_paths << 'ext' | |
+ s.require_paths << 'lib' | |
+ | |
+ s.bindir = "bin" | |
+ s.executables = ["edit_json.rb", "prettify_json.rb"] | |
+ s.default_executable = "edit_json.rb" | |
+ | |
+ s.has_rdoc = true | |
+ s.rdoc_options << | |
+ '--title' << 'JSON -- A JSON implemention' << | |
+ '--main' << 'JSON' << '--line-numbers' | |
+ s.test_files << 'tests/runner.rb' | |
+ | |
+ s.author = "Florian Frank" | |
+ s.email = "[email protected]" | |
+ s.homepage = "http://json.rubyforge.org" | |
+ s.rubyforge_project = "json" | |
+ end | |
+ | |
+ gem_file = "json-#{spec_win_ext.version}-#{spec_win_ext.platform}.gem" | |
+ Gem::Builder.new(spec_win_ext).build | |
+ mv gem_file, 'pkg' | |
+ end | |
+end | |
+ | |
+task :mrproper => [ :ragel_clean, :clean ] do | |
+ for dir in [ EXT_PARSER_DIR, EXT_GENERATOR_DIR ] | |
+ cd(dir) { rm_f 'Makefile' } | |
+ end | |
+end | |
+ | |
+desc m = "Writing version information for #{PKG_VERSION}" | |
+task :version do | |
+ puts m | |
+ File.open(File.join('lib', 'json', 'version.rb'), 'w') do |v| | |
+ v.puts <<EOT | |
+module JSON | |
+ # JSON version | |
+ VERSION = '#{PKG_VERSION}' | |
+ VERSION_ARRAY = VERSION.split(/\\./).map { |x| x.to_i } # :nodoc: | |
+ VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc: | |
+ VERSION_MINOR = VERSION_ARRAY[1] # :nodoc: | |
+ VERSION_BUILD = VERSION_ARRAY[2] # :nodoc: | |
+ VARIANT_BINARY = #{!!ON_WINDOWS} | |
+end | |
+EOT | |
+ end | |
+end | |
+ | |
+if ON_WINDOWS | |
+ task :release => [ :version, :clean, :package_win ] | |
+else | |
+ task :release => [ :version, :mrproper, :package ] | |
+end | |
+ | |
+task :default => [ :version, :compile ] | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/TODO b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/TODO | |
new file mode 100644 | |
index 0000000..8b13789 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/TODO | |
@@ -0,0 +1 @@ | |
+ | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/VERSION b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/VERSION | |
new file mode 100644 | |
index 0000000..781dcb0 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/VERSION | |
@@ -0,0 +1 @@ | |
+1.1.3 | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark.txt b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark.txt | |
new file mode 100644 | |
index 0000000..255d9fc | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark.txt | |
@@ -0,0 +1,133 @@ | |
+(in /home/flori/json) | |
+Ragel Code Generator version 5.17 January 2007 | |
+Copyright (c) 2001-2006 by Adrian Thurston | |
+Benchmarking extension variant | |
+First run for warmup. | |
+Running 'BC_Parser' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+parser: 10.005944 10.010000 9.970000 0.040000 0.000000 0.000000 | |
+ 5091 508.591409 0.001966 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Parser' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+parser: 10.006881 10.000000 9.950000 0.050000 0.000000 0.000000 | |
+ 5055 505.500000 0.001978 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+ | |
+[null,false,true,"f\u00d6\u00df\u00c4r",["n\u20acst\u20acd",true],{"quux":true,"foo\u00df":"b\u00e4r"}] | |
+First run for warmup. | |
+Running 'BC_Generator' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator_pretty: 10.001066 10.010000 9.990000 0.020000 0.000000 0.000000 | |
+ 3492 348.851149 0.002867 | |
+generator_fast : 10.002910 10.000000 9.980000 0.020000 0.000000 0.000000 | |
+ 5416 541.600000 0.001846 | |
+generator_safe : 10.003107 10.010000 10.000000 0.010000 0.000000 0.000000 | |
+ 4926 492.107892 0.002032 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Generator' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator_pretty: 10.009150 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 3511 350.749251 0.002851 | |
+generator_fast : 10.014407 10.020000 10.020000 0.000000 0.000000 0.000000 | |
+ 5411 540.019960 0.001852 | |
+generator_safe : 10.010055 10.000000 10.000000 0.000000 0.000000 0.000000 | |
+ 4933 493.300000 0.002027 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+Comparison in BC_Generator: | |
+ secs/call speed | |
+generator_fast : 0.002 -> 1.540x | |
+generator_safe : 0.002 -> 1.406x | |
+generator_pretty: 0.003 -> 1.000x | |
+-------------------------------------------------------------------------------- | |
+ | |
+[null, false, true, "f\u00d6\u00df\u00c4r", ["n\u20acst\u20acd", true], {quux: true, "foo\u00df": "b\u00e4r"}] | |
+First run for warmup. | |
+Running 'BC_Rails' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator: 10.026043 10.020000 10.020000 0.000000 0.000000 0.000000 | |
+ 239 23.852295 0.041925 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Rails' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator: 10.010931 10.020000 10.010000 0.010000 0.000000 0.000000 | |
+ 238 23.752495 0.042101 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+ | |
+Benchmarking pure variant | |
+First run for warmup. | |
+Running 'BC_Parser' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+parser: 10.022352 10.030000 10.020000 0.010000 0.000000 0.000000 | |
+ 288 28.713858 0.034826 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Parser' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+parser: 10.006552 10.000000 10.000000 0.000000 0.000000 0.000000 | |
+ 289 28.900000 0.034602 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+ | |
+[null,false,true,"f\u00d6\u00df\u00c4r",["n\u20acst\u20acd",true],{"quux":true,"foo\u00df":"b\u00e4r"}] | |
+First run for warmup. | |
+Running 'BC_Generator' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator_fast : 10.011644 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 411 41.058941 0.024355 | |
+generator_safe : 10.007100 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 352 35.164835 0.028438 | |
+generator_pretty: 10.008156 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 341 34.065934 0.029355 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Generator' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator_fast : 10.005185 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 411 41.058941 0.024355 | |
+generator_safe : 10.006932 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 351 35.064935 0.028519 | |
+generator_pretty: 10.007414 10.000000 10.000000 0.000000 0.000000 0.000000 | |
+ 340 34.000000 0.029412 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+Comparison in BC_Generator: | |
+ secs/call speed | |
+generator_fast : 0.024 -> 1.208x | |
+generator_safe : 0.029 -> 1.031x | |
+generator_pretty: 0.029 -> 1.000x | |
+-------------------------------------------------------------------------------- | |
+ | |
+[null, false, true, "f\u00d6\u00df\u00c4r", ["n\u20acst\u20acd", true], {quux: true, "foo\u00df": "b\u00e4r"}] | |
+First run for warmup. | |
+Running 'BC_Rails' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator: 10.005748 10.000000 10.000000 0.000000 0.000000 0.000000 | |
+ 240 24.000000 0.041667 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+Running 'BC_Rails' for a duration of 10 secs per method: | |
+ real total utime stime cutime cstime | |
+generator: 10.006764 10.010000 10.010000 0.000000 0.000000 0.000000 | |
+ 239 23.876124 0.041883 | |
+ calls calls/sec secs/call | |
+-------------------------------------------------------------------------------- | |
+================================================================================ | |
+ | |
+ | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_generator.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_generator.rb | |
new file mode 100755 | |
index 0000000..f0173bc | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_generator.rb | |
@@ -0,0 +1,48 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'bullshit' | |
+$KCODE='utf8' | |
+if ARGV.shift == 'pure' | |
+ require 'json/pure' | |
+else | |
+ require 'json/ext' | |
+end | |
+ | |
+class BC_Generator < Bullshit::TimeCase | |
+ include JSON | |
+ | |
+ warmup true | |
+ duration 10 | |
+ | |
+ def setup | |
+ a = [ nil, false, true, "fÖßÄr", [ "n€st€d", true ], { "fooß" => "bär", "quux" => true } ] | |
+ puts JSON[a] | |
+ @big = a * 100 | |
+ end | |
+ | |
+ def benchmark_generator_fast | |
+ @result = JSON.fast_generate(@big) | |
+ end | |
+ | |
+ def reset_benchmark_generator_fast | |
+ @result and @result.size > 2 + 6 * @big.size or raise @result.to_s | |
+ end | |
+ | |
+ def benchmark_generator_safe | |
+ @result = JSON.generate(@big) | |
+ end | |
+ | |
+ def reset_benchmark_generator_safe | |
+ @result and @result.size > 2 + 6 * @big.size or raise @result.to_s | |
+ end | |
+ | |
+ def benchmark_generator_pretty | |
+ @result = JSON.pretty_generate(@big) | |
+ end | |
+ | |
+ def reset_benchmark_generator_pretty | |
+ @result and @result.size > 2 + 6 * @big.size or raise @result.to_s | |
+ end | |
+ | |
+ compare :generator_fast, :generator_safe, :generator_pretty | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_parser.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_parser.rb | |
new file mode 100755 | |
index 0000000..486cbaa | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_parser.rb | |
@@ -0,0 +1,26 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'bullshit' | |
+if ARGV.shift == 'pure' | |
+ require 'json/pure' | |
+else | |
+ require 'json/ext' | |
+end | |
+ | |
+class BC_Parser < Bullshit::TimeCase | |
+ include JSON | |
+ | |
+ warmup true | |
+ duration 10 | |
+ | |
+ def setup | |
+ a = [ nil, false, true, "fÖß\nÄr", [ "n€st€d", true ], { "fooß" => "bär", "qu\r\nux" => true } ] | |
+ @big = a * 100 | |
+ @json = JSON.generate(@big) | |
+ end | |
+ | |
+ def benchmark_parser | |
+ a = JSON.parse(@json) | |
+ a == @big or raise "not equal" | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_rails.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_rails.rb | |
new file mode 100755 | |
index 0000000..a9afb0d | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/benchmarks/benchmark_rails.rb | |
@@ -0,0 +1,26 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'bullshit' | |
+require 'active_support' | |
+ | |
+class BC_Rails < Bullshit::TimeCase | |
+ warmup true | |
+ duration 10 | |
+ | |
+ def setup | |
+ a = [ nil, false, true, "fÖßÄr", [ "n€st€d", true ], { "fooß" => "bär", "quux" => true } ] | |
+ puts a.to_json | |
+ @big = a * 100 | |
+ end | |
+ | |
+ def benchmark_generator | |
+ @result = @big.to_json | |
+ end | |
+ | |
+ def reset_benchmark_generator | |
+ @result and @result.size > 2 + 6 * @big.size or raise @result.to_s | |
+ if stack = Thread.current[:json_reference_stack] | |
+ stack.clear | |
+ end | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/edit_json.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/edit_json.rb | |
new file mode 100755 | |
index 0000000..076ed4a | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/edit_json.rb | |
@@ -0,0 +1,10 @@ | |
+#!/usr/bin/env ruby | |
+$KCODE = 'U' | |
+require 'json/editor' | |
+ | |
+filename, encoding = ARGV | |
+JSON::Editor.start(encoding) do |window| | |
+ if filename | |
+ window.file_open(filename) | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/prettify_json.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/prettify_json.rb | |
new file mode 100755 | |
index 0000000..2f9a583 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/bin/prettify_json.rb | |
@@ -0,0 +1,76 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+$KCODE = 'U' | |
+require 'json' | |
+require 'fileutils' | |
+include FileUtils | |
+ | |
+# Parses the argument array _args_, according to the pattern _s_, to | |
+# retrieve the single character command line options from it. If _s_ is | |
+# 'xy:' an option '-x' without an option argument is searched, and an | |
+# option '-y foo' with an option argument ('foo'). | |
+# | |
+# An option hash is returned with all found options set to true or the | |
+# found option argument. | |
+def go(s, args = ARGV) | |
+ b, v = s.scan(/(.)(:?)/).inject([{},{}]) { |t,(o,a)| | |
+ t[a.empty? ? 0 : 1][o] = a.empty? ? false : nil | |
+ t | |
+ } | |
+ while a = args.shift | |
+ a !~ /\A-(.+)/ and args.unshift a and break | |
+ p = $1 | |
+ until p == '' | |
+ o = p.slice!(0, 1) | |
+ if v.key?(o) | |
+ v[o] = if p == '' then args.shift or break 1 else p end | |
+ break | |
+ elsif b.key?(o) | |
+ b[o] = true | |
+ else | |
+ args.unshift a | |
+ break 1 | |
+ end | |
+ end and break | |
+ end | |
+ b.merge(v) | |
+end | |
+ | |
+opts = go 'slhi:', args = ARGV.dup | |
+if opts['h'] || opts['l'] && opts['s'] | |
+ puts <<EOT | |
+Usage: #{File.basename($0)} [OPTION] [FILE] | |
+ | |
+If FILE is skipped, this scripts waits for input from STDIN. Otherwise | |
+FILE is opened, read, and used as input for the prettifier. | |
+ | |
+OPTION can be | |
+ -s to output the shortest possible JSON (precludes -l) | |
+ -l to output a longer, better formatted JSON (precludes -s) | |
+ -i EXT prettifies FILE in place, saving a backup to FILE.EXT | |
+ -h this help | |
+EOT | |
+ exit 0 | |
+end | |
+ | |
+filename = nil | |
+json = JSON[ | |
+ if args.empty? | |
+ STDIN.read | |
+ else | |
+ File.read filename = args.first | |
+ end | |
+] | |
+ | |
+output = if opts['s'] | |
+ JSON.fast_generate json | |
+else # default is -l | |
+ JSON.pretty_generate json | |
+end | |
+ | |
+if opts['i'] && filename | |
+ cp filename, "#{filename}.#{opts['i']}" | |
+ File.open(filename, 'w') { |f| f.puts output } | |
+else | |
+ puts output | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/example.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/example.json | |
new file mode 100644 | |
index 0000000..88b4e82 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/example.json | |
@@ -0,0 +1 @@ | |
+{"a":2,"b":3.141,"TIME":"2007-03-14T11:52:40","c":"c","d":[1,"b",3.14],"COUNT":666,"e":{"foo":"bar"},"foo":"B\u00e4r","g":"\u677e\u672c\u884c\u5f18","h":1000.0,"bar":"\u00a9 \u2260 \u20ac!","i":0.001,"j":"\ud840\udc01"} | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/index.html b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/index.html | |
new file mode 100644 | |
index 0000000..abe6fdb | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/index.html | |
@@ -0,0 +1,38 @@ | |
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" | |
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> | |
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en"> | |
+ <head> | |
+ <title>Javascript Example</title> | |
+ <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/> | |
+ <script src="prototype.js" type="text/javascript"></script> | |
+ </head> | |
+ | |
+ <body> | |
+ <h1>Fetching object from server</h1> | |
+ <div id="list"> | |
+ Wait...<br/> | |
+ <noscript><p>Switch on Javascript!</p></noscript> | |
+ </div> | |
+ <script type="text/javascript"> | |
+ <!-- | |
+ function pollJSON() { | |
+ new Ajax.Request('/json', | |
+ { | |
+ method: 'get', | |
+ onSuccess: function(transport) { | |
+ var response = transport.responseText || "no response text"; | |
+ response = eval("(" + response + ")"); | |
+ var text = ""; | |
+ for (var k in response) { | |
+ text = text + "<b>" + k + "</b>: " + response[k] + "<br/>" | |
+ } | |
+ $("list").update(text); | |
+ }, | |
+ onFailure: function() { alert('Something went wrong...') } | |
+ }); | |
+ } | |
+ new PeriodicalExecuter(pollJSON, 1); | |
+ --> | |
+ </script> | |
+ </body> | |
+</html> | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/prototype.js b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/prototype.js | |
new file mode 100644 | |
index 0000000..5c73462 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/data/prototype.js | |
@@ -0,0 +1,4184 @@ | |
+/* Prototype JavaScript framework, version 1.6.0 | |
+ * (c) 2005-2007 Sam Stephenson | |
+ * | |
+ * Prototype is freely distributable under the terms of an MIT-style license. | |
+ * For details, see the Prototype web site: http://www.prototypejs.org/ | |
+ * | |
+ *--------------------------------------------------------------------------*/ | |
+ | |
+var Prototype = { | |
+ Version: '1.6.0', | |
+ | |
+ Browser: { | |
+ IE: !!(window.attachEvent && !window.opera), | |
+ Opera: !!window.opera, | |
+ WebKit: navigator.userAgent.indexOf('AppleWebKit/') > -1, | |
+ Gecko: navigator.userAgent.indexOf('Gecko') > -1 && navigator.userAgent.indexOf('KHTML') == -1, | |
+ MobileSafari: !!navigator.userAgent.match(/Apple.*Mobile.*Safari/) | |
+ }, | |
+ | |
+ BrowserFeatures: { | |
+ XPath: !!document.evaluate, | |
+ ElementExtensions: !!window.HTMLElement, | |
+ SpecificElementExtensions: | |
+ document.createElement('div').__proto__ && | |
+ document.createElement('div').__proto__ !== | |
+ document.createElement('form').__proto__ | |
+ }, | |
+ | |
+ ScriptFragment: '<script[^>]*>([\\S\\s]*?)<\/script>', | |
+ JSONFilter: /^\/\*-secure-([\s\S]*)\*\/\s*$/, | |
+ | |
+ emptyFunction: function() { }, | |
+ K: function(x) { return x } | |
+}; | |
+ | |
+if (Prototype.Browser.MobileSafari) | |
+ Prototype.BrowserFeatures.SpecificElementExtensions = false; | |
+ | |
+if (Prototype.Browser.WebKit) | |
+ Prototype.BrowserFeatures.XPath = false; | |
+ | |
+/* Based on Alex Arnell's inheritance implementation. */ | |
+var Class = { | |
+ create: function() { | |
+ var parent = null, properties = $A(arguments); | |
+ if (Object.isFunction(properties[0])) | |
+ parent = properties.shift(); | |
+ | |
+ function klass() { | |
+ this.initialize.apply(this, arguments); | |
+ } | |
+ | |
+ Object.extend(klass, Class.Methods); | |
+ klass.superclass = parent; | |
+ klass.subclasses = []; | |
+ | |
+ if (parent) { | |
+ var subclass = function() { }; | |
+ subclass.prototype = parent.prototype; | |
+ klass.prototype = new subclass; | |
+ parent.subclasses.push(klass); | |
+ } | |
+ | |
+ for (var i = 0; i < properties.length; i++) | |
+ klass.addMethods(properties[i]); | |
+ | |
+ if (!klass.prototype.initialize) | |
+ klass.prototype.initialize = Prototype.emptyFunction; | |
+ | |
+ klass.prototype.constructor = klass; | |
+ | |
+ return klass; | |
+ } | |
+}; | |
+ | |
+Class.Methods = { | |
+ addMethods: function(source) { | |
+ var ancestor = this.superclass && this.superclass.prototype; | |
+ var properties = Object.keys(source); | |
+ | |
+ if (!Object.keys({ toString: true }).length) | |
+ properties.push("toString", "valueOf"); | |
+ | |
+ for (var i = 0, length = properties.length; i < length; i++) { | |
+ var property = properties[i], value = source[property]; | |
+ if (ancestor && Object.isFunction(value) && | |
+ value.argumentNames().first() == "$super") { | |
+ var method = value, value = Object.extend((function(m) { | |
+ return function() { return ancestor[m].apply(this, arguments) }; | |
+ })(property).wrap(method), { | |
+ valueOf: function() { return method }, | |
+ toString: function() { return method.toString() } | |
+ }); | |
+ } | |
+ this.prototype[property] = value; | |
+ } | |
+ | |
+ return this; | |
+ } | |
+}; | |
+ | |
+var Abstract = { }; | |
+ | |
+Object.extend = function(destination, source) { | |
+ for (var property in source) | |
+ destination[property] = source[property]; | |
+ return destination; | |
+}; | |
+ | |
+Object.extend(Object, { | |
+ inspect: function(object) { | |
+ try { | |
+ if (object === undefined) return 'undefined'; | |
+ if (object === null) return 'null'; | |
+ return object.inspect ? object.inspect() : object.toString(); | |
+ } catch (e) { | |
+ if (e instanceof RangeError) return '...'; | |
+ throw e; | |
+ } | |
+ }, | |
+ | |
+ toJSON: function(object) { | |
+ var type = typeof object; | |
+ switch (type) { | |
+ case 'undefined': | |
+ case 'function': | |
+ case 'unknown': return; | |
+ case 'boolean': return object.toString(); | |
+ } | |
+ | |
+ if (object === null) return 'null'; | |
+ if (object.toJSON) return object.toJSON(); | |
+ if (Object.isElement(object)) return; | |
+ | |
+ var results = []; | |
+ for (var property in object) { | |
+ var value = Object.toJSON(object[property]); | |
+ if (value !== undefined) | |
+ results.push(property.toJSON() + ': ' + value); | |
+ } | |
+ | |
+ return '{' + results.join(', ') + '}'; | |
+ }, | |
+ | |
+ toQueryString: function(object) { | |
+ return $H(object).toQueryString(); | |
+ }, | |
+ | |
+ toHTML: function(object) { | |
+ return object && object.toHTML ? object.toHTML() : String.interpret(object); | |
+ }, | |
+ | |
+ keys: function(object) { | |
+ var keys = []; | |
+ for (var property in object) | |
+ keys.push(property); | |
+ return keys; | |
+ }, | |
+ | |
+ values: function(object) { | |
+ var values = []; | |
+ for (var property in object) | |
+ values.push(object[property]); | |
+ return values; | |
+ }, | |
+ | |
+ clone: function(object) { | |
+ return Object.extend({ }, object); | |
+ }, | |
+ | |
+ isElement: function(object) { | |
+ return object && object.nodeType == 1; | |
+ }, | |
+ | |
+ isArray: function(object) { | |
+ return object && object.constructor === Array; | |
+ }, | |
+ | |
+ isHash: function(object) { | |
+ return object instanceof Hash; | |
+ }, | |
+ | |
+ isFunction: function(object) { | |
+ return typeof object == "function"; | |
+ }, | |
+ | |
+ isString: function(object) { | |
+ return typeof object == "string"; | |
+ }, | |
+ | |
+ isNumber: function(object) { | |
+ return typeof object == "number"; | |
+ }, | |
+ | |
+ isUndefined: function(object) { | |
+ return typeof object == "undefined"; | |
+ } | |
+}); | |
+ | |
+Object.extend(Function.prototype, { | |
+ argumentNames: function() { | |
+ var names = this.toString().match(/^[\s\(]*function[^(]*\((.*?)\)/)[1].split(",").invoke("strip"); | |
+ return names.length == 1 && !names[0] ? [] : names; | |
+ }, | |
+ | |
+ bind: function() { | |
+ if (arguments.length < 2 && arguments[0] === undefined) return this; | |
+ var __method = this, args = $A(arguments), object = args.shift(); | |
+ return function() { | |
+ return __method.apply(object, args.concat($A(arguments))); | |
+ } | |
+ }, | |
+ | |
+ bindAsEventListener: function() { | |
+ var __method = this, args = $A(arguments), object = args.shift(); | |
+ return function(event) { | |
+ return __method.apply(object, [event || window.event].concat(args)); | |
+ } | |
+ }, | |
+ | |
+ curry: function() { | |
+ if (!arguments.length) return this; | |
+ var __method = this, args = $A(arguments); | |
+ return function() { | |
+ return __method.apply(this, args.concat($A(arguments))); | |
+ } | |
+ }, | |
+ | |
+ delay: function() { | |
+ var __method = this, args = $A(arguments), timeout = args.shift() * 1000; | |
+ return window.setTimeout(function() { | |
+ return __method.apply(__method, args); | |
+ }, timeout); | |
+ }, | |
+ | |
+ wrap: function(wrapper) { | |
+ var __method = this; | |
+ return function() { | |
+ return wrapper.apply(this, [__method.bind(this)].concat($A(arguments))); | |
+ } | |
+ }, | |
+ | |
+ methodize: function() { | |
+ if (this._methodized) return this._methodized; | |
+ var __method = this; | |
+ return this._methodized = function() { | |
+ return __method.apply(null, [this].concat($A(arguments))); | |
+ }; | |
+ } | |
+}); | |
+ | |
+Function.prototype.defer = Function.prototype.delay.curry(0.01); | |
+ | |
+Date.prototype.toJSON = function() { | |
+ return '"' + this.getUTCFullYear() + '-' + | |
+ (this.getUTCMonth() + 1).toPaddedString(2) + '-' + | |
+ this.getUTCDate().toPaddedString(2) + 'T' + | |
+ this.getUTCHours().toPaddedString(2) + ':' + | |
+ this.getUTCMinutes().toPaddedString(2) + ':' + | |
+ this.getUTCSeconds().toPaddedString(2) + 'Z"'; | |
+}; | |
+ | |
+var Try = { | |
+ these: function() { | |
+ var returnValue; | |
+ | |
+ for (var i = 0, length = arguments.length; i < length; i++) { | |
+ var lambda = arguments[i]; | |
+ try { | |
+ returnValue = lambda(); | |
+ break; | |
+ } catch (e) { } | |
+ } | |
+ | |
+ return returnValue; | |
+ } | |
+}; | |
+ | |
+RegExp.prototype.match = RegExp.prototype.test; | |
+ | |
+RegExp.escape = function(str) { | |
+ return String(str).replace(/([.*+?^=!:${}()|[\]\/\\])/g, '\\$1'); | |
+}; | |
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+var PeriodicalExecuter = Class.create({ | |
+ initialize: function(callback, frequency) { | |
+ this.callback = callback; | |
+ this.frequency = frequency; | |
+ this.currentlyExecuting = false; | |
+ | |
+ this.registerCallback(); | |
+ }, | |
+ | |
+ registerCallback: function() { | |
+ this.timer = setInterval(this.onTimerEvent.bind(this), this.frequency * 1000); | |
+ }, | |
+ | |
+ execute: function() { | |
+ this.callback(this); | |
+ }, | |
+ | |
+ stop: function() { | |
+ if (!this.timer) return; | |
+ clearInterval(this.timer); | |
+ this.timer = null; | |
+ }, | |
+ | |
+ onTimerEvent: function() { | |
+ if (!this.currentlyExecuting) { | |
+ try { | |
+ this.currentlyExecuting = true; | |
+ this.execute(); | |
+ } finally { | |
+ this.currentlyExecuting = false; | |
+ } | |
+ } | |
+ } | |
+}); | |
+Object.extend(String, { | |
+ interpret: function(value) { | |
+ return value == null ? '' : String(value); | |
+ }, | |
+ specialChar: { | |
+ '\b': '\\b', | |
+ '\t': '\\t', | |
+ '\n': '\\n', | |
+ '\f': '\\f', | |
+ '\r': '\\r', | |
+ '\\': '\\\\' | |
+ } | |
+}); | |
+ | |
+Object.extend(String.prototype, { | |
+ gsub: function(pattern, replacement) { | |
+ var result = '', source = this, match; | |
+ replacement = arguments.callee.prepareReplacement(replacement); | |
+ | |
+ while (source.length > 0) { | |
+ if (match = source.match(pattern)) { | |
+ result += source.slice(0, match.index); | |
+ result += String.interpret(replacement(match)); | |
+ source = source.slice(match.index + match[0].length); | |
+ } else { | |
+ result += source, source = ''; | |
+ } | |
+ } | |
+ return result; | |
+ }, | |
+ | |
+ sub: function(pattern, replacement, count) { | |
+ replacement = this.gsub.prepareReplacement(replacement); | |
+ count = count === undefined ? 1 : count; | |
+ | |
+ return this.gsub(pattern, function(match) { | |
+ if (--count < 0) return match[0]; | |
+ return replacement(match); | |
+ }); | |
+ }, | |
+ | |
+ scan: function(pattern, iterator) { | |
+ this.gsub(pattern, iterator); | |
+ return String(this); | |
+ }, | |
+ | |
+ truncate: function(length, truncation) { | |
+ length = length || 30; | |
+ truncation = truncation === undefined ? '...' : truncation; | |
+ return this.length > length ? | |
+ this.slice(0, length - truncation.length) + truncation : String(this); | |
+ }, | |
+ | |
+ strip: function() { | |
+ return this.replace(/^\s+/, '').replace(/\s+$/, ''); | |
+ }, | |
+ | |
+ stripTags: function() { | |
+ return this.replace(/<\/?[^>]+>/gi, ''); | |
+ }, | |
+ | |
+ stripScripts: function() { | |
+ return this.replace(new RegExp(Prototype.ScriptFragment, 'img'), ''); | |
+ }, | |
+ | |
+ extractScripts: function() { | |
+ var matchAll = new RegExp(Prototype.ScriptFragment, 'img'); | |
+ var matchOne = new RegExp(Prototype.ScriptFragment, 'im'); | |
+ return (this.match(matchAll) || []).map(function(scriptTag) { | |
+ return (scriptTag.match(matchOne) || ['', ''])[1]; | |
+ }); | |
+ }, | |
+ | |
+ evalScripts: function() { | |
+ return this.extractScripts().map(function(script) { return eval(script) }); | |
+ }, | |
+ | |
+ escapeHTML: function() { | |
+ var self = arguments.callee; | |
+ self.text.data = this; | |
+ return self.div.innerHTML; | |
+ }, | |
+ | |
+ unescapeHTML: function() { | |
+ var div = new Element('div'); | |
+ div.innerHTML = this.stripTags(); | |
+ return div.childNodes[0] ? (div.childNodes.length > 1 ? | |
+ $A(div.childNodes).inject('', function(memo, node) { return memo+node.nodeValue }) : | |
+ div.childNodes[0].nodeValue) : ''; | |
+ }, | |
+ | |
+ toQueryParams: function(separator) { | |
+ var match = this.strip().match(/([^?#]*)(#.*)?$/); | |
+ if (!match) return { }; | |
+ | |
+ return match[1].split(separator || '&').inject({ }, function(hash, pair) { | |
+ if ((pair = pair.split('='))[0]) { | |
+ var key = decodeURIComponent(pair.shift()); | |
+ var value = pair.length > 1 ? pair.join('=') : pair[0]; | |
+ if (value != undefined) value = decodeURIComponent(value); | |
+ | |
+ if (key in hash) { | |
+ if (!Object.isArray(hash[key])) hash[key] = [hash[key]]; | |
+ hash[key].push(value); | |
+ } | |
+ else hash[key] = value; | |
+ } | |
+ return hash; | |
+ }); | |
+ }, | |
+ | |
+ toArray: function() { | |
+ return this.split(''); | |
+ }, | |
+ | |
+ succ: function() { | |
+ return this.slice(0, this.length - 1) + | |
+ String.fromCharCode(this.charCodeAt(this.length - 1) + 1); | |
+ }, | |
+ | |
+ times: function(count) { | |
+ return count < 1 ? '' : new Array(count + 1).join(this); | |
+ }, | |
+ | |
+ camelize: function() { | |
+ var parts = this.split('-'), len = parts.length; | |
+ if (len == 1) return parts[0]; | |
+ | |
+ var camelized = this.charAt(0) == '-' | |
+ ? parts[0].charAt(0).toUpperCase() + parts[0].substring(1) | |
+ : parts[0]; | |
+ | |
+ for (var i = 1; i < len; i++) | |
+ camelized += parts[i].charAt(0).toUpperCase() + parts[i].substring(1); | |
+ | |
+ return camelized; | |
+ }, | |
+ | |
+ capitalize: function() { | |
+ return this.charAt(0).toUpperCase() + this.substring(1).toLowerCase(); | |
+ }, | |
+ | |
+ underscore: function() { | |
+ return this.gsub(/::/, '/').gsub(/([A-Z]+)([A-Z][a-z])/,'#{1}_#{2}').gsub(/([a-z\d])([A-Z])/,'#{1}_#{2}').gsub(/-/,'_').toLowerCase(); | |
+ }, | |
+ | |
+ dasherize: function() { | |
+ return this.gsub(/_/,'-'); | |
+ }, | |
+ | |
+ inspect: function(useDoubleQuotes) { | |
+ var escapedString = this.gsub(/[\x00-\x1f\\]/, function(match) { | |
+ var character = String.specialChar[match[0]]; | |
+ return character ? character : '\\u00' + match[0].charCodeAt().toPaddedString(2, 16); | |
+ }); | |
+ if (useDoubleQuotes) return '"' + escapedString.replace(/"/g, '\\"') + '"'; | |
+ return "'" + escapedString.replace(/'/g, '\\\'') + "'"; | |
+ }, | |
+ | |
+ toJSON: function() { | |
+ return this.inspect(true); | |
+ }, | |
+ | |
+ unfilterJSON: function(filter) { | |
+ return this.sub(filter || Prototype.JSONFilter, '#{1}'); | |
+ }, | |
+ | |
+ isJSON: function() { | |
+ var str = this.replace(/\\./g, '@').replace(/"[^"\\\n\r]*"/g, ''); | |
+ return (/^[,:{}\[\]0-9.\-+Eaeflnr-u \n\r\t]*$/).test(str); | |
+ }, | |
+ | |
+ evalJSON: function(sanitize) { | |
+ var json = this.unfilterJSON(); | |
+ try { | |
+ if (!sanitize || json.isJSON()) return eval('(' + json + ')'); | |
+ } catch (e) { } | |
+ throw new SyntaxError('Badly formed JSON string: ' + this.inspect()); | |
+ }, | |
+ | |
+ include: function(pattern) { | |
+ return this.indexOf(pattern) > -1; | |
+ }, | |
+ | |
+ startsWith: function(pattern) { | |
+ return this.indexOf(pattern) === 0; | |
+ }, | |
+ | |
+ endsWith: function(pattern) { | |
+ var d = this.length - pattern.length; | |
+ return d >= 0 && this.lastIndexOf(pattern) === d; | |
+ }, | |
+ | |
+ empty: function() { | |
+ return this == ''; | |
+ }, | |
+ | |
+ blank: function() { | |
+ return /^\s*$/.test(this); | |
+ }, | |
+ | |
+ interpolate: function(object, pattern) { | |
+ return new Template(this, pattern).evaluate(object); | |
+ } | |
+}); | |
+ | |
+if (Prototype.Browser.WebKit || Prototype.Browser.IE) Object.extend(String.prototype, { | |
+ escapeHTML: function() { | |
+ return this.replace(/&/g,'&amp;').replace(/</g,'&lt;').replace(/>/g,'&gt;'); | |
+ }, | |
+ unescapeHTML: function() { | |
+ return this.replace(/&amp;/g,'&').replace(/&lt;/g,'<').replace(/&gt;/g,'>'); | |
+ } | |
+}); | |
+ | |
+String.prototype.gsub.prepareReplacement = function(replacement) { | |
+ if (Object.isFunction(replacement)) return replacement; | |
+ var template = new Template(replacement); | |
+ return function(match) { return template.evaluate(match) }; | |
+}; | |
+ | |
+String.prototype.parseQuery = String.prototype.toQueryParams; | |
+ | |
+Object.extend(String.prototype.escapeHTML, { | |
+ div: document.createElement('div'), | |
+ text: document.createTextNode('') | |
+}); | |
+ | |
+with (String.prototype.escapeHTML) div.appendChild(text); | |
+ | |
+var Template = Class.create({ | |
+ initialize: function(template, pattern) { | |
+ this.template = template.toString(); | |
+ this.pattern = pattern || Template.Pattern; | |
+ }, | |
+ | |
+ evaluate: function(object) { | |
+ if (Object.isFunction(object.toTemplateReplacements)) | |
+ object = object.toTemplateReplacements(); | |
+ | |
+ return this.template.gsub(this.pattern, function(match) { | |
+ if (object == null) return ''; | |
+ | |
+ var before = match[1] || ''; | |
+ if (before == '\\') return match[2]; | |
+ | |
+ var ctx = object, expr = match[3]; | |
+ var pattern = /^([^.[]+|\[((?:.*?[^\\])?)\])(\.|\[|$)/, match = pattern.exec(expr); | |
+ if (match == null) return before; | |
+ | |
+ while (match != null) { | |
+ var comp = match[1].startsWith('[') ? match[2].gsub('\\\\]', ']') : match[1]; | |
+ ctx = ctx[comp]; | |
+ if (null == ctx || '' == match[3]) break; | |
+ expr = expr.substring('[' == match[3] ? match[1].length : match[0].length); | |
+ match = pattern.exec(expr); | |
+ } | |
+ | |
+ return before + String.interpret(ctx); | |
+ }.bind(this)); | |
+ } | |
+}); | |
+Template.Pattern = /(^|.|\r|\n)(#\{(.*?)\})/; | |
+ | |
+var $break = { }; | |
+ | |
+var Enumerable = { | |
+ each: function(iterator, context) { | |
+ var index = 0; | |
+ iterator = iterator.bind(context); | |
+ try { | |
+ this._each(function(value) { | |
+ iterator(value, index++); | |
+ }); | |
+ } catch (e) { | |
+ if (e != $break) throw e; | |
+ } | |
+ return this; | |
+ }, | |
+ | |
+ eachSlice: function(number, iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var index = -number, slices = [], array = this.toArray(); | |
+ while ((index += number) < array.length) | |
+ slices.push(array.slice(index, index+number)); | |
+ return slices.collect(iterator, context); | |
+ }, | |
+ | |
+ all: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var result = true; | |
+ this.each(function(value, index) { | |
+ result = result && !!iterator(value, index); | |
+ if (!result) throw $break; | |
+ }); | |
+ return result; | |
+ }, | |
+ | |
+ any: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var result = false; | |
+ this.each(function(value, index) { | |
+ if (result = !!iterator(value, index)) | |
+ throw $break; | |
+ }); | |
+ return result; | |
+ }, | |
+ | |
+ collect: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var results = []; | |
+ this.each(function(value, index) { | |
+ results.push(iterator(value, index)); | |
+ }); | |
+ return results; | |
+ }, | |
+ | |
+ detect: function(iterator, context) { | |
+ iterator = iterator.bind(context); | |
+ var result; | |
+ this.each(function(value, index) { | |
+ if (iterator(value, index)) { | |
+ result = value; | |
+ throw $break; | |
+ } | |
+ }); | |
+ return result; | |
+ }, | |
+ | |
+ findAll: function(iterator, context) { | |
+ iterator = iterator.bind(context); | |
+ var results = []; | |
+ this.each(function(value, index) { | |
+ if (iterator(value, index)) | |
+ results.push(value); | |
+ }); | |
+ return results; | |
+ }, | |
+ | |
+ grep: function(filter, iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var results = []; | |
+ | |
+ if (Object.isString(filter)) | |
+ filter = new RegExp(filter); | |
+ | |
+ this.each(function(value, index) { | |
+ if (filter.match(value)) | |
+ results.push(iterator(value, index)); | |
+ }); | |
+ return results; | |
+ }, | |
+ | |
+ include: function(object) { | |
+ if (Object.isFunction(this.indexOf)) | |
+ if (this.indexOf(object) != -1) return true; | |
+ | |
+ var found = false; | |
+ this.each(function(value) { | |
+ if (value == object) { | |
+ found = true; | |
+ throw $break; | |
+ } | |
+ }); | |
+ return found; | |
+ }, | |
+ | |
+ inGroupsOf: function(number, fillWith) { | |
+ fillWith = fillWith === undefined ? null : fillWith; | |
+ return this.eachSlice(number, function(slice) { | |
+ while(slice.length < number) slice.push(fillWith); | |
+ return slice; | |
+ }); | |
+ }, | |
+ | |
+ inject: function(memo, iterator, context) { | |
+ iterator = iterator.bind(context); | |
+ this.each(function(value, index) { | |
+ memo = iterator(memo, value, index); | |
+ }); | |
+ return memo; | |
+ }, | |
+ | |
+ invoke: function(method) { | |
+ var args = $A(arguments).slice(1); | |
+ return this.map(function(value) { | |
+ return value[method].apply(value, args); | |
+ }); | |
+ }, | |
+ | |
+ max: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var result; | |
+ this.each(function(value, index) { | |
+ value = iterator(value, index); | |
+ if (result == undefined || value >= result) | |
+ result = value; | |
+ }); | |
+ return result; | |
+ }, | |
+ | |
+ min: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var result; | |
+ this.each(function(value, index) { | |
+ value = iterator(value, index); | |
+ if (result == undefined || value < result) | |
+ result = value; | |
+ }); | |
+ return result; | |
+ }, | |
+ | |
+ partition: function(iterator, context) { | |
+ iterator = iterator ? iterator.bind(context) : Prototype.K; | |
+ var trues = [], falses = []; | |
+ this.each(function(value, index) { | |
+ (iterator(value, index) ? | |
+ trues : falses).push(value); | |
+ }); | |
+ return [trues, falses]; | |
+ }, | |
+ | |
+ pluck: function(property) { | |
+ var results = []; | |
+ this.each(function(value) { | |
+ results.push(value[property]); | |
+ }); | |
+ return results; | |
+ }, | |
+ | |
+ reject: function(iterator, context) { | |
+ iterator = iterator.bind(context); | |
+ var results = []; | |
+ this.each(function(value, index) { | |
+ if (!iterator(value, index)) | |
+ results.push(value); | |
+ }); | |
+ return results; | |
+ }, | |
+ | |
+ sortBy: function(iterator, context) { | |
+ iterator = iterator.bind(context); | |
+ return this.map(function(value, index) { | |
+ return {value: value, criteria: iterator(value, index)}; | |
+ }).sort(function(left, right) { | |
+ var a = left.criteria, b = right.criteria; | |
+ return a < b ? -1 : a > b ? 1 : 0; | |
+ }).pluck('value'); | |
+ }, | |
+ | |
+ toArray: function() { | |
+ return this.map(); | |
+ }, | |
+ | |
+ zip: function() { | |
+ var iterator = Prototype.K, args = $A(arguments); | |
+ if (Object.isFunction(args.last())) | |
+ iterator = args.pop(); | |
+ | |
+ var collections = [this].concat(args).map($A); | |
+ return this.map(function(value, index) { | |
+ return iterator(collections.pluck(index)); | |
+ }); | |
+ }, | |
+ | |
+ size: function() { | |
+ return this.toArray().length; | |
+ }, | |
+ | |
+ inspect: function() { | |
+ return '#<Enumerable:' + this.toArray().inspect() + '>'; | |
+ } | |
+}; | |
+ | |
+Object.extend(Enumerable, { | |
+ map: Enumerable.collect, | |
+ find: Enumerable.detect, | |
+ select: Enumerable.findAll, | |
+ filter: Enumerable.findAll, | |
+ member: Enumerable.include, | |
+ entries: Enumerable.toArray, | |
+ every: Enumerable.all, | |
+ some: Enumerable.any | |
+}); | |
+function $A(iterable) { | |
+ if (!iterable) return []; | |
+ if (iterable.toArray) return iterable.toArray(); | |
+ var length = iterable.length, results = new Array(length); | |
+ while (length--) results[length] = iterable[length]; | |
+ return results; | |
+} | |
+ | |
+if (Prototype.Browser.WebKit) { | |
+ function $A(iterable) { | |
+ if (!iterable) return []; | |
+ if (!(Object.isFunction(iterable) && iterable == '[object NodeList]') && | |
+ iterable.toArray) return iterable.toArray(); | |
+ var length = iterable.length, results = new Array(length); | |
+ while (length--) results[length] = iterable[length]; | |
+ return results; | |
+ } | |
+} | |
+ | |
+Array.from = $A; | |
+ | |
+Object.extend(Array.prototype, Enumerable); | |
+ | |
+if (!Array.prototype._reverse) Array.prototype._reverse = Array.prototype.reverse; | |
+ | |
+Object.extend(Array.prototype, { | |
+ _each: function(iterator) { | |
+ for (var i = 0, length = this.length; i < length; i++) | |
+ iterator(this[i]); | |
+ }, | |
+ | |
+ clear: function() { | |
+ this.length = 0; | |
+ return this; | |
+ }, | |
+ | |
+ first: function() { | |
+ return this[0]; | |
+ }, | |
+ | |
+ last: function() { | |
+ return this[this.length - 1]; | |
+ }, | |
+ | |
+ compact: function() { | |
+ return this.select(function(value) { | |
+ return value != null; | |
+ }); | |
+ }, | |
+ | |
+ flatten: function() { | |
+ return this.inject([], function(array, value) { | |
+ return array.concat(Object.isArray(value) ? | |
+ value.flatten() : [value]); | |
+ }); | |
+ }, | |
+ | |
+ without: function() { | |
+ var values = $A(arguments); | |
+ return this.select(function(value) { | |
+ return !values.include(value); | |
+ }); | |
+ }, | |
+ | |
+ reverse: function(inline) { | |
+ return (inline !== false ? this : this.toArray())._reverse(); | |
+ }, | |
+ | |
+ reduce: function() { | |
+ return this.length > 1 ? this : this[0]; | |
+ }, | |
+ | |
+ uniq: function(sorted) { | |
+ return this.inject([], function(array, value, index) { | |
+ if (0 == index || (sorted ? array.last() != value : !array.include(value))) | |
+ array.push(value); | |
+ return array; | |
+ }); | |
+ }, | |
+ | |
+ intersect: function(array) { | |
+ return this.uniq().findAll(function(item) { | |
+ return array.detect(function(value) { return item === value }); | |
+ }); | |
+ }, | |
+ | |
+ clone: function() { | |
+ return [].concat(this); | |
+ }, | |
+ | |
+ size: function() { | |
+ return this.length; | |
+ }, | |
+ | |
+ inspect: function() { | |
+ return '[' + this.map(Object.inspect).join(', ') + ']'; | |
+ }, | |
+ | |
+ toJSON: function() { | |
+ var results = []; | |
+ this.each(function(object) { | |
+ var value = Object.toJSON(object); | |
+ if (value !== undefined) results.push(value); | |
+ }); | |
+ return '[' + results.join(', ') + ']'; | |
+ } | |
+}); | |
+ | |
+// use native browser JS 1.6 implementation if available | |
+if (Object.isFunction(Array.prototype.forEach)) | |
+ Array.prototype._each = Array.prototype.forEach; | |
+ | |
+if (!Array.prototype.indexOf) Array.prototype.indexOf = function(item, i) { | |
+ i || (i = 0); | |
+ var length = this.length; | |
+ if (i < 0) i = length + i; | |
+ for (; i < length; i++) | |
+ if (this[i] === item) return i; | |
+ return -1; | |
+}; | |
+ | |
+if (!Array.prototype.lastIndexOf) Array.prototype.lastIndexOf = function(item, i) { | |
+ i = isNaN(i) ? this.length : (i < 0 ? this.length + i : i) + 1; | |
+ var n = this.slice(0, i).reverse().indexOf(item); | |
+ return (n < 0) ? n : i - n - 1; | |
+}; | |
+ | |
+Array.prototype.toArray = Array.prototype.clone; | |
+ | |
+function $w(string) { | |
+ if (!Object.isString(string)) return []; | |
+ string = string.strip(); | |
+ return string ? string.split(/\s+/) : []; | |
+} | |
+ | |
+if (Prototype.Browser.Opera){ | |
+ Array.prototype.concat = function() { | |
+ var array = []; | |
+ for (var i = 0, length = this.length; i < length; i++) array.push(this[i]); | |
+ for (var i = 0, length = arguments.length; i < length; i++) { | |
+ if (Object.isArray(arguments[i])) { | |
+ for (var j = 0, arrayLength = arguments[i].length; j < arrayLength; j++) | |
+ array.push(arguments[i][j]); | |
+ } else { | |
+ array.push(arguments[i]); | |
+ } | |
+ } | |
+ return array; | |
+ }; | |
+} | |
+Object.extend(Number.prototype, { | |
+ toColorPart: function() { | |
+ return this.toPaddedString(2, 16); | |
+ }, | |
+ | |
+ succ: function() { | |
+ return this + 1; | |
+ }, | |
+ | |
+ times: function(iterator) { | |
+ $R(0, this, true).each(iterator); | |
+ return this; | |
+ }, | |
+ | |
+ toPaddedString: function(length, radix) { | |
+ var string = this.toString(radix || 10); | |
+ return '0'.times(length - string.length) + string; | |
+ }, | |
+ | |
+ toJSON: function() { | |
+ return isFinite(this) ? this.toString() : 'null'; | |
+ } | |
+}); | |
+ | |
+$w('abs round ceil floor').each(function(method){ | |
+ Number.prototype[method] = Math[method].methodize(); | |
+}); | |
+function $H(object) { | |
+ return new Hash(object); | |
+}; | |
+ | |
+var Hash = Class.create(Enumerable, (function() { | |
+ if (function() { | |
+ var i = 0, Test = function(value) { this.key = value }; | |
+ Test.prototype.key = 'foo'; | |
+ for (var property in new Test('bar')) i++; | |
+ return i > 1; | |
+ }()) { | |
+ function each(iterator) { | |
+ var cache = []; | |
+ for (var key in this._object) { | |
+ var value = this._object[key]; | |
+ if (cache.include(key)) continue; | |
+ cache.push(key); | |
+ var pair = [key, value]; | |
+ pair.key = key; | |
+ pair.value = value; | |
+ iterator(pair); | |
+ } | |
+ } | |
+ } else { | |
+ function each(iterator) { | |
+ for (var key in this._object) { | |
+ var value = this._object[key], pair = [key, value]; | |
+ pair.key = key; | |
+ pair.value = value; | |
+ iterator(pair); | |
+ } | |
+ } | |
+ } | |
+ | |
+ function toQueryPair(key, value) { | |
+ if (Object.isUndefined(value)) return key; | |
+ return key + '=' + encodeURIComponent(String.interpret(value)); | |
+ } | |
+ | |
+ return { | |
+ initialize: function(object) { | |
+ this._object = Object.isHash(object) ? object.toObject() : Object.clone(object); | |
+ }, | |
+ | |
+ _each: each, | |
+ | |
+ set: function(key, value) { | |
+ return this._object[key] = value; | |
+ }, | |
+ | |
+ get: function(key) { | |
+ return this._object[key]; | |
+ }, | |
+ | |
+ unset: function(key) { | |
+ var value = this._object[key]; | |
+ delete this._object[key]; | |
+ return value; | |
+ }, | |
+ | |
+ toObject: function() { | |
+ return Object.clone(this._object); | |
+ }, | |
+ | |
+ keys: function() { | |
+ return this.pluck('key'); | |
+ }, | |
+ | |
+ values: function() { | |
+ return this.pluck('value'); | |
+ }, | |
+ | |
+ index: function(value) { | |
+ var match = this.detect(function(pair) { | |
+ return pair.value === value; | |
+ }); | |
+ return match && match.key; | |
+ }, | |
+ | |
+ merge: function(object) { | |
+ return this.clone().update(object); | |
+ }, | |
+ | |
+ update: function(object) { | |
+ return new Hash(object).inject(this, function(result, pair) { | |
+ result.set(pair.key, pair.value); | |
+ return result; | |
+ }); | |
+ }, | |
+ | |
+ toQueryString: function() { | |
+ return this.map(function(pair) { | |
+ var key = encodeURIComponent(pair.key), values = pair.value; | |
+ | |
+ if (values && typeof values == 'object') { | |
+ if (Object.isArray(values)) | |
+ return values.map(toQueryPair.curry(key)).join('&'); | |
+ } | |
+ return toQueryPair(key, values); | |
+ }).join('&'); | |
+ }, | |
+ | |
+ inspect: function() { | |
+ return '#<Hash:{' + this.map(function(pair) { | |
+ return pair.map(Object.inspect).join(': '); | |
+ }).join(', ') + '}>'; | |
+ }, | |
+ | |
+ toJSON: function() { | |
+ return Object.toJSON(this.toObject()); | |
+ }, | |
+ | |
+ clone: function() { | |
+ return new Hash(this); | |
+ } | |
+ } | |
+})()); | |
+ | |
+Hash.prototype.toTemplateReplacements = Hash.prototype.toObject; | |
+Hash.from = $H; | |
+var ObjectRange = Class.create(Enumerable, { | |
+ initialize: function(start, end, exclusive) { | |
+ this.start = start; | |
+ this.end = end; | |
+ this.exclusive = exclusive; | |
+ }, | |
+ | |
+ _each: function(iterator) { | |
+ var value = this.start; | |
+ while (this.include(value)) { | |
+ iterator(value); | |
+ value = value.succ(); | |
+ } | |
+ }, | |
+ | |
+ include: function(value) { | |
+ if (value < this.start) | |
+ return false; | |
+ if (this.exclusive) | |
+ return value < this.end; | |
+ return value <= this.end; | |
+ } | |
+}); | |
+ | |
+var $R = function(start, end, exclusive) { | |
+ return new ObjectRange(start, end, exclusive); | |
+}; | |
+ | |
+var Ajax = { | |
+ getTransport: function() { | |
+ return Try.these( | |
+ function() {return new XMLHttpRequest()}, | |
+ function() {return new ActiveXObject('Msxml2.XMLHTTP')}, | |
+ function() {return new ActiveXObject('Microsoft.XMLHTTP')} | |
+ ) || false; | |
+ }, | |
+ | |
+ activeRequestCount: 0 | |
+}; | |
+ | |
+Ajax.Responders = { | |
+ responders: [], | |
+ | |
+ _each: function(iterator) { | |
+ this.responders._each(iterator); | |
+ }, | |
+ | |
+ register: function(responder) { | |
+ if (!this.include(responder)) | |
+ this.responders.push(responder); | |
+ }, | |
+ | |
+ unregister: function(responder) { | |
+ this.responders = this.responders.without(responder); | |
+ }, | |
+ | |
+ dispatch: function(callback, request, transport, json) { | |
+ this.each(function(responder) { | |
+ if (Object.isFunction(responder[callback])) { | |
+ try { | |
+ responder[callback].apply(responder, [request, transport, json]); | |
+ } catch (e) { } | |
+ } | |
+ }); | |
+ } | |
+}; | |
+ | |
+Object.extend(Ajax.Responders, Enumerable); | |
+ | |
+Ajax.Responders.register({ | |
+ onCreate: function() { Ajax.activeRequestCount++ }, | |
+ onComplete: function() { Ajax.activeRequestCount-- } | |
+}); | |
+ | |
+Ajax.Base = Class.create({ | |
+ initialize: function(options) { | |
+ this.options = { | |
+ method: 'post', | |
+ asynchronous: true, | |
+ contentType: 'application/x-www-form-urlencoded', | |
+ encoding: 'UTF-8', | |
+ parameters: '', | |
+ evalJSON: true, | |
+ evalJS: true | |
+ }; | |
+ Object.extend(this.options, options || { }); | |
+ | |
+ this.options.method = this.options.method.toLowerCase(); | |
+ if (Object.isString(this.options.parameters)) | |
+ this.options.parameters = this.options.parameters.toQueryParams(); | |
+ } | |
+}); | |
+ | |
+Ajax.Request = Class.create(Ajax.Base, { | |
+ _complete: false, | |
+ | |
+ initialize: function($super, url, options) { | |
+ $super(options); | |
+ this.transport = Ajax.getTransport(); | |
+ this.request(url); | |
+ }, | |
+ | |
+ request: function(url) { | |
+ this.url = url; | |
+ this.method = this.options.method; | |
+ var params = Object.clone(this.options.parameters); | |
+ | |
+ if (!['get', 'post'].include(this.method)) { | |
+ // simulate other verbs over post | |
+ params['_method'] = this.method; | |
+ this.method = 'post'; | |
+ } | |
+ | |
+ this.parameters = params; | |
+ | |
+ if (params = Object.toQueryString(params)) { | |
+ // when GET, append parameters to URL | |
+ if (this.method == 'get') | |
+ this.url += (this.url.include('?') ? '&' : '?') + params; | |
+ else if (/Konqueror|Safari|KHTML/.test(navigator.userAgent)) | |
+ params += '&_='; | |
+ } | |
+ | |
+ try { | |
+ var response = new Ajax.Response(this); | |
+ if (this.options.onCreate) this.options.onCreate(response); | |
+ Ajax.Responders.dispatch('onCreate', this, response); | |
+ | |
+ this.transport.open(this.method.toUpperCase(), this.url, | |
+ this.options.asynchronous); | |
+ | |
+ if (this.options.asynchronous) this.respondToReadyState.bind(this).defer(1); | |
+ | |
+ this.transport.onreadystatechange = this.onStateChange.bind(this); | |
+ this.setRequestHeaders(); | |
+ | |
+ this.body = this.method == 'post' ? (this.options.postBody || params) : null; | |
+ this.transport.send(this.body); | |
+ | |
+ /* Force Firefox to handle ready state 4 for synchronous requests */ | |
+ if (!this.options.asynchronous && this.transport.overrideMimeType) | |
+ this.onStateChange(); | |
+ | |
+ } | |
+ catch (e) { | |
+ this.dispatchException(e); | |
+ } | |
+ }, | |
+ | |
+ onStateChange: function() { | |
+ var readyState = this.transport.readyState; | |
+ if (readyState > 1 && !((readyState == 4) && this._complete)) | |
+ this.respondToReadyState(this.transport.readyState); | |
+ }, | |
+ | |
+ setRequestHeaders: function() { | |
+ var headers = { | |
+ 'X-Requested-With': 'XMLHttpRequest', | |
+ 'X-Prototype-Version': Prototype.Version, | |
+ 'Accept': 'text/javascript, text/html, application/xml, text/xml, */*' | |
+ }; | |
+ | |
+ if (this.method == 'post') { | |
+ headers['Content-type'] = this.options.contentType + | |
+ (this.options.encoding ? '; charset=' + this.options.encoding : ''); | |
+ | |
+ /* Force "Connection: close" for older Mozilla browsers to work | |
+ * around a bug where XMLHttpRequest sends an incorrect | |
+ * Content-length header. See Mozilla Bugzilla #246651. | |
+ */ | |
+ if (this.transport.overrideMimeType && | |
+ (navigator.userAgent.match(/Gecko\/(\d{4})/) || [0,2005])[1] < 2005) | |
+ headers['Connection'] = 'close'; | |
+ } | |
+ | |
+ // user-defined headers | |
+ if (typeof this.options.requestHeaders == 'object') { | |
+ var extras = this.options.requestHeaders; | |
+ | |
+ if (Object.isFunction(extras.push)) | |
+ for (var i = 0, length = extras.length; i < length; i += 2) | |
+ headers[extras[i]] = extras[i+1]; | |
+ else | |
+ $H(extras).each(function(pair) { headers[pair.key] = pair.value }); | |
+ } | |
+ | |
+ for (var name in headers) | |
+ this.transport.setRequestHeader(name, headers[name]); | |
+ }, | |
+ | |
+ success: function() { | |
+ var status = this.getStatus(); | |
+ return !status || (status >= 200 && status < 300); | |
+ }, | |
+ | |
+ getStatus: function() { | |
+ try { | |
+ return this.transport.status || 0; | |
+ } catch (e) { return 0 } | |
+ }, | |
+ | |
+ respondToReadyState: function(readyState) { | |
+ var state = Ajax.Request.Events[readyState], response = new Ajax.Response(this); | |
+ | |
+ if (state == 'Complete') { | |
+ try { | |
+ this._complete = true; | |
+ (this.options['on' + response.status] | |
+ || this.options['on' + (this.success() ? 'Success' : 'Failure')] | |
+ || Prototype.emptyFunction)(response, response.headerJSON); | |
+ } catch (e) { | |
+ this.dispatchException(e); | |
+ } | |
+ | |
+ var contentType = response.getHeader('Content-type'); | |
+ if (this.options.evalJS == 'force' | |
+ || (this.options.evalJS && contentType | |
+ && contentType.match(/^\s*(text|application)\/(x-)?(java|ecma)script(;.*)?\s*$/i))) | |
+ this.evalResponse(); | |
+ } | |
+ | |
+ try { | |
+ (this.options['on' + state] || Prototype.emptyFunction)(response, response.headerJSON); | |
+ Ajax.Responders.dispatch('on' + state, this, response, response.headerJSON); | |
+ } catch (e) { | |
+ this.dispatchException(e); | |
+ } | |
+ | |
+ if (state == 'Complete') { | |
+ // avoid memory leak in MSIE: clean up | |
+ this.transport.onreadystatechange = Prototype.emptyFunction; | |
+ } | |
+ }, | |
+ | |
+ getHeader: function(name) { | |
+ try { | |
+ return this.transport.getResponseHeader(name); | |
+ } catch (e) { return null } | |
+ }, | |
+ | |
+ evalResponse: function() { | |
+ try { | |
+ return eval((this.transport.responseText || '').unfilterJSON()); | |
+ } catch (e) { | |
+ this.dispatchException(e); | |
+ } | |
+ }, | |
+ | |
+ dispatchException: function(exception) { | |
+ (this.options.onException || Prototype.emptyFunction)(this, exception); | |
+ Ajax.Responders.dispatch('onException', this, exception); | |
+ } | |
+}); | |
+ | |
+Ajax.Request.Events = | |
+ ['Uninitialized', 'Loading', 'Loaded', 'Interactive', 'Complete']; | |
+ | |
+Ajax.Response = Class.create({ | |
+ initialize: function(request){ | |
+ this.request = request; | |
+ var transport = this.transport = request.transport, | |
+ readyState = this.readyState = transport.readyState; | |
+ | |
+ if((readyState > 2 && !Prototype.Browser.IE) || readyState == 4) { | |
+ this.status = this.getStatus(); | |
+ this.statusText = this.getStatusText(); | |
+ this.responseText = String.interpret(transport.responseText); | |
+ this.headerJSON = this._getHeaderJSON(); | |
+ } | |
+ | |
+ if(readyState == 4) { | |
+ var xml = transport.responseXML; | |
+ this.responseXML = xml === undefined ? null : xml; | |
+ this.responseJSON = this._getResponseJSON(); | |
+ } | |
+ }, | |
+ | |
+ status: 0, | |
+ statusText: '', | |
+ | |
+ getStatus: Ajax.Request.prototype.getStatus, | |
+ | |
+ getStatusText: function() { | |
+ try { | |
+ return this.transport.statusText || ''; | |
+ } catch (e) { return '' } | |
+ }, | |
+ | |
+ getHeader: Ajax.Request.prototype.getHeader, | |
+ | |
+ getAllHeaders: function() { | |
+ try { | |
+ return this.getAllResponseHeaders(); | |
+ } catch (e) { return null } | |
+ }, | |
+ | |
+ getResponseHeader: function(name) { | |
+ return this.transport.getResponseHeader(name); | |
+ }, | |
+ | |
+ getAllResponseHeaders: function() { | |
+ return this.transport.getAllResponseHeaders(); | |
+ }, | |
+ | |
+ _getHeaderJSON: function() { | |
+ var json = this.getHeader('X-JSON'); | |
+ if (!json) return null; | |
+ json = decodeURIComponent(escape(json)); | |
+ try { | |
+ return json.evalJSON(this.request.options.sanitizeJSON); | |
+ } catch (e) { | |
+ this.request.dispatchException(e); | |
+ } | |
+ }, | |
+ | |
+ _getResponseJSON: function() { | |
+ var options = this.request.options; | |
+ if (!options.evalJSON || (options.evalJSON != 'force' && | |
+ !(this.getHeader('Content-type') || '').include('application/json'))) | |
+ return null; | |
+ try { | |
+ return this.transport.responseText.evalJSON(options.sanitizeJSON); | |
+ } catch (e) { | |
+ this.request.dispatchException(e); | |
+ } | |
+ } | |
+}); | |
+ | |
+Ajax.Updater = Class.create(Ajax.Request, { | |
+ initialize: function($super, container, url, options) { | |
+ this.container = { | |
+ success: (container.success || container), | |
+ failure: (container.failure || (container.success ? null : container)) | |
+ }; | |
+ | |
+ options = options || { }; | |
+ var onComplete = options.onComplete; | |
+ options.onComplete = (function(response, param) { | |
+ this.updateContent(response.responseText); | |
+ if (Object.isFunction(onComplete)) onComplete(response, param); | |
+ }).bind(this); | |
+ | |
+ $super(url, options); | |
+ }, | |
+ | |
+ updateContent: function(responseText) { | |
+ var receiver = this.container[this.success() ? 'success' : 'failure'], | |
+ options = this.options; | |
+ | |
+ if (!options.evalScripts) responseText = responseText.stripScripts(); | |
+ | |
+ if (receiver = $(receiver)) { | |
+ if (options.insertion) { | |
+ if (Object.isString(options.insertion)) { | |
+ var insertion = { }; insertion[options.insertion] = responseText; | |
+ receiver.insert(insertion); | |
+ } | |
+ else options.insertion(receiver, responseText); | |
+ } | |
+ else receiver.update(responseText); | |
+ } | |
+ | |
+ if (this.success()) { | |
+ if (this.onComplete) this.onComplete.bind(this).defer(); | |
+ } | |
+ } | |
+}); | |
+ | |
+Ajax.PeriodicalUpdater = Class.create(Ajax.Base, { | |
+ initialize: function($super, container, url, options) { | |
+ $super(options); | |
+ this.onComplete = this.options.onComplete; | |
+ | |
+ this.frequency = (this.options.frequency || 2); | |
+ this.decay = (this.options.decay || 1); | |
+ | |
+ this.updater = { }; | |
+ this.container = container; | |
+ this.url = url; | |
+ | |
+ this.start(); | |
+ }, | |
+ | |
+ start: function() { | |
+ this.options.onComplete = this.updateComplete.bind(this); | |
+ this.onTimerEvent(); | |
+ }, | |
+ | |
+ stop: function() { | |
+ this.updater.options.onComplete = undefined; | |
+ clearTimeout(this.timer); | |
+ (this.onComplete || Prototype.emptyFunction).apply(this, arguments); | |
+ }, | |
+ | |
+ updateComplete: function(response) { | |
+ if (this.options.decay) { | |
+ this.decay = (response.responseText == this.lastText ? | |
+ this.decay * this.options.decay : 1); | |
+ | |
+ this.lastText = response.responseText; | |
+ } | |
+ this.timer = this.onTimerEvent.bind(this).delay(this.decay * this.frequency); | |
+ }, | |
+ | |
+ onTimerEvent: function() { | |
+ this.updater = new Ajax.Updater(this.container, this.url, this.options); | |
+ } | |
+}); | |
+function $(element) { | |
+ if (arguments.length > 1) { | |
+ for (var i = 0, elements = [], length = arguments.length; i < length; i++) | |
+ elements.push($(arguments[i])); | |
+ return elements; | |
+ } | |
+ if (Object.isString(element)) | |
+ element = document.getElementById(element); | |
+ return Element.extend(element); | |
+} | |
+ | |
+if (Prototype.BrowserFeatures.XPath) { | |
+ document._getElementsByXPath = function(expression, parentElement) { | |
+ var results = []; | |
+ var query = document.evaluate(expression, $(parentElement) || document, | |
+ null, XPathResult.ORDERED_NODE_SNAPSHOT_TYPE, null); | |
+ for (var i = 0, length = query.snapshotLength; i < length; i++) | |
+ results.push(Element.extend(query.snapshotItem(i))); | |
+ return results; | |
+ }; | |
+} | |
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+if (!window.Node) var Node = { }; | |
+ | |
+if (!Node.ELEMENT_NODE) { | |
+ // DOM level 2 ECMAScript Language Binding | |
+ Object.extend(Node, { | |
+ ELEMENT_NODE: 1, | |
+ ATTRIBUTE_NODE: 2, | |
+ TEXT_NODE: 3, | |
+ CDATA_SECTION_NODE: 4, | |
+ ENTITY_REFERENCE_NODE: 5, | |
+ ENTITY_NODE: 6, | |
+ PROCESSING_INSTRUCTION_NODE: 7, | |
+ COMMENT_NODE: 8, | |
+ DOCUMENT_NODE: 9, | |
+ DOCUMENT_TYPE_NODE: 10, | |
+ DOCUMENT_FRAGMENT_NODE: 11, | |
+ NOTATION_NODE: 12 | |
+ }); | |
+} | |
+ | |
+(function() { | |
+ var element = this.Element; | |
+ this.Element = function(tagName, attributes) { | |
+ attributes = attributes || { }; | |
+ tagName = tagName.toLowerCase(); | |
+ var cache = Element.cache; | |
+ if (Prototype.Browser.IE && attributes.name) { | |
+ tagName = '<' + tagName + ' name="' + attributes.name + '">'; | |
+ delete attributes.name; | |
+ return Element.writeAttribute(document.createElement(tagName), attributes); | |
+ } | |
+ if (!cache[tagName]) cache[tagName] = Element.extend(document.createElement(tagName)); | |
+ return Element.writeAttribute(cache[tagName].cloneNode(false), attributes); | |
+ }; | |
+ Object.extend(this.Element, element || { }); | |
+}).call(window); | |
+ | |
+Element.cache = { }; | |
+ | |
+Element.Methods = { | |
+ visible: function(element) { | |
+ return $(element).style.display != 'none'; | |
+ }, | |
+ | |
+ toggle: function(element) { | |
+ element = $(element); | |
+ Element[Element.visible(element) ? 'hide' : 'show'](element); | |
+ return element; | |
+ }, | |
+ | |
+ hide: function(element) { | |
+ $(element).style.display = 'none'; | |
+ return element; | |
+ }, | |
+ | |
+ show: function(element) { | |
+ $(element).style.display = ''; | |
+ return element; | |
+ }, | |
+ | |
+ remove: function(element) { | |
+ element = $(element); | |
+ element.parentNode.removeChild(element); | |
+ return element; | |
+ }, | |
+ | |
+ update: function(element, content) { | |
+ element = $(element); | |
+ if (content && content.toElement) content = content.toElement(); | |
+ if (Object.isElement(content)) return element.update().insert(content); | |
+ content = Object.toHTML(content); | |
+ element.innerHTML = content.stripScripts(); | |
+ content.evalScripts.bind(content).defer(); | |
+ return element; | |
+ }, | |
+ | |
+ replace: function(element, content) { | |
+ element = $(element); | |
+ if (content && content.toElement) content = content.toElement(); | |
+ else if (!Object.isElement(content)) { | |
+ content = Object.toHTML(content); | |
+ var range = element.ownerDocument.createRange(); | |
+ range.selectNode(element); | |
+ content.evalScripts.bind(content).defer(); | |
+ content = range.createContextualFragment(content.stripScripts()); | |
+ } | |
+ element.parentNode.replaceChild(content, element); | |
+ return element; | |
+ }, | |
+ | |
+ insert: function(element, insertions) { | |
+ element = $(element); | |
+ | |
+ if (Object.isString(insertions) || Object.isNumber(insertions) || | |
+ Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML))) | |
+ insertions = {bottom:insertions}; | |
+ | |
+ var content, t, range; | |
+ | |
+ for (position in insertions) { | |
+ content = insertions[position]; | |
+ position = position.toLowerCase(); | |
+ t = Element._insertionTranslations[position]; | |
+ | |
+ if (content && content.toElement) content = content.toElement(); | |
+ if (Object.isElement(content)) { | |
+ t.insert(element, content); | |
+ continue; | |
+ } | |
+ | |
+ content = Object.toHTML(content); | |
+ | |
+ range = element.ownerDocument.createRange(); | |
+ t.initializeRange(element, range); | |
+ t.insert(element, range.createContextualFragment(content.stripScripts())); | |
+ | |
+ content.evalScripts.bind(content).defer(); | |
+ } | |
+ | |
+ return element; | |
+ }, | |
+ | |
+ wrap: function(element, wrapper, attributes) { | |
+ element = $(element); | |
+ if (Object.isElement(wrapper)) | |
+ $(wrapper).writeAttribute(attributes || { }); | |
+ else if (Object.isString(wrapper)) wrapper = new Element(wrapper, attributes); | |
+ else wrapper = new Element('div', wrapper); | |
+ if (element.parentNode) | |
+ element.parentNode.replaceChild(wrapper, element); | |
+ wrapper.appendChild(element); | |
+ return wrapper; | |
+ }, | |
+ | |
+ inspect: function(element) { | |
+ element = $(element); | |
+ var result = '<' + element.tagName.toLowerCase(); | |
+ $H({'id': 'id', 'className': 'class'}).each(function(pair) { | |
+ var property = pair.first(), attribute = pair.last(); | |
+ var value = (element[property] || '').toString(); | |
+ if (value) result += ' ' + attribute + '=' + value.inspect(true); | |
+ }); | |
+ return result + '>'; | |
+ }, | |
+ | |
+ recursivelyCollect: function(element, property) { | |
+ element = $(element); | |
+ var elements = []; | |
+ while (element = element[property]) | |
+ if (element.nodeType == 1) | |
+ elements.push(Element.extend(element)); | |
+ return elements; | |
+ }, | |
+ | |
+ ancestors: function(element) { | |
+ return $(element).recursivelyCollect('parentNode'); | |
+ }, | |
+ | |
+ descendants: function(element) { | |
+ return $A($(element).getElementsByTagName('*')).each(Element.extend); | |
+ }, | |
+ | |
+ firstDescendant: function(element) { | |
+ element = $(element).firstChild; | |
+ while (element && element.nodeType != 1) element = element.nextSibling; | |
+ return $(element); | |
+ }, | |
+ | |
+ immediateDescendants: function(element) { | |
+ if (!(element = $(element).firstChild)) return []; | |
+ while (element && element.nodeType != 1) element = element.nextSibling; | |
+ if (element) return [element].concat($(element).nextSiblings()); | |
+ return []; | |
+ }, | |
+ | |
+ previousSiblings: function(element) { | |
+ return $(element).recursivelyCollect('previousSibling'); | |
+ }, | |
+ | |
+ nextSiblings: function(element) { | |
+ return $(element).recursivelyCollect('nextSibling'); | |
+ }, | |
+ | |
+ siblings: function(element) { | |
+ element = $(element); | |
+ return element.previousSiblings().reverse().concat(element.nextSiblings()); | |
+ }, | |
+ | |
+ match: function(element, selector) { | |
+ if (Object.isString(selector)) | |
+ selector = new Selector(selector); | |
+ return selector.match($(element)); | |
+ }, | |
+ | |
+ up: function(element, expression, index) { | |
+ element = $(element); | |
+ if (arguments.length == 1) return $(element.parentNode); | |
+ var ancestors = element.ancestors(); | |
+ return expression ? Selector.findElement(ancestors, expression, index) : | |
+ ancestors[index || 0]; | |
+ }, | |
+ | |
+ down: function(element, expression, index) { | |
+ element = $(element); | |
+ if (arguments.length == 1) return element.firstDescendant(); | |
+ var descendants = element.descendants(); | |
+ return expression ? Selector.findElement(descendants, expression, index) : | |
+ descendants[index || 0]; | |
+ }, | |
+ | |
+ previous: function(element, expression, index) { | |
+ element = $(element); | |
+ if (arguments.length == 1) return $(Selector.handlers.previousElementSibling(element)); | |
+ var previousSiblings = element.previousSiblings(); | |
+ return expression ? Selector.findElement(previousSiblings, expression, index) : | |
+ previousSiblings[index || 0]; | |
+ }, | |
+ | |
+ next: function(element, expression, index) { | |
+ element = $(element); | |
+ if (arguments.length == 1) return $(Selector.handlers.nextElementSibling(element)); | |
+ var nextSiblings = element.nextSiblings(); | |
+ return expression ? Selector.findElement(nextSiblings, expression, index) : | |
+ nextSiblings[index || 0]; | |
+ }, | |
+ | |
+ select: function() { | |
+ var args = $A(arguments), element = $(args.shift()); | |
+ return Selector.findChildElements(element, args); | |
+ }, | |
+ | |
+ adjacent: function() { | |
+ var args = $A(arguments), element = $(args.shift()); | |
+ return Selector.findChildElements(element.parentNode, args).without(element); | |
+ }, | |
+ | |
+ identify: function(element) { | |
+ element = $(element); | |
+ var id = element.readAttribute('id'), self = arguments.callee; | |
+ if (id) return id; | |
+ do { id = 'anonymous_element_' + self.counter++ } while ($(id)); | |
+ element.writeAttribute('id', id); | |
+ return id; | |
+ }, | |
+ | |
+ readAttribute: function(element, name) { | |
+ element = $(element); | |
+ if (Prototype.Browser.IE) { | |
+ var t = Element._attributeTranslations.read; | |
+ if (t.values[name]) return t.values[name](element, name); | |
+ if (t.names[name]) name = t.names[name]; | |
+ if (name.include(':')) { | |
+ return (!element.attributes || !element.attributes[name]) ? null : | |
+ element.attributes[name].value; | |
+ } | |
+ } | |
+ return element.getAttribute(name); | |
+ }, | |
+ | |
+ writeAttribute: function(element, name, value) { | |
+ element = $(element); | |
+ var attributes = { }, t = Element._attributeTranslations.write; | |
+ | |
+ if (typeof name == 'object') attributes = name; | |
+ else attributes[name] = value === undefined ? true : value; | |
+ | |
+ for (var attr in attributes) { | |
+ var name = t.names[attr] || attr, value = attributes[attr]; | |
+ if (t.values[attr]) name = t.values[attr](element, value); | |
+ if (value === false || value === null) | |
+ element.removeAttribute(name); | |
+ else if (value === true) | |
+ element.setAttribute(name, name); | |
+ else element.setAttribute(name, value); | |
+ } | |
+ return element; | |
+ }, | |
+ | |
+ getHeight: function(element) { | |
+ return $(element).getDimensions().height; | |
+ }, | |
+ | |
+ getWidth: function(element) { | |
+ return $(element).getDimensions().width; | |
+ }, | |
+ | |
+ classNames: function(element) { | |
+ return new Element.ClassNames(element); | |
+ }, | |
+ | |
+ hasClassName: function(element, className) { | |
+ if (!(element = $(element))) return; | |
+ var elementClassName = element.className; | |
+ return (elementClassName.length > 0 && (elementClassName == className || | |
+ new RegExp("(^|\\s)" + className + "(\\s|$)").test(elementClassName))); | |
+ }, | |
+ | |
+ addClassName: function(element, className) { | |
+ if (!(element = $(element))) return; | |
+ if (!element.hasClassName(className)) | |
+ element.className += (element.className ? ' ' : '') + className; | |
+ return element; | |
+ }, | |
+ | |
+ removeClassName: function(element, className) { | |
+ if (!(element = $(element))) return; | |
+ element.className = element.className.replace( | |
+ new RegExp("(^|\\s+)" + className + "(\\s+|$)"), ' ').strip(); | |
+ return element; | |
+ }, | |
+ | |
+ toggleClassName: function(element, className) { | |
+ if (!(element = $(element))) return; | |
+ return element[element.hasClassName(className) ? | |
+ 'removeClassName' : 'addClassName'](className); | |
+ }, | |
+ | |
+ // removes whitespace-only text node children | |
+ cleanWhitespace: function(element) { | |
+ element = $(element); | |
+ var node = element.firstChild; | |
+ while (node) { | |
+ var nextNode = node.nextSibling; | |
+ if (node.nodeType == 3 && !/\S/.test(node.nodeValue)) | |
+ element.removeChild(node); | |
+ node = nextNode; | |
+ } | |
+ return element; | |
+ }, | |
+ | |
+ empty: function(element) { | |
+ return $(element).innerHTML.blank(); | |
+ }, | |
+ | |
+ descendantOf: function(element, ancestor) { | |
+ element = $(element), ancestor = $(ancestor); | |
+ | |
+ if (element.compareDocumentPosition) | |
+ return (element.compareDocumentPosition(ancestor) & 8) === 8; | |
+ | |
+ if (element.sourceIndex && !Prototype.Browser.Opera) { | |
+ var e = element.sourceIndex, a = ancestor.sourceIndex, | |
+ nextAncestor = ancestor.nextSibling; | |
+ if (!nextAncestor) { | |
+ do { ancestor = ancestor.parentNode; } | |
+ while (!(nextAncestor = ancestor.nextSibling) && ancestor.parentNode); | |
+ } | |
+ if (nextAncestor) return (e > a && e < nextAncestor.sourceIndex); | |
+ } | |
+ | |
+ while (element = element.parentNode) | |
+ if (element == ancestor) return true; | |
+ return false; | |
+ }, | |
+ | |
+ scrollTo: function(element) { | |
+ element = $(element); | |
+ var pos = element.cumulativeOffset(); | |
+ window.scrollTo(pos[0], pos[1]); | |
+ return element; | |
+ }, | |
+ | |
+ getStyle: function(element, style) { | |
+ element = $(element); | |
+ style = style == 'float' ? 'cssFloat' : style.camelize(); | |
+ var value = element.style[style]; | |
+ if (!value) { | |
+ var css = document.defaultView.getComputedStyle(element, null); | |
+ value = css ? css[style] : null; | |
+ } | |
+ if (style == 'opacity') return value ? parseFloat(value) : 1.0; | |
+ return value == 'auto' ? null : value; | |
+ }, | |
+ | |
+ getOpacity: function(element) { | |
+ return $(element).getStyle('opacity'); | |
+ }, | |
+ | |
+ setStyle: function(element, styles) { | |
+ element = $(element); | |
+ var elementStyle = element.style, match; | |
+ if (Object.isString(styles)) { | |
+ element.style.cssText += ';' + styles; | |
+ return styles.include('opacity') ? | |
+ element.setOpacity(styles.match(/opacity:\s*(\d?\.?\d*)/)[1]) : element; | |
+ } | |
+ for (var property in styles) | |
+ if (property == 'opacity') element.setOpacity(styles[property]); | |
+ else | |
+ elementStyle[(property == 'float' || property == 'cssFloat') ? | |
+ (elementStyle.styleFloat === undefined ? 'cssFloat' : 'styleFloat') : | |
+ property] = styles[property]; | |
+ | |
+ return element; | |
+ }, | |
+ | |
+ setOpacity: function(element, value) { | |
+ element = $(element); | |
+ element.style.opacity = (value == 1 || value === '') ? '' : | |
+ (value < 0.00001) ? 0 : value; | |
+ return element; | |
+ }, | |
+ | |
+ getDimensions: function(element) { | |
+ element = $(element); | |
+ var display = $(element).getStyle('display'); | |
+ if (display != 'none' && display != null) // Safari bug | |
+ return {width: element.offsetWidth, height: element.offsetHeight}; | |
+ | |
+ // All *Width and *Height properties give 0 on elements with display none, | |
+ // so enable the element temporarily | |
+ var els = element.style; | |
+ var originalVisibility = els.visibility; | |
+ var originalPosition = els.position; | |
+ var originalDisplay = els.display; | |
+ els.visibility = 'hidden'; | |
+ els.position = 'absolute'; | |
+ els.display = 'block'; | |
+ var originalWidth = element.clientWidth; | |
+ var originalHeight = element.clientHeight; | |
+ els.display = originalDisplay; | |
+ els.position = originalPosition; | |
+ els.visibility = originalVisibility; | |
+ return {width: originalWidth, height: originalHeight}; | |
+ }, | |
+ | |
+ makePositioned: function(element) { | |
+ element = $(element); | |
+ var pos = Element.getStyle(element, 'position'); | |
+ if (pos == 'static' || !pos) { | |
+ element._madePositioned = true; | |
+ element.style.position = 'relative'; | |
+ // Opera returns the offset relative to the positioning context, when an | |
+ // element is position relative but top and left have not been defined | |
+ if (window.opera) { | |
+ element.style.top = 0; | |
+ element.style.left = 0; | |
+ } | |
+ } | |
+ return element; | |
+ }, | |
+ | |
+ undoPositioned: function(element) { | |
+ element = $(element); | |
+ if (element._madePositioned) { | |
+ element._madePositioned = undefined; | |
+ element.style.position = | |
+ element.style.top = | |
+ element.style.left = | |
+ element.style.bottom = | |
+ element.style.right = ''; | |
+ } | |
+ return element; | |
+ }, | |
+ | |
+ makeClipping: function(element) { | |
+ element = $(element); | |
+ if (element._overflow) return element; | |
+ element._overflow = Element.getStyle(element, 'overflow') || 'auto'; | |
+ if (element._overflow !== 'hidden') | |
+ element.style.overflow = 'hidden'; | |
+ return element; | |
+ }, | |
+ | |
+ undoClipping: function(element) { | |
+ element = $(element); | |
+ if (!element._overflow) return element; | |
+ element.style.overflow = element._overflow == 'auto' ? '' : element._overflow; | |
+ element._overflow = null; | |
+ return element; | |
+ }, | |
+ | |
+ cumulativeOffset: function(element) { | |
+ var valueT = 0, valueL = 0; | |
+ do { | |
+ valueT += element.offsetTop || 0; | |
+ valueL += element.offsetLeft || 0; | |
+ element = element.offsetParent; | |
+ } while (element); | |
+ return Element._returnOffset(valueL, valueT); | |
+ }, | |
+ | |
+ positionedOffset: function(element) { | |
+ var valueT = 0, valueL = 0; | |
+ do { | |
+ valueT += element.offsetTop || 0; | |
+ valueL += element.offsetLeft || 0; | |
+ element = element.offsetParent; | |
+ if (element) { | |
+ if (element.tagName == 'BODY') break; | |
+ var p = Element.getStyle(element, 'position'); | |
+ if (p == 'relative' || p == 'absolute') break; | |
+ } | |
+ } while (element); | |
+ return Element._returnOffset(valueL, valueT); | |
+ }, | |
+ | |
+ absolutize: function(element) { | |
+ element = $(element); | |
+ if (element.getStyle('position') == 'absolute') return; | |
+ // Position.prepare(); // To be done manually by Scripty when it needs it. | |
+ | |
+ var offsets = element.positionedOffset(); | |
+ var top = offsets[1]; | |
+ var left = offsets[0]; | |
+ var width = element.clientWidth; | |
+ var height = element.clientHeight; | |
+ | |
+ element._originalLeft = left - parseFloat(element.style.left || 0); | |
+ element._originalTop = top - parseFloat(element.style.top || 0); | |
+ element._originalWidth = element.style.width; | |
+ element._originalHeight = element.style.height; | |
+ | |
+ element.style.position = 'absolute'; | |
+ element.style.top = top + 'px'; | |
+ element.style.left = left + 'px'; | |
+ element.style.width = width + 'px'; | |
+ element.style.height = height + 'px'; | |
+ return element; | |
+ }, | |
+ | |
+ relativize: function(element) { | |
+ element = $(element); | |
+ if (element.getStyle('position') == 'relative') return; | |
+ // Position.prepare(); // To be done manually by Scripty when it needs it. | |
+ | |
+ element.style.position = 'relative'; | |
+ var top = parseFloat(element.style.top || 0) - (element._originalTop || 0); | |
+ var left = parseFloat(element.style.left || 0) - (element._originalLeft || 0); | |
+ | |
+ element.style.top = top + 'px'; | |
+ element.style.left = left + 'px'; | |
+ element.style.height = element._originalHeight; | |
+ element.style.width = element._originalWidth; | |
+ return element; | |
+ }, | |
+ | |
+ cumulativeScrollOffset: function(element) { | |
+ var valueT = 0, valueL = 0; | |
+ do { | |
+ valueT += element.scrollTop || 0; | |
+ valueL += element.scrollLeft || 0; | |
+ element = element.parentNode; | |
+ } while (element); | |
+ return Element._returnOffset(valueL, valueT); | |
+ }, | |
+ | |
+ getOffsetParent: function(element) { | |
+ if (element.offsetParent) return $(element.offsetParent); | |
+ if (element == document.body) return $(element); | |
+ | |
+ while ((element = element.parentNode) && element != document.body) | |
+ if (Element.getStyle(element, 'position') != 'static') | |
+ return $(element); | |
+ | |
+ return $(document.body); | |
+ }, | |
+ | |
+ viewportOffset: function(forElement) { | |
+ var valueT = 0, valueL = 0; | |
+ | |
+ var element = forElement; | |
+ do { | |
+ valueT += element.offsetTop || 0; | |
+ valueL += element.offsetLeft || 0; | |
+ | |
+ // Safari fix | |
+ if (element.offsetParent == document.body && | |
+ Element.getStyle(element, 'position') == 'absolute') break; | |
+ | |
+ } while (element = element.offsetParent); | |
+ | |
+ element = forElement; | |
+ do { | |
+ if (!Prototype.Browser.Opera || element.tagName == 'BODY') { | |
+ valueT -= element.scrollTop || 0; | |
+ valueL -= element.scrollLeft || 0; | |
+ } | |
+ } while (element = element.parentNode); | |
+ | |
+ return Element._returnOffset(valueL, valueT); | |
+ }, | |
+ | |
+ clonePosition: function(element, source) { | |
+ var options = Object.extend({ | |
+ setLeft: true, | |
+ setTop: true, | |
+ setWidth: true, | |
+ setHeight: true, | |
+ offsetTop: 0, | |
+ offsetLeft: 0 | |
+ }, arguments[2] || { }); | |
+ | |
+ // find page position of source | |
+ source = $(source); | |
+ var p = source.viewportOffset(); | |
+ | |
+ // find coordinate system to use | |
+ element = $(element); | |
+ var delta = [0, 0]; | |
+ var parent = null; | |
+ // delta [0,0] will do fine with position: fixed elements, | |
+ // position:absolute needs offsetParent deltas | |
+ if (Element.getStyle(element, 'position') == 'absolute') { | |
+ parent = element.getOffsetParent(); | |
+ delta = parent.viewportOffset(); | |
+ } | |
+ | |
+ // correct by body offsets (fixes Safari) | |
+ if (parent == document.body) { | |
+ delta[0] -= document.body.offsetLeft; | |
+ delta[1] -= document.body.offsetTop; | |
+ } | |
+ | |
+ // set position | |
+ if (options.setLeft) element.style.left = (p[0] - delta[0] + options.offsetLeft) + 'px'; | |
+ if (options.setTop) element.style.top = (p[1] - delta[1] + options.offsetTop) + 'px'; | |
+ if (options.setWidth) element.style.width = source.offsetWidth + 'px'; | |
+ if (options.setHeight) element.style.height = source.offsetHeight + 'px'; | |
+ return element; | |
+ } | |
+}; | |
+ | |
+Element.Methods.identify.counter = 1; | |
+ | |
+Object.extend(Element.Methods, { | |
+ getElementsBySelector: Element.Methods.select, | |
+ childElements: Element.Methods.immediateDescendants | |
+}); | |
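Taken together, Element.Methods backs both the generic Element.hide(el) call style and the methodized instance calls available on extended elements. A short usage sketch, not part of the diff; 'sidebar' is a hypothetical element id and the style values are arbitrary:

var el = $('sidebar');
el.hide();                               // display: none
el.addClassName('collapsed');            // no-op if the class is already present
el.insert({ top: '<h3>Menu</h3>' });     // positions: before, top, bottom, after
el.setStyle({ opacity: 0.5 });           // opacity is routed through setOpacity
el.show();
var size = el.getDimensions();           // works even for hidden elements (temporary display swap)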
+ | |
+Element._attributeTranslations = { | |
+ write: { | |
+ names: { | |
+ className: 'class', | |
+ htmlFor: 'for' | |
+ }, | |
+ values: { } | |
+ } | |
+}; | |
+ | |
+ | |
+if (!document.createRange || Prototype.Browser.Opera) { | |
+ Element.Methods.insert = function(element, insertions) { | |
+ element = $(element); | |
+ | |
+ if (Object.isString(insertions) || Object.isNumber(insertions) || | |
+ Object.isElement(insertions) || (insertions && (insertions.toElement || insertions.toHTML))) | |
+ insertions = { bottom: insertions }; | |
+ | |
+ var t = Element._insertionTranslations, content, position, pos, tagName; | |
+ | |
+ for (position in insertions) { | |
+ content = insertions[position]; | |
+ position = position.toLowerCase(); | |
+ pos = t[position]; | |
+ | |
+ if (content && content.toElement) content = content.toElement(); | |
+ if (Object.isElement(content)) { | |
+ pos.insert(element, content); | |
+ continue; | |
+ } | |
+ | |
+ content = Object.toHTML(content); | |
+ tagName = ((position == 'before' || position == 'after') | |
+ ? element.parentNode : element).tagName.toUpperCase(); | |
+ | |
+ if (t.tags[tagName]) { | |
+ var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts()); | |
+ if (position == 'top' || position == 'after') fragments.reverse(); | |
+ fragments.each(pos.insert.curry(element)); | |
+ } | |
+ else element.insertAdjacentHTML(pos.adjacency, content.stripScripts()); | |
+ | |
+ content.evalScripts.bind(content).defer(); | |
+ } | |
+ | |
+ return element; | |
+ }; | |
+} | |
+ | |
+if (Prototype.Browser.Opera) { | |
+ Element.Methods._getStyle = Element.Methods.getStyle; | |
+ Element.Methods.getStyle = function(element, style) { | |
+ switch(style) { | |
+ case 'left': | |
+ case 'top': | |
+ case 'right': | |
+ case 'bottom': | |
+ if (Element._getStyle(element, 'position') == 'static') return null; | |
+ default: return Element._getStyle(element, style); | |
+ } | |
+ }; | |
+ Element.Methods._readAttribute = Element.Methods.readAttribute; | |
+ Element.Methods.readAttribute = function(element, attribute) { | |
+ if (attribute == 'title') return element.title; | |
+ return Element._readAttribute(element, attribute); | |
+ }; | |
+} | |
+ | |
+else if (Prototype.Browser.IE) { | |
+ $w('positionedOffset getOffsetParent viewportOffset').each(function(method) { | |
+ Element.Methods[method] = Element.Methods[method].wrap( | |
+ function(proceed, element) { | |
+ element = $(element); | |
+ var position = element.getStyle('position'); | |
+ if (position != 'static') return proceed(element); | |
+ element.setStyle({ position: 'relative' }); | |
+ var value = proceed(element); | |
+ element.setStyle({ position: position }); | |
+ return value; | |
+ } | |
+ ); | |
+ }); | |
+ | |
+ Element.Methods.getStyle = function(element, style) { | |
+ element = $(element); | |
+ style = (style == 'float' || style == 'cssFloat') ? 'styleFloat' : style.camelize(); | |
+ var value = element.style[style]; | |
+ if (!value && element.currentStyle) value = element.currentStyle[style]; | |
+ | |
+ if (style == 'opacity') { | |
+ if (value = (element.getStyle('filter') || '').match(/alpha\(opacity=(.*)\)/)) | |
+ if (value[1]) return parseFloat(value[1]) / 100; | |
+ return 1.0; | |
+ } | |
+ | |
+ if (value == 'auto') { | |
+ if ((style == 'width' || style == 'height') && (element.getStyle('display') != 'none')) | |
+ return element['offset' + style.capitalize()] + 'px'; | |
+ return null; | |
+ } | |
+ return value; | |
+ }; | |
+ | |
+ Element.Methods.setOpacity = function(element, value) { | |
+ function stripAlpha(filter){ | |
+ return filter.replace(/alpha\([^\)]*\)/gi,''); | |
+ } | |
+ element = $(element); | |
+ var currentStyle = element.currentStyle; | |
+ if ((currentStyle && !currentStyle.hasLayout) || | |
+ (!currentStyle && element.style.zoom == 'normal')) | |
+ element.style.zoom = 1; | |
+ | |
+ var filter = element.getStyle('filter'), style = element.style; | |
+ if (value == 1 || value === '') { | |
+ (filter = stripAlpha(filter)) ? | |
+ style.filter = filter : style.removeAttribute('filter'); | |
+ return element; | |
+ } else if (value < 0.00001) value = 0; | |
+ style.filter = stripAlpha(filter) + | |
+ 'alpha(opacity=' + (value * 100) + ')'; | |
+ return element; | |
+ }; | |
+ | |
+ Element._attributeTranslations = { | |
+ read: { | |
+ names: { | |
+ 'class': 'className', | |
+ 'for': 'htmlFor' | |
+ }, | |
+ values: { | |
+ _getAttr: function(element, attribute) { | |
+ return element.getAttribute(attribute, 2); | |
+ }, | |
+ _getAttrNode: function(element, attribute) { | |
+ var node = element.getAttributeNode(attribute); | |
+ return node ? node.value : ""; | |
+ }, | |
+ _getEv: function(element, attribute) { | |
+ var attribute = element.getAttribute(attribute); | |
+ return attribute ? attribute.toString().slice(23, -2) : null; | |
+ }, | |
+ _flag: function(element, attribute) { | |
+ return $(element).hasAttribute(attribute) ? attribute : null; | |
+ }, | |
+ style: function(element) { | |
+ return element.style.cssText.toLowerCase(); | |
+ }, | |
+ title: function(element) { | |
+ return element.title; | |
+ } | |
+ } | |
+ } | |
+ }; | |
+ | |
+ Element._attributeTranslations.write = { | |
+ names: Object.clone(Element._attributeTranslations.read.names), | |
+ values: { | |
+ checked: function(element, value) { | |
+ element.checked = !!value; | |
+ }, | |
+ | |
+ style: function(element, value) { | |
+ element.style.cssText = value ? value : ''; | |
+ } | |
+ } | |
+ }; | |
+ | |
+ Element._attributeTranslations.has = {}; | |
+ | |
+ $w('colSpan rowSpan vAlign dateTime accessKey tabIndex ' + | |
+ 'encType maxLength readOnly longDesc').each(function(attr) { | |
+ Element._attributeTranslations.write.names[attr.toLowerCase()] = attr; | |
+ Element._attributeTranslations.has[attr.toLowerCase()] = attr; | |
+ }); | |
+ | |
+ (function(v) { | |
+ Object.extend(v, { | |
+ href: v._getAttr, | |
+ src: v._getAttr, | |
+ type: v._getAttr, | |
+ action: v._getAttrNode, | |
+ disabled: v._flag, | |
+ checked: v._flag, | |
+ readonly: v._flag, | |
+ multiple: v._flag, | |
+ onload: v._getEv, | |
+ onunload: v._getEv, | |
+ onclick: v._getEv, | |
+ ondblclick: v._getEv, | |
+ onmousedown: v._getEv, | |
+ onmouseup: v._getEv, | |
+ onmouseover: v._getEv, | |
+ onmousemove: v._getEv, | |
+ onmouseout: v._getEv, | |
+ onfocus: v._getEv, | |
+ onblur: v._getEv, | |
+ onkeypress: v._getEv, | |
+ onkeydown: v._getEv, | |
+ onkeyup: v._getEv, | |
+ onsubmit: v._getEv, | |
+ onreset: v._getEv, | |
+ onselect: v._getEv, | |
+ onchange: v._getEv | |
+ }); | |
+ })(Element._attributeTranslations.read.values); | |
+} | |
+ | |
+else if (Prototype.Browser.Gecko && /rv:1\.8\.0/.test(navigator.userAgent)) { | |
+ Element.Methods.setOpacity = function(element, value) { | |
+ element = $(element); | |
+ element.style.opacity = (value == 1) ? 0.999999 : | |
+ (value === '') ? '' : (value < 0.00001) ? 0 : value; | |
+ return element; | |
+ }; | |
+} | |
+ | |
+else if (Prototype.Browser.WebKit) { | |
+ Element.Methods.setOpacity = function(element, value) { | |
+ element = $(element); | |
+ element.style.opacity = (value == 1 || value === '') ? '' : | |
+ (value < 0.00001) ? 0 : value; | |
+ | |
+ if (value == 1) | |
+ if(element.tagName == 'IMG' && element.width) { | |
+ element.width++; element.width--; | |
+ } else try { | |
+ var n = document.createTextNode(' '); | |
+ element.appendChild(n); | |
+ element.removeChild(n); | |
+ } catch (e) { } | |
+ | |
+ return element; | |
+ }; | |
+ | |
+ // Safari returns margins on body which is incorrect if the child is absolutely | |
+ // positioned. For performance reasons, redefine Position.cumulativeOffset for | |
+ // KHTML/WebKit only. | |
+ Element.Methods.cumulativeOffset = function(element) { | |
+ var valueT = 0, valueL = 0; | |
+ do { | |
+ valueT += element.offsetTop || 0; | |
+ valueL += element.offsetLeft || 0; | |
+ if (element.offsetParent == document.body) | |
+ if (Element.getStyle(element, 'position') == 'absolute') break; | |
+ | |
+ element = element.offsetParent; | |
+ } while (element); | |
+ | |
+ return Element._returnOffset(valueL, valueT); | |
+ }; | |
+} | |
+ | |
+if (Prototype.Browser.IE || Prototype.Browser.Opera) { | |
+ // IE and Opera are missing .innerHTML support for TABLE-related and SELECT elements | |
+ Element.Methods.update = function(element, content) { | |
+ element = $(element); | |
+ | |
+ if (content && content.toElement) content = content.toElement(); | |
+ if (Object.isElement(content)) return element.update().insert(content); | |
+ | |
+ content = Object.toHTML(content); | |
+ var tagName = element.tagName.toUpperCase(); | |
+ | |
+ if (tagName in Element._insertionTranslations.tags) { | |
+ $A(element.childNodes).each(function(node) { element.removeChild(node) }); | |
+ Element._getContentFromAnonymousElement(tagName, content.stripScripts()) | |
+ .each(function(node) { element.appendChild(node) }); | |
+ } | |
+ else element.innerHTML = content.stripScripts(); | |
+ | |
+ content.evalScripts.bind(content).defer(); | |
+ return element; | |
+ }; | |
+} | |
+ | |
+if (document.createElement('div').outerHTML) { | |
+ Element.Methods.replace = function(element, content) { | |
+ element = $(element); | |
+ | |
+ if (content && content.toElement) content = content.toElement(); | |
+ if (Object.isElement(content)) { | |
+ element.parentNode.replaceChild(content, element); | |
+ return element; | |
+ } | |
+ | |
+ content = Object.toHTML(content); | |
+ var parent = element.parentNode, tagName = parent.tagName.toUpperCase(); | |
+ | |
+ if (Element._insertionTranslations.tags[tagName]) { | |
+ var nextSibling = element.next(); | |
+ var fragments = Element._getContentFromAnonymousElement(tagName, content.stripScripts()); | |
+ parent.removeChild(element); | |
+ if (nextSibling) | |
+ fragments.each(function(node) { parent.insertBefore(node, nextSibling) }); | |
+ else | |
+ fragments.each(function(node) { parent.appendChild(node) }); | |
+ } | |
+ else element.outerHTML = content.stripScripts(); | |
+ | |
+ content.evalScripts.bind(content).defer(); | |
+ return element; | |
+ }; | |
+} | |
+ | |
+Element._returnOffset = function(l, t) { | |
+ var result = [l, t]; | |
+ result.left = l; | |
+ result.top = t; | |
+ return result; | |
+}; | |
+ | |
+Element._getContentFromAnonymousElement = function(tagName, html) { | |
+ var div = new Element('div'), t = Element._insertionTranslations.tags[tagName]; | |
+ div.innerHTML = t[0] + html + t[1]; | |
+ t[2].times(function() { div = div.firstChild }); | |
+ return $A(div.childNodes); | |
+}; | |
+ | |
+Element._insertionTranslations = { | |
+ before: { | |
+ adjacency: 'beforeBegin', | |
+ insert: function(element, node) { | |
+ element.parentNode.insertBefore(node, element); | |
+ }, | |
+ initializeRange: function(element, range) { | |
+ range.setStartBefore(element); | |
+ } | |
+ }, | |
+ top: { | |
+ adjacency: 'afterBegin', | |
+ insert: function(element, node) { | |
+ element.insertBefore(node, element.firstChild); | |
+ }, | |
+ initializeRange: function(element, range) { | |
+ range.selectNodeContents(element); | |
+ range.collapse(true); | |
+ } | |
+ }, | |
+ bottom: { | |
+ adjacency: 'beforeEnd', | |
+ insert: function(element, node) { | |
+ element.appendChild(node); | |
+ } | |
+ }, | |
+ after: { | |
+ adjacency: 'afterEnd', | |
+ insert: function(element, node) { | |
+ element.parentNode.insertBefore(node, element.nextSibling); | |
+ }, | |
+ initializeRange: function(element, range) { | |
+ range.setStartAfter(element); | |
+ } | |
+ }, | |
+ tags: { | |
+ TABLE: ['<table>', '</table>', 1], | |
+ TBODY: ['<table><tbody>', '</tbody></table>', 2], | |
+ TR: ['<table><tbody><tr>', '</tr></tbody></table>', 3], | |
+ TD: ['<table><tbody><tr><td>', '</td></tr></tbody></table>', 4], | |
+ SELECT: ['<select>', '</select>', 1] | |
+ } | |
+}; | |
+ | |
+(function() { | |
+ this.bottom.initializeRange = this.top.initializeRange; | |
+ Object.extend(this.tags, { | |
+ THEAD: this.tags.TBODY, | |
+ TFOOT: this.tags.TBODY, | |
+ TH: this.tags.TD | |
+ }); | |
+}).call(Element._insertionTranslations); | |
+ | |
+Element.Methods.Simulated = { | |
+ hasAttribute: function(element, attribute) { | |
+ attribute = Element._attributeTranslations.has[attribute] || attribute; | |
+ var node = $(element).getAttributeNode(attribute); | |
+ return node && node.specified; | |
+ } | |
+}; | |
+ | |
+Element.Methods.ByTag = { }; | |
+ | |
+Object.extend(Element, Element.Methods); | |
+ | |
+if (!Prototype.BrowserFeatures.ElementExtensions && | |
+ document.createElement('div').__proto__) { | |
+ window.HTMLElement = { }; | |
+ window.HTMLElement.prototype = document.createElement('div').__proto__; | |
+ Prototype.BrowserFeatures.ElementExtensions = true; | |
+} | |
+ | |
+Element.extend = (function() { | |
+ if (Prototype.BrowserFeatures.SpecificElementExtensions) | |
+ return Prototype.K; | |
+ | |
+ var Methods = { }, ByTag = Element.Methods.ByTag; | |
+ | |
+ var extend = Object.extend(function(element) { | |
+ if (!element || element._extendedByPrototype || | |
+ element.nodeType != 1 || element == window) return element; | |
+ | |
+ var methods = Object.clone(Methods), | |
+ tagName = element.tagName, property, value; | |
+ | |
+ // extend methods for specific tags | |
+ if (ByTag[tagName]) Object.extend(methods, ByTag[tagName]); | |
+ | |
+ for (property in methods) { | |
+ value = methods[property]; | |
+ if (Object.isFunction(value) && !(property in element)) | |
+ element[property] = value.methodize(); | |
+ } | |
+ | |
+ element._extendedByPrototype = Prototype.emptyFunction; | |
+ return element; | |
+ | |
+ }, { | |
+ refresh: function() { | |
+ // extend methods for all tags (Safari doesn't need this) | |
+ if (!Prototype.BrowserFeatures.ElementExtensions) { | |
+ Object.extend(Methods, Element.Methods); | |
+ Object.extend(Methods, Element.Methods.Simulated); | |
+ } | |
+ } | |
+ }); | |
+ | |
+ extend.refresh(); | |
+ return extend; | |
+})(); | |
+ | |
+Element.hasAttribute = function(element, attribute) { | |
+ if (element.hasAttribute) return element.hasAttribute(attribute); | |
+ return Element.Methods.Simulated.hasAttribute(element, attribute); | |
+}; | |
+ | |
+Element.addMethods = function(methods) { | |
+ var F = Prototype.BrowserFeatures, T = Element.Methods.ByTag; | |
+ | |
+ if (!methods) { | |
+ Object.extend(Form, Form.Methods); | |
+ Object.extend(Form.Element, Form.Element.Methods); | |
+ Object.extend(Element.Methods.ByTag, { | |
+ "FORM": Object.clone(Form.Methods), | |
+ "INPUT": Object.clone(Form.Element.Methods), | |
+ "SELECT": Object.clone(Form.Element.Methods), | |
+ "TEXTAREA": Object.clone(Form.Element.Methods) | |
+ }); | |
+ } | |
+ | |
+ if (arguments.length == 2) { | |
+ var tagName = methods; | |
+ methods = arguments[1]; | |
+ } | |
+ | |
+ if (!tagName) Object.extend(Element.Methods, methods || { }); | |
+ else { | |
+ if (Object.isArray(tagName)) tagName.each(extend); | |
+ else extend(tagName); | |
+ } | |
+ | |
+ function extend(tagName) { | |
+ tagName = tagName.toUpperCase(); | |
+ if (!Element.Methods.ByTag[tagName]) | |
+ Element.Methods.ByTag[tagName] = { }; | |
+ Object.extend(Element.Methods.ByTag[tagName], methods); | |
+ } | |
+ | |
+ function copy(methods, destination, onlyIfAbsent) { | |
+ onlyIfAbsent = onlyIfAbsent || false; | |
+ for (var property in methods) { | |
+ var value = methods[property]; | |
+ if (!Object.isFunction(value)) continue; | |
+ if (!onlyIfAbsent || !(property in destination)) | |
+ destination[property] = value.methodize(); | |
+ } | |
+ } | |
+ | |
+ function findDOMClass(tagName) { | |
+ var klass; | |
+ var trans = { | |
+ "OPTGROUP": "OptGroup", "TEXTAREA": "TextArea", "P": "Paragraph", | |
+ "FIELDSET": "FieldSet", "UL": "UList", "OL": "OList", "DL": "DList", | |
+ "DIR": "Directory", "H1": "Heading", "H2": "Heading", "H3": "Heading", | |
+ "H4": "Heading", "H5": "Heading", "H6": "Heading", "Q": "Quote", | |
+ "INS": "Mod", "DEL": "Mod", "A": "Anchor", "IMG": "Image", "CAPTION": | |
+ "TableCaption", "COL": "TableCol", "COLGROUP": "TableCol", "THEAD": | |
+ "TableSection", "TFOOT": "TableSection", "TBODY": "TableSection", "TR": | |
+ "TableRow", "TH": "TableCell", "TD": "TableCell", "FRAMESET": | |
+ "FrameSet", "IFRAME": "IFrame" | |
+ }; | |
+ if (trans[tagName]) klass = 'HTML' + trans[tagName] + 'Element'; | |
+ if (window[klass]) return window[klass]; | |
+ klass = 'HTML' + tagName + 'Element'; | |
+ if (window[klass]) return window[klass]; | |
+ klass = 'HTML' + tagName.capitalize() + 'Element'; | |
+ if (window[klass]) return window[klass]; | |
+ | |
+ window[klass] = { }; | |
+ window[klass].prototype = document.createElement(tagName).__proto__; | |
+ return window[klass]; | |
+ } | |
+ | |
+ if (F.ElementExtensions) { | |
+ copy(Element.Methods, HTMLElement.prototype); | |
+ copy(Element.Methods.Simulated, HTMLElement.prototype, true); | |
+ } | |
+ | |
+ if (F.SpecificElementExtensions) { | |
+ for (var tag in Element.Methods.ByTag) { | |
+ var klass = findDOMClass(tag); | |
+ if (Object.isUndefined(klass)) continue; | |
+ copy(T[tag], klass.prototype); | |
+ } | |
+ } | |
+ | |
+ Object.extend(Element, Element.Methods); | |
+ delete Element.ByTag; | |
+ | |
+ if (Element.extend.refresh) Element.extend.refresh(); | |
+ Element.cache = { }; | |
+}; | |
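Element.addMethods is also the public hook for mixing in user-defined methods, either globally or per tag. A hedged sketch; the method names, bodies and the 'notice' id are made up for illustration:

Element.addMethods({
  flash: function(element) {
    element = $(element);
    return element.hide().show();        // trivial body, purely illustrative
  }
});

Element.addMethods('DIV', {
  clearChildren: function(element) {
    return $(element).update('');
  }
});

$('notice').flash();                     // now available on extended elements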
+ | |
+document.viewport = { | |
+ getDimensions: function() { | |
+ var dimensions = { }; | |
+ $w('width height').each(function(d) { | |
+ var D = d.capitalize(); | |
+ dimensions[d] = self['inner' + D] || | |
+ (document.documentElement['client' + D] || document.body['client' + D]); | |
+ }); | |
+ return dimensions; | |
+ }, | |
+ | |
+ getWidth: function() { | |
+ return this.getDimensions().width; | |
+ }, | |
+ | |
+ getHeight: function() { | |
+ return this.getDimensions().height; | |
+ }, | |
+ | |
+ getScrollOffsets: function() { | |
+ return Element._returnOffset( | |
+ window.pageXOffset || document.documentElement.scrollLeft || document.body.scrollLeft, | |
+ window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop); | |
+ } | |
+}; | |
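document.viewport rounds out the DOM section with viewport metrics. A quick sketch of the read-only API, not part of the patch:

var size = document.viewport.getDimensions();       // { width: ..., height: ... } in pixels
var offsets = document.viewport.getScrollOffsets(); // array-like with .left and .top
var scrolledPastFirstScreen = offsets.top > size.height;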
+/* Portions of the Selector class are derived from Jack Slocum’s DomQuery, | |
+ * part of YUI-Ext version 0.40, distributed under the terms of an MIT-style | |
+ * license. Please see http://www.yui-ext.com/ for more information. */ | |
+ | |
+var Selector = Class.create({ | |
+ initialize: function(expression) { | |
+ this.expression = expression.strip(); | |
+ this.compileMatcher(); | |
+ }, | |
+ | |
+ compileMatcher: function() { | |
+ // Selectors with namespaced attributes can't use the XPath version | |
+ if (Prototype.BrowserFeatures.XPath && !(/(\[[\w-]*?:|:checked)/).test(this.expression)) | |
+ return this.compileXPathMatcher(); | |
+ | |
+ var e = this.expression, ps = Selector.patterns, h = Selector.handlers, | |
+ c = Selector.criteria, le, p, m; | |
+ | |
+ if (Selector._cache[e]) { | |
+ this.matcher = Selector._cache[e]; | |
+ return; | |
+ } | |
+ | |
+ this.matcher = ["this.matcher = function(root) {", | |
+ "var r = root, h = Selector.handlers, c = false, n;"]; | |
+ | |
+ while (e && le != e && (/\S/).test(e)) { | |
+ le = e; | |
+ for (var i in ps) { | |
+ p = ps[i]; | |
+ if (m = e.match(p)) { | |
+ this.matcher.push(Object.isFunction(c[i]) ? c[i](m) : | |
+ new Template(c[i]).evaluate(m)); | |
+ e = e.replace(m[0], ''); | |
+ break; | |
+ } | |
+ } | |
+ } | |
+ | |
+ this.matcher.push("return h.unique(n);\n}"); | |
+ eval(this.matcher.join('\n')); | |
+ Selector._cache[this.expression] = this.matcher; | |
+ }, | |
+ | |
+ compileXPathMatcher: function() { | |
+ var e = this.expression, ps = Selector.patterns, | |
+ x = Selector.xpath, le, m; | |
+ | |
+ if (Selector._cache[e]) { | |
+ this.xpath = Selector._cache[e]; return; | |
+ } | |
+ | |
+ this.matcher = ['.//*']; | |
+ while (e && le != e && (/\S/).test(e)) { | |
+ le = e; | |
+ for (var i in ps) { | |
+ if (m = e.match(ps[i])) { | |
+ this.matcher.push(Object.isFunction(x[i]) ? x[i](m) : | |
+ new Template(x[i]).evaluate(m)); | |
+ e = e.replace(m[0], ''); | |
+ break; | |
+ } | |
+ } | |
+ } | |
+ | |
+ this.xpath = this.matcher.join(''); | |
+ Selector._cache[this.expression] = this.xpath; | |
+ }, | |
+ | |
+ findElements: function(root) { | |
+ root = root || document; | |
+ if (this.xpath) return document._getElementsByXPath(this.xpath, root); | |
+ return this.matcher(root); | |
+ }, | |
+ | |
+ match: function(element) { | |
+ this.tokens = []; | |
+ | |
+ var e = this.expression, ps = Selector.patterns, as = Selector.assertions; | |
+ var le, p, m; | |
+ | |
+ while (e && le !== e && (/\S/).test(e)) { | |
+ le = e; | |
+ for (var i in ps) { | |
+ p = ps[i]; | |
+ if (m = e.match(p)) { | |
+ // use the Selector.assertions methods unless the selector | |
+ // is too complex. | |
+ if (as[i]) { | |
+ this.tokens.push([i, Object.clone(m)]); | |
+ e = e.replace(m[0], ''); | |
+ } else { | |
+ // reluctantly do a document-wide search | |
+ // and look for a match in the array | |
+ return this.findElements(document).include(element); | |
+ } | |
+ } | |
+ } | |
+ } | |
+ | |
+ var match = true, name, matches; | |
+ for (var i = 0, token; token = this.tokens[i]; i++) { | |
+ name = token[0], matches = token[1]; | |
+ if (!Selector.assertions[name](element, matches)) { | |
+ match = false; break; | |
+ } | |
+ } | |
+ | |
+ return match; | |
+ }, | |
+ | |
+ toString: function() { | |
+ return this.expression; | |
+ }, | |
+ | |
+ inspect: function() { | |
+ return "#<Selector:" + this.expression.inspect() + ">"; | |
+ } | |
+}); | |
+ | |
+Object.extend(Selector, { | |
+ _cache: { }, | |
+ | |
+ xpath: { | |
+ descendant: "//*", | |
+ child: "/*", | |
+ adjacent: "/following-sibling::*[1]", | |
+ laterSibling: '/following-sibling::*', | |
+ tagName: function(m) { | |
+ if (m[1] == '*') return ''; | |
+ return "[local-name()='" + m[1].toLowerCase() + | |
+ "' or local-name()='" + m[1].toUpperCase() + "']"; | |
+ }, | |
+ className: "[contains(concat(' ', @class, ' '), ' #{1} ')]", | |
+ id: "[@id='#{1}']", | |
+ attrPresence: "[@#{1}]", | |
+ attr: function(m) { | |
+ m[3] = m[5] || m[6]; | |
+ return new Template(Selector.xpath.operators[m[2]]).evaluate(m); | |
+ }, | |
+ pseudo: function(m) { | |
+ var h = Selector.xpath.pseudos[m[1]]; | |
+ if (!h) return ''; | |
+ if (Object.isFunction(h)) return h(m); | |
+ return new Template(Selector.xpath.pseudos[m[1]]).evaluate(m); | |
+ }, | |
+ operators: { | |
+ '=': "[@#{1}='#{3}']", | |
+ '!=': "[@#{1}!='#{3}']", | |
+ '^=': "[starts-with(@#{1}, '#{3}')]", | |
+ '$=': "[substring(@#{1}, (string-length(@#{1}) - string-length('#{3}') + 1))='#{3}']", | |
+ '*=': "[contains(@#{1}, '#{3}')]", | |
+ '~=': "[contains(concat(' ', @#{1}, ' '), ' #{3} ')]", | |
+ '|=': "[contains(concat('-', @#{1}, '-'), '-#{3}-')]" | |
+ }, | |
+ pseudos: { | |
+ 'first-child': '[not(preceding-sibling::*)]', | |
+ 'last-child': '[not(following-sibling::*)]', | |
+ 'only-child': '[not(preceding-sibling::* or following-sibling::*)]', | |
+ 'empty': "[count(*) = 0 and (count(text()) = 0 or translate(text(), ' \t\r\n', '') = '')]", | |
+ 'checked': "[@checked]", | |
+ 'disabled': "[@disabled]", | |
+ 'enabled': "[not(@disabled)]", | |
+ 'not': function(m) { | |
+ var e = m[6], p = Selector.patterns, | |
+ x = Selector.xpath, le, m, v; | |
+ | |
+ var exclusion = []; | |
+ while (e && le != e && (/\S/).test(e)) { | |
+ le = e; | |
+ for (var i in p) { | |
+ if (m = e.match(p[i])) { | |
+ v = Object.isFunction(x[i]) ? x[i](m) : new Template(x[i]).evaluate(m); | |
+ exclusion.push("(" + v.substring(1, v.length - 1) + ")"); | |
+ e = e.replace(m[0], ''); | |
+ break; | |
+ } | |
+ } | |
+ } | |
+ return "[not(" + exclusion.join(" and ") + ")]"; | |
+ }, | |
+ 'nth-child': function(m) { | |
+ return Selector.xpath.pseudos.nth("(count(./preceding-sibling::*) + 1) ", m); | |
+ }, | |
+ 'nth-last-child': function(m) { | |
+ return Selector.xpath.pseudos.nth("(count(./following-sibling::*) + 1) ", m); | |
+ }, | |
+ 'nth-of-type': function(m) { | |
+ return Selector.xpath.pseudos.nth("position() ", m); | |
+ }, | |
+ 'nth-last-of-type': function(m) { | |
+ return Selector.xpath.pseudos.nth("(last() + 1 - position()) ", m); | |
+ }, | |
+ 'first-of-type': function(m) { | |
+ m[6] = "1"; return Selector.xpath.pseudos['nth-of-type'](m); | |
+ }, | |
+ 'last-of-type': function(m) { | |
+ m[6] = "1"; return Selector.xpath.pseudos['nth-last-of-type'](m); | |
+ }, | |
+ 'only-of-type': function(m) { | |
+ var p = Selector.xpath.pseudos; return p['first-of-type'](m) + p['last-of-type'](m); | |
+ }, | |
+ nth: function(fragment, m) { | |
+ var mm, formula = m[6], predicate; | |
+ if (formula == 'even') formula = '2n+0'; | |
+ if (formula == 'odd') formula = '2n+1'; | |
+ if (mm = formula.match(/^(\d+)$/)) // digit only | |
+ return '[' + fragment + "= " + mm[1] + ']'; | |
+ if (mm = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b | |
+ if (mm[1] == "-") mm[1] = -1; | |
+ var a = mm[1] ? Number(mm[1]) : 1; | |
+ var b = mm[2] ? Number(mm[2]) : 0; | |
+ predicate = "[((#{fragment} - #{b}) mod #{a} = 0) and " + | |
+ "((#{fragment} - #{b}) div #{a} >= 0)]"; | |
+ return new Template(predicate).evaluate({ | |
+ fragment: fragment, a: a, b: b }); | |
+ } | |
+ } | |
+ } | |
+ }, | |
+ | |
+ criteria: { | |
+ tagName: 'n = h.tagName(n, r, "#{1}", c); c = false;', | |
+ className: 'n = h.className(n, r, "#{1}", c); c = false;', | |
+ id: 'n = h.id(n, r, "#{1}", c); c = false;', | |
+ attrPresence: 'n = h.attrPresence(n, r, "#{1}"); c = false;', | |
+ attr: function(m) { | |
+ m[3] = (m[5] || m[6]); | |
+ return new Template('n = h.attr(n, r, "#{1}", "#{3}", "#{2}"); c = false;').evaluate(m); | |
+ }, | |
+ pseudo: function(m) { | |
+ if (m[6]) m[6] = m[6].replace(/"/g, '\\"'); | |
+ return new Template('n = h.pseudo(n, "#{1}", "#{6}", r, c); c = false;').evaluate(m); | |
+ }, | |
+ descendant: 'c = "descendant";', | |
+ child: 'c = "child";', | |
+ adjacent: 'c = "adjacent";', | |
+ laterSibling: 'c = "laterSibling";' | |
+ }, | |
+ | |
+ patterns: { | |
+ // combinators must be listed first | |
+ // (and descendant needs to be last combinator) | |
+ laterSibling: /^\s*~\s*/, | |
+ child: /^\s*>\s*/, | |
+ adjacent: /^\s*\+\s*/, | |
+ descendant: /^\s/, | |
+ | |
+ // selectors follow | |
+ tagName: /^\s*(\*|[\w\-]+)(\b|$)?/, | |
+ id: /^#([\w\-\*]+)(\b|$)/, | |
+ className: /^\.([\w\-\*]+)(\b|$)/, | |
+ pseudo: /^:((first|last|nth|nth-last|only)(-child|-of-type)|empty|checked|(en|dis)abled|not)(\((.*?)\))?(\b|$|(?=\s)|(?=:))/, | |
+ attrPresence: /^\[([\w]+)\]/, | |
+ attr: /\[((?:[\w-]*:)?[\w-]+)\s*(?:([!^$*~|]?=)\s*((['"])([^\4]*?)\4|([^'"][^\]]*?)))?\]/ | |
+ }, | |
+ | |
+ // for Selector.match and Element#match | |
+ assertions: { | |
+ tagName: function(element, matches) { | |
+ return matches[1].toUpperCase() == element.tagName.toUpperCase(); | |
+ }, | |
+ | |
+ className: function(element, matches) { | |
+ return Element.hasClassName(element, matches[1]); | |
+ }, | |
+ | |
+ id: function(element, matches) { | |
+ return element.id === matches[1]; | |
+ }, | |
+ | |
+ attrPresence: function(element, matches) { | |
+ return Element.hasAttribute(element, matches[1]); | |
+ }, | |
+ | |
+ attr: function(element, matches) { | |
+ var nodeValue = Element.readAttribute(element, matches[1]); | |
+ return Selector.operators[matches[2]](nodeValue, matches[3]); | |
+ } | |
+ }, | |
+ | |
+ handlers: { | |
+ // UTILITY FUNCTIONS | |
+ // joins two collections | |
+ concat: function(a, b) { | |
+ for (var i = 0, node; node = b[i]; i++) | |
+ a.push(node); | |
+ return a; | |
+ }, | |
+ | |
+ // marks an array of nodes for counting | |
+ mark: function(nodes) { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ node._counted = true; | |
+ return nodes; | |
+ }, | |
+ | |
+ unmark: function(nodes) { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ node._counted = undefined; | |
+ return nodes; | |
+ }, | |
+ | |
+ // mark each child node with its position (for nth calls) | |
+ // "ofType" flag indicates whether we're indexing for nth-of-type | |
+ // rather than nth-child | |
+ index: function(parentNode, reverse, ofType) { | |
+ parentNode._counted = true; | |
+ if (reverse) { | |
+ for (var nodes = parentNode.childNodes, i = nodes.length - 1, j = 1; i >= 0; i--) { | |
+ var node = nodes[i]; | |
+ if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++; | |
+ } | |
+ } else { | |
+ for (var i = 0, j = 1, nodes = parentNode.childNodes; node = nodes[i]; i++) | |
+ if (node.nodeType == 1 && (!ofType || node._counted)) node.nodeIndex = j++; | |
+ } | |
+ }, | |
+ | |
+ // filters out duplicates and extends all nodes | |
+ unique: function(nodes) { | |
+ if (nodes.length == 0) return nodes; | |
+ var results = [], n; | |
+ for (var i = 0, l = nodes.length; i < l; i++) | |
+ if (!(n = nodes[i])._counted) { | |
+ n._counted = true; | |
+ results.push(Element.extend(n)); | |
+ } | |
+ return Selector.handlers.unmark(results); | |
+ }, | |
+ | |
+ // COMBINATOR FUNCTIONS | |
+ descendant: function(nodes) { | |
+ var h = Selector.handlers; | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ h.concat(results, node.getElementsByTagName('*')); | |
+ return results; | |
+ }, | |
+ | |
+ child: function(nodes) { | |
+ var h = Selector.handlers; | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) { | |
+ for (var j = 0, children = [], child; child = node.childNodes[j]; j++) | |
+ if (child.nodeType == 1 && child.tagName != '!') results.push(child); | |
+ } | |
+ return results; | |
+ }, | |
+ | |
+ adjacent: function(nodes) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) { | |
+ var next = this.nextElementSibling(node); | |
+ if (next) results.push(next); | |
+ } | |
+ return results; | |
+ }, | |
+ | |
+ laterSibling: function(nodes) { | |
+ var h = Selector.handlers; | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ h.concat(results, Element.nextSiblings(node)); | |
+ return results; | |
+ }, | |
+ | |
+ nextElementSibling: function(node) { | |
+ while (node = node.nextSibling) | |
+ if (node.nodeType == 1) return node; | |
+ return null; | |
+ }, | |
+ | |
+ previousElementSibling: function(node) { | |
+ while (node = node.previousSibling) | |
+ if (node.nodeType == 1) return node; | |
+ return null; | |
+ }, | |
+ | |
+ // TOKEN FUNCTIONS | |
+ tagName: function(nodes, root, tagName, combinator) { | |
+ tagName = tagName.toUpperCase(); | |
+ var results = [], h = Selector.handlers; | |
+ if (nodes) { | |
+ if (combinator) { | |
+ // fastlane for ordinary descendant combinators | |
+ if (combinator == "descendant") { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ h.concat(results, node.getElementsByTagName(tagName)); | |
+ return results; | |
+ } else nodes = this[combinator](nodes); | |
+ if (tagName == "*") return nodes; | |
+ } | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (node.tagName.toUpperCase() == tagName) results.push(node); | |
+ return results; | |
+ } else return root.getElementsByTagName(tagName); | |
+ }, | |
+ | |
+ id: function(nodes, root, id, combinator) { | |
+ var targetNode = $(id), h = Selector.handlers; | |
+ if (!targetNode) return []; | |
+ if (!nodes && root == document) return [targetNode]; | |
+ if (nodes) { | |
+ if (combinator) { | |
+ if (combinator == 'child') { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (targetNode.parentNode == node) return [targetNode]; | |
+ } else if (combinator == 'descendant') { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (Element.descendantOf(targetNode, node)) return [targetNode]; | |
+ } else if (combinator == 'adjacent') { | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (Selector.handlers.previousElementSibling(targetNode) == node) | |
+ return [targetNode]; | |
+ } else nodes = h[combinator](nodes); | |
+ } | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (node == targetNode) return [targetNode]; | |
+ return []; | |
+ } | |
+ return (targetNode && Element.descendantOf(targetNode, root)) ? [targetNode] : []; | |
+ }, | |
+ | |
+ className: function(nodes, root, className, combinator) { | |
+ if (nodes && combinator) nodes = this[combinator](nodes); | |
+ return Selector.handlers.byClassName(nodes, root, className); | |
+ }, | |
+ | |
+ byClassName: function(nodes, root, className) { | |
+ if (!nodes) nodes = Selector.handlers.descendant([root]); | |
+ var needle = ' ' + className + ' '; | |
+ for (var i = 0, results = [], node, nodeClassName; node = nodes[i]; i++) { | |
+ nodeClassName = node.className; | |
+ if (nodeClassName.length == 0) continue; | |
+ if (nodeClassName == className || (' ' + nodeClassName + ' ').include(needle)) | |
+ results.push(node); | |
+ } | |
+ return results; | |
+ }, | |
+ | |
+ attrPresence: function(nodes, root, attr) { | |
+ if (!nodes) nodes = root.getElementsByTagName("*"); | |
+ var results = []; | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (Element.hasAttribute(node, attr)) results.push(node); | |
+ return results; | |
+ }, | |
+ | |
+ attr: function(nodes, root, attr, value, operator) { | |
+ if (!nodes) nodes = root.getElementsByTagName("*"); | |
+ var handler = Selector.operators[operator], results = []; | |
+ for (var i = 0, node; node = nodes[i]; i++) { | |
+ var nodeValue = Element.readAttribute(node, attr); | |
+ if (nodeValue === null) continue; | |
+ if (handler(nodeValue, value)) results.push(node); | |
+ } | |
+ return results; | |
+ }, | |
+ | |
+ pseudo: function(nodes, name, value, root, combinator) { | |
+ if (nodes && combinator) nodes = this[combinator](nodes); | |
+ if (!nodes) nodes = root.getElementsByTagName("*"); | |
+ return Selector.pseudos[name](nodes, value, root); | |
+ } | |
+ }, | |
+ | |
+ pseudos: { | |
+ 'first-child': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) { | |
+ if (Selector.handlers.previousElementSibling(node)) continue; | |
+ results.push(node); | |
+ } | |
+ return results; | |
+ }, | |
+ 'last-child': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) { | |
+ if (Selector.handlers.nextElementSibling(node)) continue; | |
+ results.push(node); | |
+ } | |
+ return results; | |
+ }, | |
+ 'only-child': function(nodes, value, root) { | |
+ var h = Selector.handlers; | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ if (!h.previousElementSibling(node) && !h.nextElementSibling(node)) | |
+ results.push(node); | |
+ return results; | |
+ }, | |
+ 'nth-child': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, formula, root); | |
+ }, | |
+ 'nth-last-child': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, formula, root, true); | |
+ }, | |
+ 'nth-of-type': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, formula, root, false, true); | |
+ }, | |
+ 'nth-last-of-type': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, formula, root, true, true); | |
+ }, | |
+ 'first-of-type': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, "1", root, false, true); | |
+ }, | |
+ 'last-of-type': function(nodes, formula, root) { | |
+ return Selector.pseudos.nth(nodes, "1", root, true, true); | |
+ }, | |
+ 'only-of-type': function(nodes, formula, root) { | |
+ var p = Selector.pseudos; | |
+ return p['last-of-type'](p['first-of-type'](nodes, formula, root), formula, root); | |
+ }, | |
+ | |
+ // handles the an+b logic | |
+ getIndices: function(a, b, total) { | |
+ if (a == 0) return b > 0 ? [b] : []; | |
+ return $R(1, total).inject([], function(memo, i) { | |
+ if (0 == (i - b) % a && (i - b) / a >= 0) memo.push(i); | |
+ return memo; | |
+ }); | |
+ }, | |
+ | |
+ // handles nth(-last)-child, nth(-last)-of-type, and (first|last)-of-type | |
+ nth: function(nodes, formula, root, reverse, ofType) { | |
+ if (nodes.length == 0) return []; | |
+ if (formula == 'even') formula = '2n+0'; | |
+ if (formula == 'odd') formula = '2n+1'; | |
+ var h = Selector.handlers, results = [], indexed = [], m; | |
+ h.mark(nodes); | |
+ for (var i = 0, node; node = nodes[i]; i++) { | |
+ if (!node.parentNode._counted) { | |
+ h.index(node.parentNode, reverse, ofType); | |
+ indexed.push(node.parentNode); | |
+ } | |
+ } | |
+ if (formula.match(/^\d+$/)) { // just a number | |
+ formula = Number(formula); | |
+ for (var i = 0, node; node = nodes[i]; i++) | |
+ if (node.nodeIndex == formula) results.push(node); | |
+ } else if (m = formula.match(/^(-?\d*)?n(([+-])(\d+))?/)) { // an+b | |
+ if (m[1] == "-") m[1] = -1; | |
+ var a = m[1] ? Number(m[1]) : 1; | |
+ var b = m[2] ? Number(m[2]) : 0; | |
+ var indices = Selector.pseudos.getIndices(a, b, nodes.length); | |
+ for (var i = 0, node, l = indices.length; node = nodes[i]; i++) { | |
+ for (var j = 0; j < l; j++) | |
+ if (node.nodeIndex == indices[j]) results.push(node); | |
+ } | |
+ } | |
+ h.unmark(nodes); | |
+ h.unmark(indexed); | |
+ return results; | |
+ }, | |
+ | |
+ 'empty': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) { | |
+ // IE treats comments as element nodes | |
+ if (node.tagName == '!' || (node.firstChild && !node.innerHTML.match(/^\s*$/))) continue; | |
+ results.push(node); | |
+ } | |
+ return results; | |
+ }, | |
+ | |
+ 'not': function(nodes, selector, root) { | |
+ var h = Selector.handlers, selectorType, m; | |
+ var exclusions = new Selector(selector).findElements(root); | |
+ h.mark(exclusions); | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ if (!node._counted) results.push(node); | |
+ h.unmark(exclusions); | |
+ return results; | |
+ }, | |
+ | |
+ 'enabled': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ if (!node.disabled) results.push(node); | |
+ return results; | |
+ }, | |
+ | |
+ 'disabled': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ if (node.disabled) results.push(node); | |
+ return results; | |
+ }, | |
+ | |
+ 'checked': function(nodes, value, root) { | |
+ for (var i = 0, results = [], node; node = nodes[i]; i++) | |
+ if (node.checked) results.push(node); | |
+ return results; | |
+ } | |
+ }, | |
+ | |
+ operators: { | |
+ '=': function(nv, v) { return nv == v; }, | |
+ '!=': function(nv, v) { return nv != v; }, | |
+ '^=': function(nv, v) { return nv.startsWith(v); }, | |
+ '$=': function(nv, v) { return nv.endsWith(v); }, | |
+ '*=': function(nv, v) { return nv.include(v); }, | |
+ '~=': function(nv, v) { return (' ' + nv + ' ').include(' ' + v + ' '); }, | |
+ '|=': function(nv, v) { return ('-' + nv.toUpperCase() + '-').include('-' + v.toUpperCase() + '-'); } | |
+ }, | |
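+  // For example (hypothetical markup): '[lang|=en]' matches lang="en" and lang="en-US" | 
+  // via the '|=' operator above, while '[class~=active]' uses '~=' for | 
+  // whitespace-separated word matching. | 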
+ | |
+ matchElements: function(elements, expression) { | |
+ var matches = new Selector(expression).findElements(), h = Selector.handlers; | |
+ h.mark(matches); | |
+ for (var i = 0, results = [], element; element = elements[i]; i++) | |
+ if (element._counted) results.push(element); | |
+ h.unmark(matches); | |
+ return results; | |
+ }, | |
+ | |
+ findElement: function(elements, expression, index) { | |
+ if (Object.isNumber(expression)) { | |
+ index = expression; expression = false; | |
+ } | |
+ return Selector.matchElements(elements, expression || '*')[index || 0]; | |
+ }, | |
+ | |
+ findChildElements: function(element, expressions) { | |
+ var exprs = expressions.join(','), expressions = []; | |
+ exprs.scan(/(([\w#:.~>+()\s-]+|\*|\[.*?\])+)\s*(,|$)/, function(m) { | |
+ expressions.push(m[1].strip()); | |
+ }); | |
+ var results = [], h = Selector.handlers; | |
+ for (var i = 0, l = expressions.length, selector; i < l; i++) { | |
+ selector = new Selector(expressions[i].strip()); | |
+ h.concat(results, selector.findElements(element)); | |
+ } | |
+ return (l > 1) ? h.unique(results) : results; | |
+ } | |
+}); | |
+ | |
+function $$() { | |
+ return Selector.findChildElements(document, $A(arguments)); | |
+} | |
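+// Usage sketch (selectors and ids here are hypothetical, not from this codebase): | 
+// | 
+//   var oddRows  = $$('#report tr:nth-child(odd)');         // exercises Selector.pseudos.nth | 
+//   var external = $$('a[rel~="external"]:not(.internal)'); // attribute operator + :not | 
+// | 
+// $$ accepts any number of CSS expressions and returns an array of extended elements. | 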
+var Form = { | |
+ reset: function(form) { | |
+ $(form).reset(); | |
+ return form; | |
+ }, | |
+ | |
+ serializeElements: function(elements, options) { | |
+ if (typeof options != 'object') options = { hash: !!options }; | |
+ else if (options.hash === undefined) options.hash = true; | |
+ var key, value, submitted = false, submit = options.submit; | |
+ | |
+ var data = elements.inject({ }, function(result, element) { | |
+ if (!element.disabled && element.name) { | |
+ key = element.name; value = $(element).getValue(); | |
+ if (value != null && (element.type != 'submit' || (!submitted && | |
+ submit !== false && (!submit || key == submit) && (submitted = true)))) { | |
+ if (key in result) { | |
+ // a key is already present; construct an array of values | |
+ if (!Object.isArray(result[key])) result[key] = [result[key]]; | |
+ result[key].push(value); | |
+ } | |
+ else result[key] = value; | |
+ } | |
+ } | |
+ return result; | |
+ }); | |
+ | |
+ return options.hash ? data : Object.toQueryString(data); | |
+ } | |
+}; | |
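+// Usage sketch ('signup' is a hypothetical form id): serializeElements is the | 
+// workhorse behind Form#serialize below; { hash: true } yields an object rather | 
+// than a query string, and repeated field names are collected into arrays: | 
+// | 
+//   Form.serializeElements(Form.getElements('signup'), { hash: true }); | 
+//   //=> { user: 'bob', langs: ['js', 'ruby'] } | 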
+ | |
+Form.Methods = { | |
+ serialize: function(form, options) { | |
+ return Form.serializeElements(Form.getElements(form), options); | |
+ }, | |
+ | |
+ getElements: function(form) { | |
+ return $A($(form).getElementsByTagName('*')).inject([], | |
+ function(elements, child) { | |
+ if (Form.Element.Serializers[child.tagName.toLowerCase()]) | |
+ elements.push(Element.extend(child)); | |
+ return elements; | |
+ } | |
+ ); | |
+ }, | |
+ | |
+ getInputs: function(form, typeName, name) { | |
+ form = $(form); | |
+ var inputs = form.getElementsByTagName('input'); | |
+ | |
+ if (!typeName && !name) return $A(inputs).map(Element.extend); | |
+ | |
+ for (var i = 0, matchingInputs = [], length = inputs.length; i < length; i++) { | |
+ var input = inputs[i]; | |
+ if ((typeName && input.type != typeName) || (name && input.name != name)) | |
+ continue; | |
+ matchingInputs.push(Element.extend(input)); | |
+ } | |
+ | |
+ return matchingInputs; | |
+ }, | |
+ | |
+ disable: function(form) { | |
+ form = $(form); | |
+ Form.getElements(form).invoke('disable'); | |
+ return form; | |
+ }, | |
+ | |
+ enable: function(form) { | |
+ form = $(form); | |
+ Form.getElements(form).invoke('enable'); | |
+ return form; | |
+ }, | |
+ | |
+ findFirstElement: function(form) { | |
+ var elements = $(form).getElements().findAll(function(element) { | |
+ return 'hidden' != element.type && !element.disabled; | |
+ }); | |
+ var firstByIndex = elements.findAll(function(element) { | |
+ return element.hasAttribute('tabIndex') && element.tabIndex >= 0; | |
+ }).sortBy(function(element) { return element.tabIndex }).first(); | |
+ | |
+ return firstByIndex ? firstByIndex : elements.find(function(element) { | |
+ return ['input', 'select', 'textarea'].include(element.tagName.toLowerCase()); | |
+ }); | |
+ }, | |
+ | |
+ focusFirstElement: function(form) { | |
+ form = $(form); | |
+ form.findFirstElement().activate(); | |
+ return form; | |
+ }, | |
+ | |
+ request: function(form, options) { | |
+ form = $(form), options = Object.clone(options || { }); | |
+ | |
+ var params = options.parameters, action = form.readAttribute('action') || ''; | |
+ if (action.blank()) action = window.location.href; | |
+ options.parameters = form.serialize(true); | |
+ | |
+ if (params) { | |
+ if (Object.isString(params)) params = params.toQueryParams(); | |
+ Object.extend(options.parameters, params); | |
+ } | |
+ | |
+ if (form.hasAttribute('method') && !options.method) | |
+ options.method = form.method; | |
+ | |
+ return new Ajax.Request(action, options); | |
+ } | |
+}; | |
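+// Usage sketch (hypothetical id/handler): these methods are mixed into form | 
+// elements via Element.addMethods(), so a form can submit itself over Ajax, | 
+// reusing its own action/method attributes and serialized fields: | 
+// | 
+//   $('signup').request({ | 
+//     onComplete: function(response) { alert(response.responseText); } | 
+//   }); | 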
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Form.Element = { | |
+ focus: function(element) { | |
+ $(element).focus(); | |
+ return element; | |
+ }, | |
+ | |
+ select: function(element) { | |
+ $(element).select(); | |
+ return element; | |
+ } | |
+}; | |
+ | |
+Form.Element.Methods = { | |
+ serialize: function(element) { | |
+ element = $(element); | |
+ if (!element.disabled && element.name) { | |
+ var value = element.getValue(); | |
+ if (value != undefined) { | |
+ var pair = { }; | |
+ pair[element.name] = value; | |
+ return Object.toQueryString(pair); | |
+ } | |
+ } | |
+ return ''; | |
+ }, | |
+ | |
+ getValue: function(element) { | |
+ element = $(element); | |
+ var method = element.tagName.toLowerCase(); | |
+ return Form.Element.Serializers[method](element); | |
+ }, | |
+ | |
+ setValue: function(element, value) { | |
+ element = $(element); | |
+ var method = element.tagName.toLowerCase(); | |
+ Form.Element.Serializers[method](element, value); | |
+ return element; | |
+ }, | |
+ | |
+ clear: function(element) { | |
+ $(element).value = ''; | |
+ return element; | |
+ }, | |
+ | |
+ present: function(element) { | |
+ return $(element).value != ''; | |
+ }, | |
+ | |
+ activate: function(element) { | |
+ element = $(element); | |
+ try { | |
+ element.focus(); | |
+ if (element.select && (element.tagName.toLowerCase() != 'input' || | |
+ !['button', 'reset', 'submit'].include(element.type))) | |
+ element.select(); | |
+ } catch (e) { } | |
+ return element; | |
+ }, | |
+ | |
+ disable: function(element) { | |
+ element = $(element); | |
+ element.blur(); | |
+ element.disabled = true; | |
+ return element; | |
+ }, | |
+ | |
+ enable: function(element) { | |
+ element = $(element); | |
+ element.disabled = false; | |
+ return element; | |
+ } | |
+}; | |
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+var Field = Form.Element; | |
+var $F = Form.Element.Methods.getValue; | |
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Form.Element.Serializers = { | |
+ input: function(element, value) { | |
+ switch (element.type.toLowerCase()) { | |
+ case 'checkbox': | |
+ case 'radio': | |
+ return Form.Element.Serializers.inputSelector(element, value); | |
+ default: | |
+ return Form.Element.Serializers.textarea(element, value); | |
+ } | |
+ }, | |
+ | |
+ inputSelector: function(element, value) { | |
+ if (value === undefined) return element.checked ? element.value : null; | |
+ else element.checked = !!value; | |
+ }, | |
+ | |
+ textarea: function(element, value) { | |
+ if (value === undefined) return element.value; | |
+ else element.value = value; | |
+ }, | |
+ | |
+ select: function(element, index) { | |
+ if (index === undefined) | |
+ return this[element.type == 'select-one' ? | |
+ 'selectOne' : 'selectMany'](element); | |
+ else { | |
+ var opt, value, single = !Object.isArray(index); | |
+ for (var i = 0, length = element.length; i < length; i++) { | |
+ opt = element.options[i]; | |
+ value = this.optionValue(opt); | |
+ if (single) { | |
+ if (value == index) { | |
+ opt.selected = true; | |
+ return; | |
+ } | |
+ } | |
+ else opt.selected = index.include(value); | |
+ } | |
+ } | |
+ }, | |
+ | |
+ selectOne: function(element) { | |
+ var index = element.selectedIndex; | |
+ return index >= 0 ? this.optionValue(element.options[index]) : null; | |
+ }, | |
+ | |
+ selectMany: function(element) { | |
+ var values, length = element.length; | |
+ if (!length) return null; | |
+ | |
+ for (var i = 0, values = []; i < length; i++) { | |
+ var opt = element.options[i]; | |
+ if (opt.selected) values.push(this.optionValue(opt)); | |
+ } | |
+ return values; | |
+ }, | |
+ | |
+ optionValue: function(opt) { | |
+ // extend element because hasAttribute may not be native | |
+ return Element.extend(opt).hasAttribute('value') ? opt.value : opt.text; | |
+ } | |
+}; | |
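+// Usage sketch (hypothetical ids): getValue/setValue dispatch through these | 
+// serializers by tag name, so selects accept a single value or an array: | 
+// | 
+//   $('country').setValue('DE');             // select-one | 
+//   $('toppings').setValue(['ham', 'egg']);  // select-multiple | 
+//   $F('toppings');                          //=> ['ham', 'egg'] | 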
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Abstract.TimedObserver = Class.create(PeriodicalExecuter, { | |
+ initialize: function($super, element, frequency, callback) { | |
+ $super(callback, frequency); | |
+ this.element = $(element); | |
+ this.lastValue = this.getValue(); | |
+ }, | |
+ | |
+ execute: function() { | |
+ var value = this.getValue(); | |
+ if (Object.isString(this.lastValue) && Object.isString(value) ? | |
+ this.lastValue != value : String(this.lastValue) != String(value)) { | |
+ this.callback(this.element, value); | |
+ this.lastValue = value; | |
+ } | |
+ } | |
+}); | |
+ | |
+Form.Element.Observer = Class.create(Abstract.TimedObserver, { | |
+ getValue: function() { | |
+ return Form.Element.getValue(this.element); | |
+ } | |
+}); | |
+ | |
+Form.Observer = Class.create(Abstract.TimedObserver, { | |
+ getValue: function() { | |
+ return Form.serialize(this.element); | |
+ } | |
+}); | |
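+// Usage sketch (hypothetical ids): TimedObserver subclasses poll on an interval | 
+// (in seconds) and invoke the callback only when the observed value changes: | 
+// | 
+//   new Form.Element.Observer('query', 0.4, function(element, value) { | 
+//     // react to the new value here | 
+//   }); | 
+//   new Form.Observer('filters', 1, function(form, serialized) { /* ... */ }); | 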
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Abstract.EventObserver = Class.create({ | |
+ initialize: function(element, callback) { | |
+ this.element = $(element); | |
+ this.callback = callback; | |
+ | |
+ this.lastValue = this.getValue(); | |
+ if (this.element.tagName.toLowerCase() == 'form') | |
+ this.registerFormCallbacks(); | |
+ else | |
+ this.registerCallback(this.element); | |
+ }, | |
+ | |
+ onElementEvent: function() { | |
+ var value = this.getValue(); | |
+ if (this.lastValue != value) { | |
+ this.callback(this.element, value); | |
+ this.lastValue = value; | |
+ } | |
+ }, | |
+ | |
+ registerFormCallbacks: function() { | |
+ Form.getElements(this.element).each(this.registerCallback, this); | |
+ }, | |
+ | |
+ registerCallback: function(element) { | |
+ if (element.type) { | |
+ switch (element.type.toLowerCase()) { | |
+ case 'checkbox': | |
+ case 'radio': | |
+ Event.observe(element, 'click', this.onElementEvent.bind(this)); | |
+ break; | |
+ default: | |
+ Event.observe(element, 'change', this.onElementEvent.bind(this)); | |
+ break; | |
+ } | |
+ } | |
+ } | |
+}); | |
+ | |
+Form.Element.EventObserver = Class.create(Abstract.EventObserver, { | |
+ getValue: function() { | |
+ return Form.Element.getValue(this.element); | |
+ } | |
+}); | |
+ | |
+Form.EventObserver = Class.create(Abstract.EventObserver, { | |
+ getValue: function() { | |
+ return Form.serialize(this.element); | |
+ } | |
+}); | |
+if (!window.Event) var Event = { }; | |
+ | |
+Object.extend(Event, { | |
+ KEY_BACKSPACE: 8, | |
+ KEY_TAB: 9, | |
+ KEY_RETURN: 13, | |
+ KEY_ESC: 27, | |
+ KEY_LEFT: 37, | |
+ KEY_UP: 38, | |
+ KEY_RIGHT: 39, | |
+ KEY_DOWN: 40, | |
+ KEY_DELETE: 46, | |
+ KEY_HOME: 36, | |
+ KEY_END: 35, | |
+ KEY_PAGEUP: 33, | |
+ KEY_PAGEDOWN: 34, | |
+ KEY_INSERT: 45, | |
+ | |
+ cache: { }, | |
+ | |
+ relatedTarget: function(event) { | |
+ var element; | |
+ switch(event.type) { | |
+ case 'mouseover': element = event.fromElement; break; | |
+ case 'mouseout': element = event.toElement; break; | |
+ default: return null; | |
+ } | |
+ return Element.extend(element); | |
+ } | |
+}); | |
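+// Usage sketch (hypothetical id): the KEY_* constants above are typically | 
+// compared against event.keyCode inside a key event handler: | 
+// | 
+//   $('search').observe('keydown', function(event) { | 
+//     if (event.keyCode == Event.KEY_RETURN) event.stop(); | 
+//   }); | 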
+ | |
+Event.Methods = (function() { | |
+ var isButton; | |
+ | |
+ if (Prototype.Browser.IE) { | |
+ var buttonMap = { 0: 1, 1: 4, 2: 2 }; | |
+ isButton = function(event, code) { | |
+ return event.button == buttonMap[code]; | |
+ }; | |
+ | |
+ } else if (Prototype.Browser.WebKit) { | |
+ isButton = function(event, code) { | |
+ switch (code) { | |
+ case 0: return event.which == 1 && !event.metaKey; | |
+ case 1: return event.which == 1 && event.metaKey; | |
+ default: return false; | |
+ } | |
+ }; | |
+ | |
+ } else { | |
+ isButton = function(event, code) { | |
+ return event.which ? (event.which === code + 1) : (event.button === code); | |
+ }; | |
+ } | |
+ | |
+ return { | |
+ isLeftClick: function(event) { return isButton(event, 0) }, | |
+ isMiddleClick: function(event) { return isButton(event, 1) }, | |
+ isRightClick: function(event) { return isButton(event, 2) }, | |
+ | |
+ element: function(event) { | |
+ var node = Event.extend(event).target; | |
+ return Element.extend(node.nodeType == Node.TEXT_NODE ? node.parentNode : node); | |
+ }, | |
+ | |
+ findElement: function(event, expression) { | |
+ var element = Event.element(event); | |
+ return element.match(expression) ? element : element.up(expression); | |
+ }, | |
+ | |
+ pointer: function(event) { | |
+ return { | |
+ x: event.pageX || (event.clientX + | |
+ (document.documentElement.scrollLeft || document.body.scrollLeft)), | |
+ y: event.pageY || (event.clientY + | |
+ (document.documentElement.scrollTop || document.body.scrollTop)) | |
+ }; | |
+ }, | |
+ | |
+ pointerX: function(event) { return Event.pointer(event).x }, | |
+ pointerY: function(event) { return Event.pointer(event).y }, | |
+ | |
+ stop: function(event) { | |
+ Event.extend(event); | |
+ event.preventDefault(); | |
+ event.stopPropagation(); | |
+ event.stopped = true; | |
+ } | |
+ }; | |
+})(); | |
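+// Usage sketch: Event.extend (applied automatically by the observe wrapper | 
+// further below) makes these available as instance methods on the event: | 
+// | 
+//   function onClick(event) { | 
+//     if (!event.isLeftClick()) return; | 
+//     var pos  = event.pointer();        // { x: ..., y: ... } in page coordinates | 
+//     var link = event.findElement('a'); // nearest ancestor (or self) matching 'a' | 
+//   } | 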
+ | |
+Event.extend = (function() { | |
+ var methods = Object.keys(Event.Methods).inject({ }, function(m, name) { | |
+ m[name] = Event.Methods[name].methodize(); | |
+ return m; | |
+ }); | |
+ | |
+ if (Prototype.Browser.IE) { | |
+ Object.extend(methods, { | |
+ stopPropagation: function() { this.cancelBubble = true }, | |
+ preventDefault: function() { this.returnValue = false }, | |
+ inspect: function() { return "[object Event]" } | |
+ }); | |
+ | |
+ return function(event) { | |
+ if (!event) return false; | |
+ if (event._extendedByPrototype) return event; | |
+ | |
+ event._extendedByPrototype = Prototype.emptyFunction; | |
+ var pointer = Event.pointer(event); | |
+ Object.extend(event, { | |
+ target: event.srcElement, | |
+ relatedTarget: Event.relatedTarget(event), | |
+ pageX: pointer.x, | |
+ pageY: pointer.y | |
+ }); | |
+ return Object.extend(event, methods); | |
+ }; | |
+ | |
+ } else { | |
+ Event.prototype = Event.prototype || document.createEvent("HTMLEvents").__proto__; | |
+ Object.extend(Event.prototype, methods); | |
+ return Prototype.K; | |
+ } | |
+})(); | |
+ | |
+Object.extend(Event, (function() { | |
+ var cache = Event.cache; | |
+ | |
+ function getEventID(element) { | |
+ if (element._eventID) return element._eventID; | |
+ arguments.callee.id = arguments.callee.id || 1; | |
+ return element._eventID = ++arguments.callee.id; | |
+ } | |
+ | |
+ function getDOMEventName(eventName) { | |
+ if (eventName && eventName.include(':')) return "dataavailable"; | |
+ return eventName; | |
+ } | |
+ | |
+ function getCacheForID(id) { | |
+ return cache[id] = cache[id] || { }; | |
+ } | |
+ | |
+ function getWrappersForEventName(id, eventName) { | |
+ var c = getCacheForID(id); | |
+ return c[eventName] = c[eventName] || []; | |
+ } | |
+ | |
+ function createWrapper(element, eventName, handler) { | |
+ var id = getEventID(element); | |
+ var c = getWrappersForEventName(id, eventName); | |
+ if (c.pluck("handler").include(handler)) return false; | |
+ | |
+ var wrapper = function(event) { | |
+ if (!Event || !Event.extend || | |
+ (event.eventName && event.eventName != eventName)) | |
+ return false; | |
+ | |
+ Event.extend(event); | |
+ handler.call(element, event) | |
+ }; | |
+ | |
+ wrapper.handler = handler; | |
+ c.push(wrapper); | |
+ return wrapper; | |
+ } | |
+ | |
+ function findWrapper(id, eventName, handler) { | |
+ var c = getWrappersForEventName(id, eventName); | |
+ return c.find(function(wrapper) { return wrapper.handler == handler }); | |
+ } | |
+ | |
+ function destroyWrapper(id, eventName, handler) { | |
+ var c = getCacheForID(id); | |
+ if (!c[eventName]) return false; | |
+ c[eventName] = c[eventName].without(findWrapper(id, eventName, handler)); | |
+ } | |
+ | |
+ function destroyCache() { | |
+ for (var id in cache) | |
+ for (var eventName in cache[id]) | |
+ cache[id][eventName] = null; | |
+ } | |
+ | |
+ if (window.attachEvent) { | |
+ window.attachEvent("onunload", destroyCache); | |
+ } | |
+ | |
+ return { | |
+ observe: function(element, eventName, handler) { | |
+ element = $(element); | |
+ var name = getDOMEventName(eventName); | |
+ | |
+ var wrapper = createWrapper(element, eventName, handler); | |
+ if (!wrapper) return element; | |
+ | |
+ if (element.addEventListener) { | |
+ element.addEventListener(name, wrapper, false); | |
+ } else { | |
+ element.attachEvent("on" + name, wrapper); | |
+ } | |
+ | |
+ return element; | |
+ }, | |
+ | |
+ stopObserving: function(element, eventName, handler) { | |
+ element = $(element); | |
+ var id = getEventID(element), name = getDOMEventName(eventName); | |
+ | |
+ if (!handler && eventName) { | |
+ getWrappersForEventName(id, eventName).each(function(wrapper) { | |
+ element.stopObserving(eventName, wrapper.handler); | |
+ }); | |
+ return element; | |
+ | |
+ } else if (!eventName) { | |
+ Object.keys(getCacheForID(id)).each(function(eventName) { | |
+ element.stopObserving(eventName); | |
+ }); | |
+ return element; | |
+ } | |
+ | |
+ var wrapper = findWrapper(id, eventName, handler); | |
+ if (!wrapper) return element; | |
+ | |
+ if (element.removeEventListener) { | |
+ element.removeEventListener(name, wrapper, false); | |
+ } else { | |
+ element.detachEvent("on" + name, wrapper); | |
+ } | |
+ | |
+ destroyWrapper(id, eventName, handler); | |
+ | |
+ return element; | |
+ }, | |
+ | |
+ fire: function(element, eventName, memo) { | |
+ element = $(element); | |
+ if (element == document && document.createEvent && !element.dispatchEvent) | |
+ element = document.documentElement; | |
+ | |
+ if (document.createEvent) { | |
+ var event = document.createEvent("HTMLEvents"); | |
+ event.initEvent("dataavailable", true, true); | |
+ } else { | |
+ var event = document.createEventObject(); | |
+ event.eventType = "ondataavailable"; | |
+ } | |
+ | |
+ event.eventName = eventName; | |
+ event.memo = memo || { }; | |
+ | |
+ if (document.createEvent) { | |
+ element.dispatchEvent(event); | |
+ } else { | |
+ element.fireEvent(event.eventType, event); | |
+ } | |
+ | |
+ return event; | |
+ } | |
+ }; | |
+})()); | |
+ | |
+Object.extend(Event, Event.Methods); | |
+ | |
+Element.addMethods({ | |
+ fire: Event.fire, | |
+ observe: Event.observe, | |
+ stopObserving: Event.stopObserving | |
+}); | |
+ | |
+Object.extend(document, { | |
+ fire: Element.Methods.fire.methodize(), | |
+ observe: Element.Methods.observe.methodize(), | |
+ stopObserving: Element.Methods.stopObserving.methodize() | |
+}); | |
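+// Usage sketch (hypothetical ids/event names): DOM events and custom | 
+// colon-namespaced events share the same observe/fire API; custom events are | 
+// carried on the synthetic "dataavailable" event as wired up above: | 
+// | 
+//   document.observe('dom:loaded', function() { | 
+//     $('save').observe('click', function(event) { | 
+//       event.stop(); | 
+//       document.fire('widget:saved', { id: 42 }); | 
+//     }); | 
+//   }); | 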
+ | |
+(function() { | |
+ /* Support for the DOMContentLoaded event is based on work by Dan Webb, | |
+ Matthias Miller, Dean Edwards and John Resig. */ | |
+ | |
+ var timer, fired = false; | |
+ | |
+ function fireContentLoadedEvent() { | |
+ if (fired) return; | |
+ if (timer) window.clearInterval(timer); | |
+ document.fire("dom:loaded"); | |
+ fired = true; | |
+ } | |
+ | |
+ if (document.addEventListener) { | |
+ if (Prototype.Browser.WebKit) { | |
+ timer = window.setInterval(function() { | |
+ if (/loaded|complete/.test(document.readyState)) | |
+ fireContentLoadedEvent(); | |
+ }, 0); | |
+ | |
+ Event.observe(window, "load", fireContentLoadedEvent); | |
+ | |
+ } else { | |
+ document.addEventListener("DOMContentLoaded", | |
+ fireContentLoadedEvent, false); | |
+ } | |
+ | |
+ } else { | |
+ document.write("<script id=__onDOMContentLoaded defer src=//:><\/script>"); | |
+ $("__onDOMContentLoaded").onreadystatechange = function() { | |
+ if (this.readyState == "complete") { | |
+ this.onreadystatechange = null; | |
+ fireContentLoadedEvent(); | |
+ } | |
+ }; | |
+ } | |
+})(); | |
+/*------------------------------- DEPRECATED -------------------------------*/ | |
+ | |
+Hash.toQueryString = Object.toQueryString; | |
+ | |
+var Toggle = { display: Element.toggle }; | |
+ | |
+Element.Methods.childOf = Element.Methods.descendantOf; | |
+ | |
+var Insertion = { | |
+ Before: function(element, content) { | |
+ return Element.insert(element, {before:content}); | |
+ }, | |
+ | |
+ Top: function(element, content) { | |
+ return Element.insert(element, {top:content}); | |
+ }, | |
+ | |
+ Bottom: function(element, content) { | |
+ return Element.insert(element, {bottom:content}); | |
+ }, | |
+ | |
+ After: function(element, content) { | |
+ return Element.insert(element, {after:content}); | |
+ } | |
+}; | |
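+// For example, Insertion.Bottom('list', '<li>item</li>') now simply delegates to | 
+// Element.insert('list', { bottom: '<li>item</li>' }); prefer the latter | 
+// ('list' is a hypothetical id). | 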
+ | |
+var $continue = new Error('"throw $continue" is deprecated, use "return" instead'); | |
+ | |
+// This should be moved to script.aculo.us; notice the deprecated methods | |
+// further below, that map to the newer Element methods. | |
+var Position = { | |
+  // set to true if needed; warning: Firefox performance problems | 
+  // NOT needed for page scrolling, only if a draggable is contained inside | 
+  // scrollable elements | 
+ includeScrollOffsets: false, | |
+ | |
+  // must be called before calling withinIncludingScrolloffsets, every time the | 
+  // page is scrolled | 
+ prepare: function() { | |
+ this.deltaX = window.pageXOffset | |
+ || document.documentElement.scrollLeft | |
+ || document.body.scrollLeft | |
+ || 0; | |
+ this.deltaY = window.pageYOffset | |
+ || document.documentElement.scrollTop | |
+ || document.body.scrollTop | |
+ || 0; | |
+ }, | |
+ | |
+ // caches x/y coordinate pair to use with overlap | |
+ within: function(element, x, y) { | |
+ if (this.includeScrollOffsets) | |
+ return this.withinIncludingScrolloffsets(element, x, y); | |
+ this.xcomp = x; | |
+ this.ycomp = y; | |
+ this.offset = Element.cumulativeOffset(element); | |
+ | |
+ return (y >= this.offset[1] && | |
+ y < this.offset[1] + element.offsetHeight && | |
+ x >= this.offset[0] && | |
+ x < this.offset[0] + element.offsetWidth); | |
+ }, | |
+ | |
+ withinIncludingScrolloffsets: function(element, x, y) { | |
+ var offsetcache = Element.cumulativeScrollOffset(element); | |
+ | |
+ this.xcomp = x + offsetcache[0] - this.deltaX; | |
+ this.ycomp = y + offsetcache[1] - this.deltaY; | |
+ this.offset = Element.cumulativeOffset(element); | |
+ | |
+ return (this.ycomp >= this.offset[1] && | |
+ this.ycomp < this.offset[1] + element.offsetHeight && | |
+ this.xcomp >= this.offset[0] && | |
+ this.xcomp < this.offset[0] + element.offsetWidth); | |
+ }, | |
+ | |
+ // within must be called directly before | |
+ overlap: function(mode, element) { | |
+ if (!mode) return 0; | |
+ if (mode == 'vertical') | |
+ return ((this.offset[1] + element.offsetHeight) - this.ycomp) / | |
+ element.offsetHeight; | |
+ if (mode == 'horizontal') | |
+ return ((this.offset[0] + element.offsetWidth) - this.xcomp) / | |
+ element.offsetWidth; | |
+ }, | |
+ | |
+ // Deprecation layer -- use newer Element methods now (1.5.2). | |
+ | |
+ cumulativeOffset: Element.Methods.cumulativeOffset, | |
+ | |
+ positionedOffset: Element.Methods.positionedOffset, | |
+ | |
+ absolutize: function(element) { | |
+ Position.prepare(); | |
+ return Element.absolutize(element); | |
+ }, | |
+ | |
+ relativize: function(element) { | |
+ Position.prepare(); | |
+ return Element.relativize(element); | |
+ }, | |
+ | |
+ realOffset: Element.Methods.cumulativeScrollOffset, | |
+ | |
+ offsetParent: Element.Methods.getOffsetParent, | |
+ | |
+ page: Element.Methods.viewportOffset, | |
+ | |
+ clone: function(source, target, options) { | |
+ options = options || { }; | |
+ return Element.clonePosition(target, source, options); | |
+ } | |
+}; | |
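+// Usage sketch (hypothetical id/coordinates): Position.prepare() must run after | 
+// any scroll, then within()/overlap() can test a point against an element: | 
+// | 
+//   Position.prepare(); | 
+//   if (Position.within($('dropzone'), pointerX, pointerY)) { | 
+//     var ratio = Position.overlap('vertical', $('dropzone')); | 
+//   } | 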
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+if (!document.getElementsByClassName) document.getElementsByClassName = function(instanceMethods){ | |
+ function iter(name) { | |
+ return name.blank() ? null : "[contains(concat(' ', @class, ' '), ' " + name + " ')]"; | |
+ } | |
+ | |
+ instanceMethods.getElementsByClassName = Prototype.BrowserFeatures.XPath ? | |
+ function(element, className) { | |
+ className = className.toString().strip(); | |
+ var cond = /\s/.test(className) ? $w(className).map(iter).join('') : iter(className); | |
+ return cond ? document._getElementsByXPath('.//*' + cond, element) : []; | |
+ } : function(element, className) { | |
+ className = className.toString().strip(); | |
+ var elements = [], classNames = (/\s/.test(className) ? $w(className) : null); | |
+ if (!classNames && !className) return elements; | |
+ | |
+ var nodes = $(element).getElementsByTagName('*'); | |
+ className = ' ' + className + ' '; | |
+ | |
+ for (var i = 0, child, cn; child = nodes[i]; i++) { | |
+ if (child.className && (cn = ' ' + child.className + ' ') && (cn.include(className) || | |
+ (classNames && classNames.all(function(name) { | |
+ return !name.toString().blank() && cn.include(' ' + name + ' '); | |
+ })))) | |
+ elements.push(Element.extend(child)); | |
+ } | |
+ return elements; | |
+ }; | |
+ | |
+ return function(className, parentElement) { | |
+ return $(parentElement || document.body).getElementsByClassName(className); | |
+ }; | |
+}(Element.Methods); | |
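+// e.g. document.getElementsByClassName('highlighted', 'content') returns all | 
+// extended elements with that class under #content (names hypothetical); the | 
+// XPath branch is used when available, the manual DOM walk otherwise. | 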
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Element.ClassNames = Class.create(); | |
+Element.ClassNames.prototype = { | |
+ initialize: function(element) { | |
+ this.element = $(element); | |
+ }, | |
+ | |
+ _each: function(iterator) { | |
+ this.element.className.split(/\s+/).select(function(name) { | |
+ return name.length > 0; | |
+ })._each(iterator); | |
+ }, | |
+ | |
+ set: function(className) { | |
+ this.element.className = className; | |
+ }, | |
+ | |
+ add: function(classNameToAdd) { | |
+ if (this.include(classNameToAdd)) return; | |
+ this.set($A(this).concat(classNameToAdd).join(' ')); | |
+ }, | |
+ | |
+ remove: function(classNameToRemove) { | |
+ if (!this.include(classNameToRemove)) return; | |
+ this.set($A(this).without(classNameToRemove).join(' ')); | |
+ }, | |
+ | |
+ toString: function() { | |
+ return $A(this).join(' '); | |
+ } | |
+}; | |
+ | |
+Object.extend(Element.ClassNames.prototype, Enumerable); | |
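+// Usage sketch ('box' is a hypothetical id): | 
+// | 
+//   var names = new Element.ClassNames('box'); | 
+//   names.add('active');       // set/add/remove rewrite element.className | 
+//   names.include('active');   //=> true, courtesy of the Enumerable mixin | 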
+ | |
+/*--------------------------------------------------------------------------*/ | |
+ | |
+Element.addMethods(); | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/extconf.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/extconf.rb | |
new file mode 100644 | |
index 0000000..88aaf40 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/extconf.rb | |
@@ -0,0 +1,9 @@ | |
+require 'mkmf' | |
+require 'rbconfig' | |
+ | |
+if CONFIG['CC'] =~ /gcc/ | |
+ $CFLAGS += ' -Wall' | |
+ #$CFLAGS += ' -O0 -ggdb' | |
+end | |
+ | |
+create_makefile 'generator' | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/generator.c b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/generator.c | |
new file mode 100644 | |
index 0000000..e5333b7 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/generator.c | |
@@ -0,0 +1,875 @@ | |
+#include <string.h> | |
+#include "ruby.h" | |
+#include "st.h" | |
+#include "unicode.h" | |
+#include <math.h> | |
+ | |
+#define check_max_nesting(state, depth) do { \ | |
+ long current_nesting = 1 + depth; \ | |
+ if (state->max_nesting != 0 && current_nesting > state->max_nesting) \ | |
+ rb_raise(eNestingError, "nesting of %ld is too deep", current_nesting); \ | |
+} while (0); | |
+ | |
+static VALUE mJSON, mExt, mGenerator, cState, mGeneratorMethods, mObject, | |
+ mHash, mArray, mInteger, mFloat, mString, mString_Extend, | |
+ mTrueClass, mFalseClass, mNilClass, eGeneratorError, | |
+ eCircularDatastructure, eNestingError; | |
+ | |
+static ID i_to_s, i_to_json, i_new, i_indent, i_space, i_space_before, | |
+ i_object_nl, i_array_nl, i_check_circular, i_max_nesting, | |
+ i_allow_nan, i_pack, i_unpack, i_create_id, i_extend; | |
+ | |
+typedef struct JSON_Generator_StateStruct { | |
+ VALUE indent; | |
+ VALUE space; | |
+ VALUE space_before; | |
+ VALUE object_nl; | |
+ VALUE array_nl; | |
+ int check_circular; | |
+ VALUE seen; | |
+ VALUE memo; | |
+ VALUE depth; | |
+ long max_nesting; | |
+ int flag; | |
+ int allow_nan; | |
+} JSON_Generator_State; | |
+ | |
+#define GET_STATE(self) \ | |
+ JSON_Generator_State *state; \ | |
+ Data_Get_Struct(self, JSON_Generator_State, state); | |
+ | |
+/* | |
+ * Document-module: JSON::Ext::Generator | |
+ * | |
+ * This is the JSON generator implemented as a C extension. It can be | |
+ * configured to be used by setting | |
+ * | |
+ * JSON.generator = JSON::Ext::Generator | |
+ * | |
+ * with the method generator= in JSON. | |
+ * | |
+ */ | |
+ | |
+static int hash_to_json_state_i(VALUE key, VALUE value, VALUE Vstate) | |
+{ | |
+ VALUE json, buf, Vdepth; | |
+ GET_STATE(Vstate); | |
+ buf = state->memo; | |
+ Vdepth = state->depth; | |
+ | |
+ if (key == Qundef) return ST_CONTINUE; | |
+ if (state->flag) { | |
+ state->flag = 0; | |
+ rb_str_buf_cat2(buf, ","); | |
+ if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(buf, state->object_nl); | |
+ } | |
+ if (RSTRING_LEN(state->object_nl)) { | |
+ rb_str_buf_append(buf, rb_str_times(state->indent, Vdepth)); | |
+ } | |
+ json = rb_funcall(rb_funcall(key, i_to_s, 0), i_to_json, 2, Vstate, Vdepth); | |
+ Check_Type(json, T_STRING); | |
+ rb_str_buf_append(buf, json); | |
+ OBJ_INFECT(buf, json); | |
+ if (RSTRING_LEN(state->space_before)) { | |
+ rb_str_buf_append(buf, state->space_before); | |
+ } | |
+ rb_str_buf_cat2(buf, ":"); | |
+ if (RSTRING_LEN(state->space)) rb_str_buf_append(buf, state->space); | |
+ json = rb_funcall(value, i_to_json, 2, Vstate, Vdepth); | |
+ Check_Type(json, T_STRING); | |
+ state->flag = 1; | |
+ rb_str_buf_append(buf, json); | |
+ OBJ_INFECT(buf, json); | |
+ state->depth = Vdepth; | |
+ state->memo = buf; | |
+ return ST_CONTINUE; | |
+} | |
+ | |
+inline static VALUE mHash_json_transfrom(VALUE self, VALUE Vstate, VALUE Vdepth) { | |
+ long depth, len = RHASH(self)->tbl->num_entries; | |
+ VALUE result; | |
+ GET_STATE(Vstate); | |
+ | |
+ depth = 1 + FIX2LONG(Vdepth); | |
+ result = rb_str_buf_new(len); | |
+ state->memo = result; | |
+ state->depth = LONG2FIX(depth); | |
+ state->flag = 0; | |
+ rb_str_buf_cat2(result, "{"); | |
+ if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(result, state->object_nl); | |
+ rb_hash_foreach(self, hash_to_json_state_i, Vstate); | |
+ if (RSTRING_LEN(state->object_nl)) rb_str_buf_append(result, state->object_nl); | |
+ if (RSTRING_LEN(state->object_nl)) { | |
+ rb_str_buf_append(result, rb_str_times(state->indent, Vdepth)); | |
+ } | |
+ rb_str_buf_cat2(result, "}"); | |
+ return result; | |
+} | |
+ | |
+static int hash_to_json_i(VALUE key, VALUE value, VALUE buf) | |
+{ | |
+ VALUE tmp; | |
+ | |
+ if (key == Qundef) return ST_CONTINUE; | |
+ if (RSTRING_LEN(buf) > 1) rb_str_buf_cat2(buf, ","); | |
+ tmp = rb_funcall(rb_funcall(key, i_to_s, 0), i_to_json, 0); | |
+ Check_Type(tmp, T_STRING); | |
+ rb_str_buf_append(buf, tmp); | |
+ OBJ_INFECT(buf, tmp); | |
+ rb_str_buf_cat2(buf, ":"); | |
+ tmp = rb_funcall(value, i_to_json, 0); | |
+ Check_Type(tmp, T_STRING); | |
+ rb_str_buf_append(buf, tmp); | |
+ OBJ_INFECT(buf, tmp); | |
+ | |
+ return ST_CONTINUE; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(state = nil, depth = 0) | |
+ * | |
+ * Returns a JSON string containing a JSON object that is unparsed from | 
+ * this Hash instance. | 
+ * _state_ is a JSON::State object that can also be used to configure the | 
+ * produced JSON string output further. | 
+ * _depth_ is used to find out the nesting depth, to indent accordingly. | 
+ */ | |
+static VALUE mHash_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ VALUE Vstate, Vdepth, result; | |
+ long depth; | |
+ | |
+ rb_scan_args(argc, argv, "02", &Vstate, &Vdepth); | |
+ depth = NIL_P(Vdepth) ? 0 : FIX2LONG(Vdepth); | |
+ if (NIL_P(Vstate)) { | |
+ long len = RHASH(self)->tbl->num_entries; | |
+ result = rb_str_buf_new(len); | |
+ rb_str_buf_cat2(result, "{"); | |
+ rb_hash_foreach(self, hash_to_json_i, result); | |
+ rb_str_buf_cat2(result, "}"); | |
+ } else { | |
+ GET_STATE(Vstate); | |
+ check_max_nesting(state, depth); | |
+ if (state->check_circular) { | |
+ VALUE self_id = rb_obj_id(self); | |
+ if (RTEST(rb_hash_aref(state->seen, self_id))) { | |
+ rb_raise(eCircularDatastructure, | |
+ "circular data structures not supported!"); | |
+ } | |
+ rb_hash_aset(state->seen, self_id, Qtrue); | |
+ result = mHash_json_transfrom(self, Vstate, LONG2FIX(depth)); | |
+ rb_hash_delete(state->seen, self_id); | |
+ } else { | |
+ result = mHash_json_transfrom(self, Vstate, LONG2FIX(depth)); | |
+ } | |
+ } | |
+ OBJ_INFECT(result, self); | |
+ return result; | |
+} | |
+ | |
+inline static VALUE mArray_json_transfrom(VALUE self, VALUE Vstate, VALUE Vdepth) { | |
+ long i, len = RARRAY_LEN(self); | |
+ VALUE shift, result; | |
+ long depth = NIL_P(Vdepth) ? 0 : FIX2LONG(Vdepth); | |
+ VALUE delim = rb_str_new2(","); | |
+ GET_STATE(Vstate); | |
+ | |
+ check_max_nesting(state, depth); | |
+ if (state->check_circular) { | |
+ VALUE self_id = rb_obj_id(self); | |
+ rb_hash_aset(state->seen, self_id, Qtrue); | |
+ result = rb_str_buf_new(len); | |
+ if (RSTRING_LEN(state->array_nl)) rb_str_append(delim, state->array_nl); | |
+ shift = rb_str_times(state->indent, LONG2FIX(depth + 1)); | |
+ | |
+ rb_str_buf_cat2(result, "["); | |
+ OBJ_INFECT(result, self); | |
+ rb_str_buf_append(result, state->array_nl); | |
+ for (i = 0; i < len; i++) { | |
+ VALUE element = RARRAY_PTR(self)[i]; | |
+ if (RTEST(rb_hash_aref(state->seen, rb_obj_id(element)))) { | |
+ rb_raise(eCircularDatastructure, | |
+ "circular data structures not supported!"); | |
+ } | |
+ OBJ_INFECT(result, element); | |
+ if (i > 0) rb_str_buf_append(result, delim); | |
+ rb_str_buf_append(result, shift); | |
+ element = rb_funcall(element, i_to_json, 2, Vstate, LONG2FIX(depth + 1)); | |
+ Check_Type(element, T_STRING); | |
+ rb_str_buf_append(result, element); | |
+ } | |
+ if (RSTRING_LEN(state->array_nl)) { | |
+ rb_str_buf_append(result, state->array_nl); | |
+ rb_str_buf_append(result, rb_str_times(state->indent, LONG2FIX(depth))); | |
+ } | |
+ rb_str_buf_cat2(result, "]"); | |
+ rb_hash_delete(state->seen, self_id); | |
+ } else { | |
+ result = rb_str_buf_new(len); | |
+ OBJ_INFECT(result, self); | |
+ if (RSTRING_LEN(state->array_nl)) rb_str_append(delim, state->array_nl); | |
+ shift = rb_str_times(state->indent, LONG2FIX(depth + 1)); | |
+ | |
+ rb_str_buf_cat2(result, "["); | |
+ rb_str_buf_append(result, state->array_nl); | |
+ for (i = 0; i < len; i++) { | |
+ VALUE element = RARRAY_PTR(self)[i]; | |
+ OBJ_INFECT(result, element); | |
+ if (i > 0) rb_str_buf_append(result, delim); | |
+ rb_str_buf_append(result, shift); | |
+ element = rb_funcall(element, i_to_json, 2, Vstate, LONG2FIX(depth + 1)); | |
+ Check_Type(element, T_STRING); | |
+ rb_str_buf_append(result, element); | |
+ } | |
+ rb_str_buf_append(result, state->array_nl); | |
+ if (RSTRING_LEN(state->array_nl)) { | |
+ rb_str_buf_append(result, rb_str_times(state->indent, LONG2FIX(depth))); | |
+ } | |
+ rb_str_buf_cat2(result, "]"); | |
+ } | |
+ return result; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(state = nil, depth = 0) | |
+ * | |
+ * Returns a JSON string containing a JSON array that is unparsed from | 
+ * this Array instance. | 
+ * _state_ is a JSON::State object that can also be used to configure the | 
+ * produced JSON string output further. | 
+ * _depth_ is used to find out the nesting depth, to indent accordingly. | 
+ */ | |
+static VALUE mArray_to_json(int argc, VALUE *argv, VALUE self) { | |
+ VALUE Vstate, Vdepth, result; | |
+ | |
+ rb_scan_args(argc, argv, "02", &Vstate, &Vdepth); | |
+ if (NIL_P(Vstate)) { | |
+ long i, len = RARRAY_LEN(self); | |
+ result = rb_str_buf_new(2 + 2 * len); | |
+ rb_str_buf_cat2(result, "["); | |
+ OBJ_INFECT(result, self); | |
+ for (i = 0; i < len; i++) { | |
+ VALUE element = RARRAY_PTR(self)[i]; | |
+ OBJ_INFECT(result, element); | |
+ if (i > 0) rb_str_buf_cat2(result, ","); | |
+ element = rb_funcall(element, i_to_json, 0); | |
+ Check_Type(element, T_STRING); | |
+ rb_str_buf_append(result, element); | |
+ } | |
+ rb_str_buf_cat2(result, "]"); | |
+ } else { | |
+ result = mArray_json_transfrom(self, Vstate, Vdepth); | |
+ } | |
+ OBJ_INFECT(result, self); | |
+ return result; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(*) | |
+ * | |
+ * Returns a JSON string representation for this Integer number. | |
+ */ | |
+static VALUE mInteger_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ return rb_funcall(self, i_to_s, 0); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(*) | |
+ * | |
+ * Returns a JSON string representation for this Float number. | |
+ */ | |
+static VALUE mFloat_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ JSON_Generator_State *state = NULL; | |
+ VALUE Vstate, rest, tmp; | |
+ double value = RFLOAT(self)->value; | |
+ rb_scan_args(argc, argv, "01*", &Vstate, &rest); | |
+ if (!NIL_P(Vstate)) Data_Get_Struct(Vstate, JSON_Generator_State, state); | |
+ if (isinf(value)) { | |
+ if (!state || state->allow_nan) { | |
+ return rb_funcall(self, i_to_s, 0); | |
+ } else { | |
+ tmp = rb_funcall(self, i_to_s, 0); | |
+ rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp)); | |
+ } | |
+ } else if (isnan(value)) { | |
+ if (!state || state->allow_nan) { | |
+ return rb_funcall(self, i_to_s, 0); | |
+ } else { | |
+ tmp = rb_funcall(self, i_to_s, 0); | |
+ rb_raise(eGeneratorError, "%u: %s not allowed in JSON", __LINE__, StringValueCStr(tmp)); | |
+ } | |
+ } else { | |
+ return rb_funcall(self, i_to_s, 0); | |
+ } | |
+} | |
+ | |
+/* | |
+ * call-seq: String.included(modul) | |
+ * | |
+ * Extends _modul_ with the String::Extend module. | |
+ */ | |
+static VALUE mString_included_s(VALUE self, VALUE modul) { | |
+ return rb_funcall(modul, i_extend, 1, mString_Extend); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(*) | |
+ * | |
+ * This string should be encoded with UTF-8. A call to this method | 
+ * returns a JSON string encoded with UTF-16 big endian characters as | 
+ * \u????. | 
+ */ | |
+static VALUE mString_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ VALUE result = rb_str_buf_new(RSTRING_LEN(self)); | |
+ rb_str_buf_cat2(result, "\""); | |
+ JSON_convert_UTF8_to_JSON(result, self, strictConversion); | |
+ rb_str_buf_cat2(result, "\""); | |
+ return result; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json_raw_object() | |
+ * | |
+ * This method creates a raw object hash that can be nested into | 
+ * other data structures and will be unparsed as a raw string. This | 
+ * method should be used if you want to convert raw strings to JSON | 
+ * instead of UTF-8 strings, e.g. binary data. | 
+ */ | |
+static VALUE mString_to_json_raw_object(VALUE self) { | |
+ VALUE ary; | |
+ VALUE result = rb_hash_new(); | |
+ rb_hash_aset(result, rb_funcall(mJSON, i_create_id, 0), rb_class_name(rb_obj_class(self))); | |
+ ary = rb_funcall(self, i_unpack, 1, rb_str_new2("C*")); | |
+ rb_hash_aset(result, rb_str_new2("raw"), ary); | |
+ return result; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json_raw(*args) | |
+ * | |
+ * This method creates a JSON text from the result of a call to | |
+ * to_json_raw_object of this String. | |
+ */ | |
+static VALUE mString_to_json_raw(int argc, VALUE *argv, VALUE self) { | |
+ VALUE obj = mString_to_json_raw_object(self); | |
+ Check_Type(obj, T_HASH); | |
+ return mHash_to_json(argc, argv, obj); | |
+} | |
+ | |
+/* | |
+ * call-seq: json_create(o) | |
+ * | |
+ * Raw Strings are JSON Objects (the raw bytes are stored in an array for the | |
+ * key "raw"). The Ruby String can be created by this module method. | |
+ */ | |
+static VALUE mString_Extend_json_create(VALUE self, VALUE o) { | |
+ VALUE ary; | |
+ Check_Type(o, T_HASH); | |
+ ary = rb_hash_aref(o, rb_str_new2("raw")); | |
+ return rb_funcall(ary, i_pack, 1, rb_str_new2("C*")); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(state = nil, depth = 0) | |
+ * | |
+ * Returns a JSON string for true: 'true'. | |
+ */ | |
+static VALUE mTrueClass_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ return rb_str_new2("true"); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(state = nil, depth = 0) | |
+ * | |
+ * Returns a JSON string for false: 'false'. | |
+ */ | |
+static VALUE mFalseClass_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ return rb_str_new2("false"); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(state = nil, depth = 0) | |
+ * | |
+ */ | |
+static VALUE mNilClass_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ return rb_str_new2("null"); | |
+} | |
+ | |
+/* | |
+ * call-seq: to_json(*) | |
+ * | |
+ * Converts this object to a string (calling #to_s), converts | 
+ * it to a JSON string, and returns the result. This is a fallback if no | 
+ * special #to_json method was defined for the object. | 
+ */ | |
+static VALUE mObject_to_json(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ VALUE string = rb_funcall(self, i_to_s, 0); | |
+ Check_Type(string, T_STRING); | |
+ return mString_to_json(argc, argv, string); | |
+} | |
+ | |
+/* | |
+ * Document-class: JSON::Ext::Generator::State | |
+ * | |
+ * This class is used to create State instances, which are used to hold data | 
+ * while generating a JSON text from a Ruby data structure. | 
+ */ | |
+ | |
+static void State_mark(JSON_Generator_State *state) | |
+{ | |
+ rb_gc_mark_maybe(state->indent); | |
+ rb_gc_mark_maybe(state->space); | |
+ rb_gc_mark_maybe(state->space_before); | |
+ rb_gc_mark_maybe(state->object_nl); | |
+ rb_gc_mark_maybe(state->array_nl); | |
+ rb_gc_mark_maybe(state->seen); | |
+ rb_gc_mark_maybe(state->memo); | |
+ rb_gc_mark_maybe(state->depth); | |
+} | |
+ | |
+static JSON_Generator_State *State_allocate() | |
+{ | |
+ JSON_Generator_State *state = ALLOC(JSON_Generator_State); | |
+ return state; | |
+} | |
+ | |
+static VALUE cState_s_allocate(VALUE klass) | |
+{ | |
+ JSON_Generator_State *state = State_allocate(); | |
+ return Data_Wrap_Struct(klass, State_mark, -1, state); | |
+} | |
+ | |
+/* | |
+ * call-seq: configure(opts) | |
+ * | |
+ * Configure this State instance with the Hash _opts_, and return | |
+ * itself. | |
+ */ | |
+static inline VALUE cState_configure(VALUE self, VALUE opts) | |
+{ | |
+ VALUE tmp; | |
+ GET_STATE(self); | |
+ tmp = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); | |
+ if (NIL_P(tmp)) tmp = rb_convert_type(opts, T_HASH, "Hash", "to_h"); | |
+ if (NIL_P(tmp)) { | |
+ rb_raise(rb_eArgError, "opts has to be hash like or convertable into a hash"); | |
+ } | |
+ opts = tmp; | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_indent)); | |
+ if (RTEST(tmp)) { | |
+ Check_Type(tmp, T_STRING); | |
+ state->indent = tmp; | |
+ } | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_space)); | |
+ if (RTEST(tmp)) { | |
+ Check_Type(tmp, T_STRING); | |
+ state->space = tmp; | |
+ } | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_space_before)); | |
+ if (RTEST(tmp)) { | |
+ Check_Type(tmp, T_STRING); | |
+ state->space_before = tmp; | |
+ } | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_array_nl)); | |
+ if (RTEST(tmp)) { | |
+ Check_Type(tmp, T_STRING); | |
+ state->array_nl = tmp; | |
+ } | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_object_nl)); | |
+ if (RTEST(tmp)) { | |
+ Check_Type(tmp, T_STRING); | |
+ state->object_nl = tmp; | |
+ } | |
+ tmp = ID2SYM(i_check_circular); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_check_circular)); | |
+ state->check_circular = RTEST(tmp); | |
+ } else { | |
+ state->check_circular = 1; | |
+ } | |
+ tmp = ID2SYM(i_max_nesting); | |
+ state->max_nesting = 19; | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE max_nesting = rb_hash_aref(opts, tmp); | |
+ if (RTEST(max_nesting)) { | |
+ Check_Type(max_nesting, T_FIXNUM); | |
+ state->max_nesting = FIX2LONG(max_nesting); | |
+ } else { | |
+ state->max_nesting = 0; | |
+ } | |
+ } | |
+ tmp = rb_hash_aref(opts, ID2SYM(i_allow_nan)); | |
+ state->allow_nan = RTEST(tmp); | |
+ return self; | |
+} | |
+ | |
+/* | |
+ * call-seq: to_h | |
+ * | |
+ * Returns the configuration instance variables as a hash that can be | 
+ * passed to the configure method. | 
+ */ | |
+static VALUE cState_to_h(VALUE self) | |
+{ | |
+ VALUE result = rb_hash_new(); | |
+ GET_STATE(self); | |
+ rb_hash_aset(result, ID2SYM(i_indent), state->indent); | |
+ rb_hash_aset(result, ID2SYM(i_space), state->space); | |
+ rb_hash_aset(result, ID2SYM(i_space_before), state->space_before); | |
+ rb_hash_aset(result, ID2SYM(i_object_nl), state->object_nl); | |
+ rb_hash_aset(result, ID2SYM(i_array_nl), state->array_nl); | |
+ rb_hash_aset(result, ID2SYM(i_check_circular), state->check_circular ? Qtrue : Qfalse); | |
+ rb_hash_aset(result, ID2SYM(i_allow_nan), state->allow_nan ? Qtrue : Qfalse); | |
+ rb_hash_aset(result, ID2SYM(i_max_nesting), LONG2FIX(state->max_nesting)); | |
+ return result; | |
+} | |
+ | |
+ | |
+/* | |
+ * call-seq: new(opts = {}) | |
+ * | |
+ * Instantiates a new State object, configured by _opts_. | |
+ * | |
+ * _opts_ can have the following keys: | |
+ * | |
+ * * *indent*: a string used to indent levels (default: ''), | |
+ * * *space*: a string that is put after a : or , delimiter (default: ''), | 
+ * * *space_before*: a string that is put before a : pair delimiter (default: ''), | |
+ * * *object_nl*: a string that is put at the end of a JSON object (default: ''), | |
+ * * *array_nl*: a string that is put at the end of a JSON array (default: ''), | |
+ * * *check_circular*: true if checking for circular data structures | |
+ *   should be done, false otherwise. | 
+ * * *allow_nan*: true if NaN, Infinity, and -Infinity should be | 
+ *   generated; otherwise an exception is raised if these values are | 
+ *   encountered. This option defaults to false. | 
+ */ | |
+static VALUE cState_initialize(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ VALUE opts; | |
+ GET_STATE(self); | |
+ | |
+ rb_scan_args(argc, argv, "01", &opts); | |
+ state->indent = rb_str_new2(""); | |
+ state->space = rb_str_new2(""); | |
+ state->space_before = rb_str_new2(""); | |
+ state->array_nl = rb_str_new2(""); | |
+ state->object_nl = rb_str_new2(""); | |
+ if (NIL_P(opts)) { | |
+ state->check_circular = 1; | |
+ state->allow_nan = 0; | |
+ state->max_nesting = 19; | |
+ } else { | |
+ cState_configure(self, opts); | |
+ } | |
+ state->seen = rb_hash_new(); | |
+ state->memo = Qnil; | |
+ state->depth = INT2FIX(0); | |
+ return self; | |
+} | |
+ | |
+/* | |
+ * call-seq: from_state(opts) | |
+ * | |
+ * Creates a State object from _opts_, which ought to be a Hash to create a | 
+ * new State instance configured by _opts_, or something else to create an | 
+ * unconfigured instance. If _opts_ is a State object, it is just returned. | 
+ */ | |
+static VALUE cState_from_state_s(VALUE self, VALUE opts) | |
+{ | |
+ if (rb_obj_is_kind_of(opts, self)) { | |
+ return opts; | |
+ } else if (rb_obj_is_kind_of(opts, rb_cHash)) { | |
+ return rb_funcall(self, i_new, 1, opts); | |
+ } else { | |
+ return rb_funcall(self, i_new, 0); | |
+ } | |
+} | |
+ | |
+/* | |
+ * call-seq: indent() | |
+ * | |
+ * This string is used to indent levels in the JSON text. | |
+ */ | |
+static VALUE cState_indent(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->indent; | |
+} | |
+ | |
+/* | |
+ * call-seq: indent=(indent) | |
+ * | |
+ * This string is used to indent levels in the JSON text. | |
+ */ | |
+static VALUE cState_indent_set(VALUE self, VALUE indent) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(indent, T_STRING); | |
+ return state->indent = indent; | |
+} | |
+ | |
+/* | |
+ * call-seq: space() | |
+ * | |
+ * This string is used to insert a space between the tokens in a JSON | |
+ * string. | |
+ */ | |
+static VALUE cState_space(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->space; | |
+} | |
+ | |
+/* | |
+ * call-seq: space=(space) | |
+ * | |
+ * This string is used to insert a space between the tokens in a JSON | |
+ * string. | |
+ */ | |
+static VALUE cState_space_set(VALUE self, VALUE space) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(space, T_STRING); | |
+ return state->space = space; | |
+} | |
+ | |
+/* | |
+ * call-seq: space_before() | |
+ * | |
+ * This string is used to insert a space before the ':' in JSON objects. | |
+ */ | |
+static VALUE cState_space_before(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->space_before; | |
+} | |
+ | |
+/* | |
+ * call-seq: space_before=(space_before) | |
+ * | |
+ * This string is used to insert a space before the ':' in JSON objects. | |
+ */ | |
+static VALUE cState_space_before_set(VALUE self, VALUE space_before) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(space_before, T_STRING); | |
+ return state->space_before = space_before; | |
+} | |
+ | |
+/* | |
+ * call-seq: object_nl() | |
+ * | |
+ * This string is put at the end of a line that holds a JSON object (or | |
+ * Hash). | |
+ */ | |
+static VALUE cState_object_nl(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->object_nl; | |
+} | |
+ | |
+/* | |
+ * call-seq: object_nl=(object_nl) | |
+ * | |
+ * This string is put at the end of a line that holds a JSON object (or | |
+ * Hash). | |
+ */ | |
+static VALUE cState_object_nl_set(VALUE self, VALUE object_nl) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(object_nl, T_STRING); | |
+ return state->object_nl = object_nl; | |
+} | |
+ | |
+/* | |
+ * call-seq: array_nl() | |
+ * | |
+ * This string is put at the end of a line that holds a JSON array. | |
+ */ | |
+static VALUE cState_array_nl(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->array_nl; | |
+} | |
+ | |
+/* | |
+ * call-seq: array_nl=(array_nl) | |
+ * | |
+ * This string is put at the end of a line that holds a JSON array. | |
+ */ | |
+static VALUE cState_array_nl_set(VALUE self, VALUE array_nl) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(array_nl, T_STRING); | |
+ return state->array_nl = array_nl; | |
+} | |
+ | |
+/* | |
+ * call-seq: check_circular? | |
+ * | |
+ * Returns true if circular data structures should be checked; | 
+ * otherwise returns false. | 
+ */ | |
+static VALUE cState_check_circular_p(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->check_circular ? Qtrue : Qfalse; | |
+} | |
+ | |
+/* | |
+ * call-seq: max_nesting | |
+ * | |
+ * Returns the maximum level of data structure nesting allowed in | 
+ * the generated JSON; max_nesting = 0 means no maximum is checked. | 
+ */ | |
+static VALUE cState_max_nesting(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return LONG2FIX(state->max_nesting); | |
+} | |
+ | |
+/* | |
+ * call-seq: max_nesting=(depth) | |
+ * | |
+ * Sets the maximum level of data structure nesting in the generated JSON | 
+ * to the integer depth; max_nesting = 0 means no maximum should be checked. | 
+ */ | |
+static VALUE cState_max_nesting_set(VALUE self, VALUE depth) | |
+{ | |
+ GET_STATE(self); | |
+ Check_Type(depth, T_FIXNUM); | |
+ state->max_nesting = FIX2LONG(depth); | |
+ return Qnil; | |
+} | |
+ | |
+/* | |
+ * call-seq: allow_nan? | |
+ * | |
+ * Returns true if NaN, Infinity, and -Infinity should be generated; otherwise | 
+ * returns false. | 
+ */ | |
+static VALUE cState_allow_nan_p(VALUE self) | |
+{ | |
+ GET_STATE(self); | |
+ return state->allow_nan ? Qtrue : Qfalse; | |
+} | |
+ | |
+/* | |
+ * call-seq: seen?(object) | |
+ * | |
+ * Returns _true_ if _object_ was already seen during this generating run. | 
+ */ | |
+static VALUE cState_seen_p(VALUE self, VALUE object) | |
+{ | |
+ GET_STATE(self); | |
+ return rb_hash_aref(state->seen, rb_obj_id(object)); | |
+} | |
+ | |
+/* | |
+ * call-seq: remember(object) | |
+ * | |
+ * Remember _object_, to find out if it was already encountered (if a cyclic | |
+ * data structure is rendered). | |
+ */ | |
+static VALUE cState_remember(VALUE self, VALUE object) | |
+{ | |
+ GET_STATE(self); | |
+ return rb_hash_aset(state->seen, rb_obj_id(object), Qtrue); | |
+} | |
+ | |
+/* | |
+ * call-seq: forget(object) | |
+ * | |
+ * Forget _object_ for this generating run. | |
+ */ | |
+static VALUE cState_forget(VALUE self, VALUE object) | |
+{ | |
+ GET_STATE(self); | |
+ return rb_hash_delete(state->seen, rb_obj_id(object)); | |
+} | |
+ | |
+/* | |
+ * | |
+ */ | |
+void Init_generator() | |
+{ | |
+ rb_require("json/common"); | |
+ mJSON = rb_define_module("JSON"); | |
+ mExt = rb_define_module_under(mJSON, "Ext"); | |
+ mGenerator = rb_define_module_under(mExt, "Generator"); | |
+ eGeneratorError = rb_path2class("JSON::GeneratorError"); | |
+ eCircularDatastructure = rb_path2class("JSON::CircularDatastructure"); | |
+ eNestingError = rb_path2class("JSON::NestingError"); | |
+ cState = rb_define_class_under(mGenerator, "State", rb_cObject); | |
+ rb_define_alloc_func(cState, cState_s_allocate); | |
+ rb_define_singleton_method(cState, "from_state", cState_from_state_s, 1); | |
+ rb_define_method(cState, "initialize", cState_initialize, -1); | |
+ | |
+ rb_define_method(cState, "indent", cState_indent, 0); | |
+ rb_define_method(cState, "indent=", cState_indent_set, 1); | |
+ rb_define_method(cState, "space", cState_space, 0); | |
+ rb_define_method(cState, "space=", cState_space_set, 1); | |
+ rb_define_method(cState, "space_before", cState_space_before, 0); | |
+ rb_define_method(cState, "space_before=", cState_space_before_set, 1); | |
+ rb_define_method(cState, "object_nl", cState_object_nl, 0); | |
+ rb_define_method(cState, "object_nl=", cState_object_nl_set, 1); | |
+ rb_define_method(cState, "array_nl", cState_array_nl, 0); | |
+ rb_define_method(cState, "array_nl=", cState_array_nl_set, 1); | |
+ rb_define_method(cState, "check_circular?", cState_check_circular_p, 0); | |
+ rb_define_method(cState, "max_nesting", cState_max_nesting, 0); | |
+ rb_define_method(cState, "max_nesting=", cState_max_nesting_set, 1); | |
+ rb_define_method(cState, "allow_nan?", cState_allow_nan_p, 0); | |
+ rb_define_method(cState, "seen?", cState_seen_p, 1); | |
+ rb_define_method(cState, "remember", cState_remember, 1); | |
+ rb_define_method(cState, "forget", cState_forget, 1); | |
+ rb_define_method(cState, "configure", cState_configure, 1); | |
+ rb_define_method(cState, "to_h", cState_to_h, 0); | |
+ | |
+ mGeneratorMethods = rb_define_module_under(mGenerator, "GeneratorMethods"); | |
+ mObject = rb_define_module_under(mGeneratorMethods, "Object"); | |
+ rb_define_method(mObject, "to_json", mObject_to_json, -1); | |
+ mHash = rb_define_module_under(mGeneratorMethods, "Hash"); | |
+ rb_define_method(mHash, "to_json", mHash_to_json, -1); | |
+ mArray = rb_define_module_under(mGeneratorMethods, "Array"); | |
+ rb_define_method(mArray, "to_json", mArray_to_json, -1); | |
+ mInteger = rb_define_module_under(mGeneratorMethods, "Integer"); | |
+ rb_define_method(mInteger, "to_json", mInteger_to_json, -1); | |
+ mFloat = rb_define_module_under(mGeneratorMethods, "Float"); | |
+ rb_define_method(mFloat, "to_json", mFloat_to_json, -1); | |
+ mString = rb_define_module_under(mGeneratorMethods, "String"); | |
+ rb_define_singleton_method(mString, "included", mString_included_s, 1); | |
+ rb_define_method(mString, "to_json", mString_to_json, -1); | |
+ rb_define_method(mString, "to_json_raw", mString_to_json_raw, -1); | |
+ rb_define_method(mString, "to_json_raw_object", mString_to_json_raw_object, 0); | |
+ mString_Extend = rb_define_module_under(mString, "Extend"); | |
+ rb_define_method(mString_Extend, "json_create", mString_Extend_json_create, 1); | |
+ mTrueClass = rb_define_module_under(mGeneratorMethods, "TrueClass"); | |
+ rb_define_method(mTrueClass, "to_json", mTrueClass_to_json, -1); | |
+ mFalseClass = rb_define_module_under(mGeneratorMethods, "FalseClass"); | |
+ rb_define_method(mFalseClass, "to_json", mFalseClass_to_json, -1); | |
+ mNilClass = rb_define_module_under(mGeneratorMethods, "NilClass"); | |
+ rb_define_method(mNilClass, "to_json", mNilClass_to_json, -1); | |
+ | |
+ i_to_s = rb_intern("to_s"); | |
+ i_to_json = rb_intern("to_json"); | |
+ i_new = rb_intern("new"); | |
+ i_indent = rb_intern("indent"); | |
+ i_space = rb_intern("space"); | |
+ i_space_before = rb_intern("space_before"); | |
+ i_object_nl = rb_intern("object_nl"); | |
+ i_array_nl = rb_intern("array_nl"); | |
+ i_check_circular = rb_intern("check_circular"); | |
+ i_max_nesting = rb_intern("max_nesting"); | |
+ i_allow_nan = rb_intern("allow_nan"); | |
+ i_pack = rb_intern("pack"); | |
+ i_unpack = rb_intern("unpack"); | |
+ i_create_id = rb_intern("create_id"); | |
+ i_extend = rb_intern("extend"); | |
+} | |
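The remember/forget pair above is the C half of the generator's circular-structure check: remember records an object's id in the State's seen hash before a container is rendered, seen? looks it up, and forget clears it once the container is done. A minimal sketch of how the three fit together inside generator.c (the helper name and the exact raise site are assumptions for illustration, not part of this patch):

static VALUE generate_with_cycle_check(VALUE self, VALUE obj)
{
    VALUE id;
    GET_STATE(self);                           /* declares and fills `state` */
    id = rb_obj_id(obj);
    if (RTEST(rb_hash_aref(state->seen, id)))  /* seen? */
        rb_raise(eCircularDatastructure, "circular data structures not supported!");
    rb_hash_aset(state->seen, id, Qtrue);      /* remember */
    /* ... render obj into the output buffer here ... */
    rb_hash_delete(state->seen, id);           /* forget, once rendering finished */
    return Qnil;
}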
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.c b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.c | |
new file mode 100644 | |
index 0000000..76834ea | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.c | |
@@ -0,0 +1,182 @@ | |
+#include "unicode.h" | |
+ | |
+#define unicode_escape(buffer, character) \ | |
+ snprintf(buf, 7, "\\u%04x", (unsigned int) (character)); \ | |
+ rb_str_buf_cat(buffer, buf, 6); | |
+ | |
+/* | |
+ * Copyright 2001-2004 Unicode, Inc. | |
+ * | |
+ * Disclaimer | |
+ * | |
+ * This source code is provided as is by Unicode, Inc. No claims are | |
+ * made as to fitness for any particular purpose. No warranties of any | |
+ * kind are expressed or implied. The recipient agrees to determine | |
+ * applicability of information provided. If this file has been | |
+ * purchased on magnetic or optical media from Unicode, Inc., the | |
+ * sole remedy for any claim will be exchange of defective media | |
+ * within 90 days of receipt. | |
+ * | |
+ * Limitations on Rights to Redistribute This Code | |
+ * | |
+ * Unicode, Inc. hereby grants the right to freely use the information | |
+ * supplied in this file in the creation of products supporting the | |
+ * Unicode Standard, and to make copies of this file in any form | |
+ * for internal or external distribution as long as this notice | |
+ * remains attached. | |
+ */ | |
+ | |
+/* | |
+ * Index into the table below with the first byte of a UTF-8 sequence to | |
+ * get the number of trailing bytes that are supposed to follow it. | |
+ * Note that *legal* UTF-8 values can't have 4 or 5 bytes. The table is | |
+ * left as-is for anyone who may want to do such conversion, which was | |
+ * allowed in earlier algorithms. | |
+ */ | |
+static const char trailingBytesForUTF8[256] = { | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, | |
+ 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 | |
+}; | |
+ | |
+/* | |
+ * Magic values subtracted from a buffer value during UTF8 conversion. | |
+ * This table contains as many values as there might be trailing bytes | |
+ * in a UTF-8 sequence. | |
+ */ | |
+static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, | |
+ 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; | |
+ | |
+/* | |
+ * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed | |
+ * into the first byte, depending on how many bytes follow. There are | |
+ * as many entries in this table as there are UTF-8 sequence types. | |
+ * (I.e., one byte sequence, two byte... etc.). Remember that sequences | |
+ * for *legal* UTF-8 will be 4 or fewer bytes total. | |
+ */ | |
+static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; | |
+ | |
+/* | |
+ * Utility routine to tell whether a sequence of bytes is legal UTF-8. | |
+ * This must be called with the length pre-determined by the first byte. | |
+ * If not calling this from ConvertUTF8to*, then the length can be set by: | |
+ * length = trailingBytesForUTF8[*source]+1; | |
+ * and the sequence is illegal right away if there aren't that many bytes | |
+ * available. | |
+ * If presented with a length > 4, this returns 0. The Unicode | |
+ * definition of UTF-8 goes up to 4-byte sequences. | |
+ */ | |
+ | |
+inline static unsigned char isLegalUTF8(const UTF8 *source, int length) | |
+{ | |
+ UTF8 a; | |
+ const UTF8 *srcptr = source+length; | |
+ switch (length) { | |
+ default: return 0; | |
+ /* Everything else falls through when "1"... */ | |
+ case 4: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; | |
+ case 3: if ((a = (*--srcptr)) < 0x80 || a > 0xBF) return 0; | |
+ case 2: if ((a = (*--srcptr)) > 0xBF) return 0; | |
+ | |
+ switch (*source) { | |
+ /* no fall-through in this inner switch */ | |
+ case 0xE0: if (a < 0xA0) return 0; break; | |
+ case 0xED: if (a > 0x9F) return 0; break; | |
+ case 0xF0: if (a < 0x90) return 0; break; | |
+ case 0xF4: if (a > 0x8F) return 0; break; | |
+ default: if (a < 0x80) return 0; | |
+ } | |
+ | |
+ case 1: if (*source >= 0x80 && *source < 0xC2) return 0; | |
+ } | |
+ if (*source > 0xF4) return 0; | |
+ return 1; | |
+} | |
+ | |
+void JSON_convert_UTF8_to_JSON(VALUE buffer, VALUE string, ConversionFlags flags) | |
+{ | |
+ char buf[7]; | |
+ const UTF8* source = (UTF8 *) RSTRING_PTR(string); | |
+ const UTF8* sourceEnd = source + RSTRING_LEN(string); | |
+ | |
+ while (source < sourceEnd) { | |
+ UTF32 ch = 0; | |
+ unsigned short extraBytesToRead = trailingBytesForUTF8[*source]; | |
+ if (source + extraBytesToRead >= sourceEnd) { | |
+ rb_raise(rb_path2class("JSON::GeneratorError"), | |
+ "partial character in source, but hit end"); | |
+ } | |
+ if (!isLegalUTF8(source, extraBytesToRead+1)) { | |
+ rb_raise(rb_path2class("JSON::GeneratorError"), | |
+ "source sequence is illegal/malformed"); | |
+ } | |
+ /* | |
+ * The cases all fall through. See "Note A" below. | |
+ */ | |
+ switch (extraBytesToRead) { | |
+ case 5: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ | |
+ case 4: ch += *source++; ch <<= 6; /* remember, illegal UTF-8 */ | |
+ case 3: ch += *source++; ch <<= 6; | |
+ case 2: ch += *source++; ch <<= 6; | |
+ case 1: ch += *source++; ch <<= 6; | |
+ case 0: ch += *source++; | |
+ } | |
+ ch -= offsetsFromUTF8[extraBytesToRead]; | |
+ | |
+ if (ch <= UNI_MAX_BMP) { /* Target is a character <= 0xFFFF */ | |
+ /* UTF-16 surrogate values are illegal in UTF-32 */ | |
+ if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_LOW_END) { | |
+ if (flags == strictConversion) { | |
+ source -= (extraBytesToRead+1); /* return to the illegal value itself */ | |
+ rb_raise(rb_path2class("JSON::GeneratorError"), | |
+ "source sequence is illegal/malformed"); | |
+ } else { | |
+ unicode_escape(buffer, UNI_REPLACEMENT_CHAR); | |
+ } | |
+ } else { | |
+ /* normal case */ | |
+ if (ch == '"') { | |
+ rb_str_buf_cat2(buffer, "\\\""); | |
+ } else if (ch == '\\') { | |
+ rb_str_buf_cat2(buffer, "\\\\"); | |
+ } else if (ch == '/') { | |
+ rb_str_buf_cat2(buffer, "\\/"); | |
+ } else if (ch >= 0x20 && ch <= 0x7f) { | |
+ rb_str_buf_cat(buffer, (char *) source - 1, 1); | |
+ } else if (ch == '\n') { | |
+ rb_str_buf_cat2(buffer, "\\n"); | |
+ } else if (ch == '\r') { | |
+ rb_str_buf_cat2(buffer, "\\r"); | |
+ } else if (ch == '\t') { | |
+ rb_str_buf_cat2(buffer, "\\t"); | |
+ } else if (ch == '\f') { | |
+ rb_str_buf_cat2(buffer, "\\f"); | |
+ } else if (ch == '\b') { | |
+ rb_str_buf_cat2(buffer, "\\b"); | |
+ } else if (ch < 0x20) { | |
+ unicode_escape(buffer, (UTF16) ch); | |
+ } else { | |
+ unicode_escape(buffer, (UTF16) ch); | |
+ } | |
+ } | |
+ } else if (ch > UNI_MAX_UTF16) { | |
+ if (flags == strictConversion) { | |
+ source -= (extraBytesToRead+1); /* return to the start */ | |
+ rb_raise(rb_path2class("JSON::GeneratorError"), | |
+ "source sequence is illegal/malformed"); | |
+ } else { | |
+ unicode_escape(buffer, UNI_REPLACEMENT_CHAR); | |
+ } | |
+ } else { | |
+ /* target is a character in range 0xFFFF - 0x10FFFF. */ | |
+ ch -= halfBase; | |
+ unicode_escape(buffer, (UTF16)((ch >> halfShift) + UNI_SUR_HIGH_START)); | |
+ unicode_escape(buffer, (UTF16)((ch & halfMask) + UNI_SUR_LOW_START)); | |
+ } | |
+ } | |
+} | |
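The surrogate-pair branch at the end of JSON_convert_UTF8_to_JSON (the halfBase/halfShift/halfMask arithmetic) is easiest to follow with a concrete code point. Below is a standalone check in plain C, independent of the Ruby extension; the constants are copied from unicode.h and the expected pair is the well-known UTF-16 encoding of U+1D11E (musical G clef):

#include <assert.h>
#include <stdio.h>

int main(void)
{
    unsigned long ch = 0x1D11E;                 /* code point above the BMP */
    const unsigned long halfBase = 0x0010000UL;
    const unsigned long halfMask = 0x3FFUL;
    const int halfShift = 10;
    unsigned int hi, lo;

    ch -= halfBase;
    hi = (unsigned int)((ch >> halfShift) + 0xD800);  /* UNI_SUR_HIGH_START */
    lo = (unsigned int)((ch & halfMask) + 0xDC00);    /* UNI_SUR_LOW_START  */
    assert(hi == 0xD834 && lo == 0xDD1E);
    printf("\\u%04x\\u%04x\n", hi, lo);               /* prints \ud834\udd1e */
    return 0;
}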
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.h b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.h | |
new file mode 100644 | |
index 0000000..841474b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/generator/unicode.h | |
@@ -0,0 +1,53 @@ | |
+#include "ruby.h" | |
+ | |
+#ifndef _GENERATOR_UNICODE_H_ | |
+#define _GENERATOR_UNICODE_H_ | |
+ | |
+typedef enum { | |
+ conversionOK = 0, /* conversion successful */ | |
+ sourceExhausted, /* partial character in source, but hit end */ | |
+ targetExhausted, /* insuff. room in target for conversion */ | |
+ sourceIllegal /* source sequence is illegal/malformed */ | |
+} ConversionResult; | |
+ | |
+typedef enum { | |
+ strictConversion = 0, | |
+ lenientConversion | |
+} ConversionFlags; | |
+ | |
+typedef unsigned long UTF32; /* at least 32 bits */ | |
+typedef unsigned short UTF16; /* at least 16 bits */ | |
+typedef unsigned char UTF8; /* typically 8 bits */ | |
+ | |
+#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD | |
+#define UNI_MAX_BMP (UTF32)0x0000FFFF | |
+#define UNI_MAX_UTF16 (UTF32)0x0010FFFF | |
+#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF | |
+#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF | |
+ | |
+#define UNI_SUR_HIGH_START (UTF32)0xD800 | |
+#define UNI_SUR_HIGH_END (UTF32)0xDBFF | |
+#define UNI_SUR_LOW_START (UTF32)0xDC00 | |
+#define UNI_SUR_LOW_END (UTF32)0xDFFF | |
+ | |
+static const int halfShift = 10; /* used for shifting by 10 bits */ | |
+ | |
+static const UTF32 halfBase = 0x0010000UL; | |
+static const UTF32 halfMask = 0x3FFUL; | |
+ | |
+void JSON_convert_UTF8_to_JSON(VALUE buffer, VALUE string, ConversionFlags flags); | |
+ | |
+#ifndef RARRAY_PTR | |
+#define RARRAY_PTR(ARRAY) RARRAY(ARRAY)->ptr | |
+#endif | |
+#ifndef RARRAY_LEN | |
+#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len | |
+#endif | |
+#ifndef RSTRING_PTR | |
+#define RSTRING_PTR(string) RSTRING(string)->ptr | |
+#endif | |
+#ifndef RSTRING_LEN | |
+#define RSTRING_LEN(string) RSTRING(string)->len | |
+#endif | |
+ | |
+#endif | |
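unicode.h boils down to one entry point for the generator, JSON_convert_UTF8_to_JSON, which appends the JSON-escaped form of a UTF-8 Ruby string to a Ruby string buffer. A sketch of a caller, mirroring how the generator quotes string values (the helper name json_escape_string is an assumption; it is not part of this patch and would have to be compiled alongside unicode.c):

#include "unicode.h"

/* Hypothetical helper: wrap the escaped payload in quotes, the way String#to_json does. */
static VALUE json_escape_string(VALUE string)
{
    VALUE buffer = rb_str_buf_new(RSTRING_LEN(string) + 2);
    rb_str_buf_cat2(buffer, "\"");
    JSON_convert_UTF8_to_JSON(buffer, string, strictConversion);
    rb_str_buf_cat2(buffer, "\"");
    return buffer;
}

Passing lenientConversion instead substitutes U+FFFD escapes for lone surrogates and out-of-range code points rather than raising JSON::GeneratorError.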
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/extconf.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/extconf.rb | |
new file mode 100644 | |
index 0000000..f511bf0 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/extconf.rb | |
@@ -0,0 +1,9 @@ | |
+require 'mkmf' | |
+require 'rbconfig' | |
+ | |
+if CONFIG['CC'] =~ /gcc/ | |
+ $CFLAGS += ' -Wall' | |
+ #$CFLAGS += ' -O0 -ggdb' | |
+end | |
+ | |
+create_makefile 'parser' | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.c b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.c | |
new file mode 100644 | |
index 0000000..b13c0e7 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.c | |
@@ -0,0 +1,1754 @@ | |
+#line 1 "parser.rl" | |
+#include "ruby.h" | |
+#include "re.h" | |
+#include "st.h" | |
+#include "unicode.h" | |
+ | |
+#define EVIL 0x666 | |
+ | |
+static VALUE mJSON, mExt, cParser, eParserError, eNestingError; | |
+static VALUE CNaN, CInfinity, CMinusInfinity; | |
+ | |
+static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, | |
+ i_chr, i_max_nesting, i_allow_nan; | |
+ | |
+#define MinusInfinity "-Infinity" | |
+ | |
+typedef struct JSON_ParserStruct { | |
+ VALUE Vsource; | |
+ char *source; | |
+ long len; | |
+ char *memo; | |
+ VALUE create_id; | |
+ int max_nesting; | |
+ int current_nesting; | |
+ int allow_nan; | |
+} JSON_Parser; | |
+ | |
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+ | |
+#define GET_STRUCT \ | |
+ JSON_Parser *json; \ | |
+ Data_Get_Struct(self, JSON_Parser, json); | |
+ | |
+#line 64 "parser.rl" | |
+ | |
+ | |
+ | |
+#line 44 "parser.c" | |
+static const int JSON_object_start = 1; | |
+static const int JSON_object_first_final = 27; | |
+static const int JSON_object_error = 0; | |
+ | |
+static const int JSON_object_en_main = 1; | |
+ | |
+#line 97 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ VALUE last_name = Qnil; | |
+ | |
+ if (json->max_nesting && json->current_nesting > json->max_nesting) { | |
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); | |
+ } | |
+ | |
+ *result = rb_hash_new(); | |
+ | |
+ | |
+#line 66 "parser.c" | |
+ { | |
+ cs = JSON_object_start; | |
+ } | |
+#line 111 "parser.rl" | |
+ | |
+#line 72 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ if ( (*p) == 123 ) | |
+ goto st2; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ switch( (*p) ) { | |
+ case 13: goto st2; | |
+ case 32: goto st2; | |
+ case 34: goto tr2; | |
+ case 47: goto st23; | |
+ case 125: goto tr4; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st2; | |
+ goto st0; | |
+tr2: | |
+#line 83 "parser.rl" | |
+ { | |
+ char *np = JSON_parse_string(json, p, pe, &last_name); | |
+ if (np == NULL) { p--; {p++; cs = 3; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st3; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+#line 110 "parser.c" | |
+ switch( (*p) ) { | |
+ case 13: goto st3; | |
+ case 32: goto st3; | |
+ case 47: goto st4; | |
+ case 58: goto st8; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st3; | |
+ goto st0; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ switch( (*p) ) { | |
+ case 42: goto st5; | |
+ case 47: goto st7; | |
+ } | |
+ goto st0; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ if ( (*p) == 42 ) | |
+ goto st6; | |
+ goto st5; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ switch( (*p) ) { | |
+ case 42: goto st6; | |
+ case 47: goto st3; | |
+ } | |
+ goto st5; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ if ( (*p) == 10 ) | |
+ goto st3; | |
+ goto st7; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+ switch( (*p) ) { | |
+ case 13: goto st8; | |
+ case 32: goto st8; | |
+ case 34: goto tr11; | |
+ case 45: goto tr11; | |
+ case 47: goto st19; | |
+ case 73: goto tr11; | |
+ case 78: goto tr11; | |
+ case 91: goto tr11; | |
+ case 102: goto tr11; | |
+ case 110: goto tr11; | |
+ case 116: goto tr11; | |
+ case 123: goto tr11; | |
+ } | |
+ if ( (*p) > 10 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto tr11; | |
+ } else if ( (*p) >= 9 ) | |
+ goto st8; | |
+ goto st0; | |
+tr11: | |
+#line 72 "parser.rl" | |
+ { | |
+ VALUE v = Qnil; | |
+ char *np = JSON_parse_value(json, p, pe, &v); | |
+ if (np == NULL) { | |
+ p--; {p++; cs = 9; goto _out;} | |
+ } else { | |
+ rb_hash_aset(*result, last_name, v); | |
+ {p = (( np))-1;} | |
+ } | |
+ } | |
+ goto st9; | |
+st9: | |
+ if ( ++p == pe ) | |
+ goto _test_eof9; | |
+case 9: | |
+#line 193 "parser.c" | |
+ switch( (*p) ) { | |
+ case 13: goto st9; | |
+ case 32: goto st9; | |
+ case 44: goto st10; | |
+ case 47: goto st15; | |
+ case 125: goto tr4; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st9; | |
+ goto st0; | |
+st10: | |
+ if ( ++p == pe ) | |
+ goto _test_eof10; | |
+case 10: | |
+ switch( (*p) ) { | |
+ case 13: goto st10; | |
+ case 32: goto st10; | |
+ case 34: goto tr2; | |
+ case 47: goto st11; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st10; | |
+ goto st0; | |
+st11: | |
+ if ( ++p == pe ) | |
+ goto _test_eof11; | |
+case 11: | |
+ switch( (*p) ) { | |
+ case 42: goto st12; | |
+ case 47: goto st14; | |
+ } | |
+ goto st0; | |
+st12: | |
+ if ( ++p == pe ) | |
+ goto _test_eof12; | |
+case 12: | |
+ if ( (*p) == 42 ) | |
+ goto st13; | |
+ goto st12; | |
+st13: | |
+ if ( ++p == pe ) | |
+ goto _test_eof13; | |
+case 13: | |
+ switch( (*p) ) { | |
+ case 42: goto st13; | |
+ case 47: goto st10; | |
+ } | |
+ goto st12; | |
+st14: | |
+ if ( ++p == pe ) | |
+ goto _test_eof14; | |
+case 14: | |
+ if ( (*p) == 10 ) | |
+ goto st10; | |
+ goto st14; | |
+st15: | |
+ if ( ++p == pe ) | |
+ goto _test_eof15; | |
+case 15: | |
+ switch( (*p) ) { | |
+ case 42: goto st16; | |
+ case 47: goto st18; | |
+ } | |
+ goto st0; | |
+st16: | |
+ if ( ++p == pe ) | |
+ goto _test_eof16; | |
+case 16: | |
+ if ( (*p) == 42 ) | |
+ goto st17; | |
+ goto st16; | |
+st17: | |
+ if ( ++p == pe ) | |
+ goto _test_eof17; | |
+case 17: | |
+ switch( (*p) ) { | |
+ case 42: goto st17; | |
+ case 47: goto st9; | |
+ } | |
+ goto st16; | |
+st18: | |
+ if ( ++p == pe ) | |
+ goto _test_eof18; | |
+case 18: | |
+ if ( (*p) == 10 ) | |
+ goto st9; | |
+ goto st18; | |
+tr4: | |
+#line 88 "parser.rl" | |
+ { p--; {p++; cs = 27; goto _out;} } | |
+ goto st27; | |
+st27: | |
+ if ( ++p == pe ) | |
+ goto _test_eof27; | |
+case 27: | |
+#line 289 "parser.c" | |
+ goto st0; | |
+st19: | |
+ if ( ++p == pe ) | |
+ goto _test_eof19; | |
+case 19: | |
+ switch( (*p) ) { | |
+ case 42: goto st20; | |
+ case 47: goto st22; | |
+ } | |
+ goto st0; | |
+st20: | |
+ if ( ++p == pe ) | |
+ goto _test_eof20; | |
+case 20: | |
+ if ( (*p) == 42 ) | |
+ goto st21; | |
+ goto st20; | |
+st21: | |
+ if ( ++p == pe ) | |
+ goto _test_eof21; | |
+case 21: | |
+ switch( (*p) ) { | |
+ case 42: goto st21; | |
+ case 47: goto st8; | |
+ } | |
+ goto st20; | |
+st22: | |
+ if ( ++p == pe ) | |
+ goto _test_eof22; | |
+case 22: | |
+ if ( (*p) == 10 ) | |
+ goto st8; | |
+ goto st22; | |
+st23: | |
+ if ( ++p == pe ) | |
+ goto _test_eof23; | |
+case 23: | |
+ switch( (*p) ) { | |
+ case 42: goto st24; | |
+ case 47: goto st26; | |
+ } | |
+ goto st0; | |
+st24: | |
+ if ( ++p == pe ) | |
+ goto _test_eof24; | |
+case 24: | |
+ if ( (*p) == 42 ) | |
+ goto st25; | |
+ goto st24; | |
+st25: | |
+ if ( ++p == pe ) | |
+ goto _test_eof25; | |
+case 25: | |
+ switch( (*p) ) { | |
+ case 42: goto st25; | |
+ case 47: goto st2; | |
+ } | |
+ goto st24; | |
+st26: | |
+ if ( ++p == pe ) | |
+ goto _test_eof26; | |
+case 26: | |
+ if ( (*p) == 10 ) | |
+ goto st2; | |
+ goto st26; | |
+ } | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof9: cs = 9; goto _test_eof; | |
+ _test_eof10: cs = 10; goto _test_eof; | |
+ _test_eof11: cs = 11; goto _test_eof; | |
+ _test_eof12: cs = 12; goto _test_eof; | |
+ _test_eof13: cs = 13; goto _test_eof; | |
+ _test_eof14: cs = 14; goto _test_eof; | |
+ _test_eof15: cs = 15; goto _test_eof; | |
+ _test_eof16: cs = 16; goto _test_eof; | |
+ _test_eof17: cs = 17; goto _test_eof; | |
+ _test_eof18: cs = 18; goto _test_eof; | |
+ _test_eof27: cs = 27; goto _test_eof; | |
+ _test_eof19: cs = 19; goto _test_eof; | |
+ _test_eof20: cs = 20; goto _test_eof; | |
+ _test_eof21: cs = 21; goto _test_eof; | |
+ _test_eof22: cs = 22; goto _test_eof; | |
+ _test_eof23: cs = 23; goto _test_eof; | |
+ _test_eof24: cs = 24; goto _test_eof; | |
+ _test_eof25: cs = 25; goto _test_eof; | |
+ _test_eof26: cs = 26; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 112 "parser.rl" | |
+ | |
+ if (cs >= JSON_object_first_final) { | |
+ if (RTEST(json->create_id)) { | |
+ VALUE klassname = rb_hash_aref(*result, json->create_id); | |
+ if (!NIL_P(klassname)) { | |
+ VALUE klass = rb_path2class(StringValueCStr(klassname)); | |
+ if RTEST(rb_funcall(klass, i_json_creatable_p, 0)) { | |
+ *result = rb_funcall(klass, i_json_create, 1, *result); | |
+ } | |
+ } | |
+ } | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+#line 405 "parser.c" | |
+static const int JSON_value_start = 1; | |
+static const int JSON_value_first_final = 21; | |
+static const int JSON_value_error = 0; | |
+ | |
+static const int JSON_value_en_main = 1; | |
+ | |
+#line 210 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ | |
+#line 420 "parser.c" | |
+ { | |
+ cs = JSON_value_start; | |
+ } | |
+#line 217 "parser.rl" | |
+ | |
+#line 426 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ switch( (*p) ) { | |
+ case 34: goto tr0; | |
+ case 45: goto tr2; | |
+ case 73: goto st2; | |
+ case 78: goto st9; | |
+ case 91: goto tr5; | |
+ case 102: goto st11; | |
+ case 110: goto st15; | |
+ case 116: goto st18; | |
+ case 123: goto tr9; | |
+ } | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto tr2; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+tr0: | |
+#line 158 "parser.rl" | |
+ { | |
+ char *np = JSON_parse_string(json, p, pe, result); | |
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st21; | |
+tr2: | |
+#line 163 "parser.rl" | |
+ { | |
+ char *np; | |
+ if(pe > p + 9 && !strncmp(MinusInfinity, p, 9)) { | |
+ if (json->allow_nan) { | |
+ *result = CMinusInfinity; | |
+ {p = (( p + 10))-1;} | |
+ p--; {p++; cs = 21; goto _out;} | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+ } | |
+ np = JSON_parse_float(json, p, pe, result); | |
+ if (np != NULL) {p = (( np))-1;} | |
+ np = JSON_parse_integer(json, p, pe, result); | |
+ if (np != NULL) {p = (( np))-1;} | |
+ p--; {p++; cs = 21; goto _out;} | |
+ } | |
+ goto st21; | |
+tr5: | |
+#line 181 "parser.rl" | |
+ { | |
+ char *np; | |
+ json->current_nesting++; | |
+ np = JSON_parse_array(json, p, pe, result); | |
+ json->current_nesting--; | |
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st21; | |
+tr9: | |
+#line 189 "parser.rl" | |
+ { | |
+ char *np; | |
+ json->current_nesting++; | |
+ np = JSON_parse_object(json, p, pe, result); | |
+ json->current_nesting--; | |
+ if (np == NULL) { p--; {p++; cs = 21; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st21; | |
+tr16: | |
+#line 151 "parser.rl" | |
+ { | |
+ if (json->allow_nan) { | |
+ *result = CInfinity; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); | |
+ } | |
+ } | |
+ goto st21; | |
+tr18: | |
+#line 144 "parser.rl" | |
+ { | |
+ if (json->allow_nan) { | |
+ *result = CNaN; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); | |
+ } | |
+ } | |
+ goto st21; | |
+tr22: | |
+#line 138 "parser.rl" | |
+ { | |
+ *result = Qfalse; | |
+ } | |
+ goto st21; | |
+tr25: | |
+#line 135 "parser.rl" | |
+ { | |
+ *result = Qnil; | |
+ } | |
+ goto st21; | |
+tr28: | |
+#line 141 "parser.rl" | |
+ { | |
+ *result = Qtrue; | |
+ } | |
+ goto st21; | |
+st21: | |
+ if ( ++p == pe ) | |
+ goto _test_eof21; | |
+case 21: | |
+#line 197 "parser.rl" | |
+ { p--; {p++; cs = 21; goto _out;} } | |
+#line 541 "parser.c" | |
+ goto st0; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ if ( (*p) == 110 ) | |
+ goto st3; | |
+ goto st0; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+ if ( (*p) == 102 ) | |
+ goto st4; | |
+ goto st0; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ if ( (*p) == 105 ) | |
+ goto st5; | |
+ goto st0; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ if ( (*p) == 110 ) | |
+ goto st6; | |
+ goto st0; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ if ( (*p) == 105 ) | |
+ goto st7; | |
+ goto st0; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ if ( (*p) == 116 ) | |
+ goto st8; | |
+ goto st0; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+ if ( (*p) == 121 ) | |
+ goto tr16; | |
+ goto st0; | |
+st9: | |
+ if ( ++p == pe ) | |
+ goto _test_eof9; | |
+case 9: | |
+ if ( (*p) == 97 ) | |
+ goto st10; | |
+ goto st0; | |
+st10: | |
+ if ( ++p == pe ) | |
+ goto _test_eof10; | |
+case 10: | |
+ if ( (*p) == 78 ) | |
+ goto tr18; | |
+ goto st0; | |
+st11: | |
+ if ( ++p == pe ) | |
+ goto _test_eof11; | |
+case 11: | |
+ if ( (*p) == 97 ) | |
+ goto st12; | |
+ goto st0; | |
+st12: | |
+ if ( ++p == pe ) | |
+ goto _test_eof12; | |
+case 12: | |
+ if ( (*p) == 108 ) | |
+ goto st13; | |
+ goto st0; | |
+st13: | |
+ if ( ++p == pe ) | |
+ goto _test_eof13; | |
+case 13: | |
+ if ( (*p) == 115 ) | |
+ goto st14; | |
+ goto st0; | |
+st14: | |
+ if ( ++p == pe ) | |
+ goto _test_eof14; | |
+case 14: | |
+ if ( (*p) == 101 ) | |
+ goto tr22; | |
+ goto st0; | |
+st15: | |
+ if ( ++p == pe ) | |
+ goto _test_eof15; | |
+case 15: | |
+ if ( (*p) == 117 ) | |
+ goto st16; | |
+ goto st0; | |
+st16: | |
+ if ( ++p == pe ) | |
+ goto _test_eof16; | |
+case 16: | |
+ if ( (*p) == 108 ) | |
+ goto st17; | |
+ goto st0; | |
+st17: | |
+ if ( ++p == pe ) | |
+ goto _test_eof17; | |
+case 17: | |
+ if ( (*p) == 108 ) | |
+ goto tr25; | |
+ goto st0; | |
+st18: | |
+ if ( ++p == pe ) | |
+ goto _test_eof18; | |
+case 18: | |
+ if ( (*p) == 114 ) | |
+ goto st19; | |
+ goto st0; | |
+st19: | |
+ if ( ++p == pe ) | |
+ goto _test_eof19; | |
+case 19: | |
+ if ( (*p) == 117 ) | |
+ goto st20; | |
+ goto st0; | |
+st20: | |
+ if ( ++p == pe ) | |
+ goto _test_eof20; | |
+case 20: | |
+ if ( (*p) == 101 ) | |
+ goto tr28; | |
+ goto st0; | |
+ } | |
+ _test_eof21: cs = 21; goto _test_eof; | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof9: cs = 9; goto _test_eof; | |
+ _test_eof10: cs = 10; goto _test_eof; | |
+ _test_eof11: cs = 11; goto _test_eof; | |
+ _test_eof12: cs = 12; goto _test_eof; | |
+ _test_eof13: cs = 13; goto _test_eof; | |
+ _test_eof14: cs = 14; goto _test_eof; | |
+ _test_eof15: cs = 15; goto _test_eof; | |
+ _test_eof16: cs = 16; goto _test_eof; | |
+ _test_eof17: cs = 17; goto _test_eof; | |
+ _test_eof18: cs = 18; goto _test_eof; | |
+ _test_eof19: cs = 19; goto _test_eof; | |
+ _test_eof20: cs = 20; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 218 "parser.rl" | |
+ | |
+ if (cs >= JSON_value_first_final) { | |
+ return p; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+#line 711 "parser.c" | |
+static const int JSON_integer_start = 1; | |
+static const int JSON_integer_first_final = 5; | |
+static const int JSON_integer_error = 0; | |
+ | |
+static const int JSON_integer_en_main = 1; | |
+ | |
+#line 234 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ | |
+#line 726 "parser.c" | |
+ { | |
+ cs = JSON_integer_start; | |
+ } | |
+#line 241 "parser.rl" | |
+ json->memo = p; | |
+ | |
+#line 733 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ switch( (*p) ) { | |
+ case 45: goto st2; | |
+ case 48: goto st3; | |
+ } | |
+ if ( 49 <= (*p) && (*p) <= 57 ) | |
+ goto st4; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ if ( (*p) == 48 ) | |
+ goto st3; | |
+ if ( 49 <= (*p) && (*p) <= 57 ) | |
+ goto st4; | |
+ goto st0; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st0; | |
+ goto tr4; | |
+tr4: | |
+#line 231 "parser.rl" | |
+ { p--; {p++; cs = 5; goto _out;} } | |
+ goto st5; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+#line 774 "parser.c" | |
+ goto st0; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st4; | |
+ goto tr4; | |
+ } | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 243 "parser.rl" | |
+ | |
+ if (cs >= JSON_integer_first_final) { | |
+ long len = p - json->memo; | |
+ *result = rb_Integer(rb_str_new(json->memo, len)); | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+#line 804 "parser.c" | |
+static const int JSON_float_start = 1; | |
+static const int JSON_float_first_final = 10; | |
+static const int JSON_float_error = 0; | |
+ | |
+static const int JSON_float_en_main = 1; | |
+ | |
+#line 265 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ | |
+#line 819 "parser.c" | |
+ { | |
+ cs = JSON_float_start; | |
+ } | |
+#line 272 "parser.rl" | |
+ json->memo = p; | |
+ | |
+#line 826 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ switch( (*p) ) { | |
+ case 45: goto st2; | |
+ case 48: goto st3; | |
+ } | |
+ if ( 49 <= (*p) && (*p) <= 57 ) | |
+ goto st9; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ if ( (*p) == 48 ) | |
+ goto st3; | |
+ if ( 49 <= (*p) && (*p) <= 57 ) | |
+ goto st9; | |
+ goto st0; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+ switch( (*p) ) { | |
+ case 46: goto st4; | |
+ case 69: goto st6; | |
+ case 101: goto st6; | |
+ } | |
+ goto st0; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st5; | |
+ goto st0; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ switch( (*p) ) { | |
+ case 69: goto st6; | |
+ case 101: goto st6; | |
+ } | |
+ if ( (*p) > 46 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st5; | |
+ } else if ( (*p) >= 45 ) | |
+ goto st0; | |
+ goto tr7; | |
+tr7: | |
+#line 259 "parser.rl" | |
+ { p--; {p++; cs = 10; goto _out;} } | |
+ goto st10; | |
+st10: | |
+ if ( ++p == pe ) | |
+ goto _test_eof10; | |
+case 10: | |
+#line 891 "parser.c" | |
+ goto st0; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ switch( (*p) ) { | |
+ case 43: goto st7; | |
+ case 45: goto st7; | |
+ } | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st8; | |
+ goto st0; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st8; | |
+ goto st0; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+ switch( (*p) ) { | |
+ case 69: goto st0; | |
+ case 101: goto st0; | |
+ } | |
+ if ( (*p) > 46 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st8; | |
+ } else if ( (*p) >= 45 ) | |
+ goto st0; | |
+ goto tr7; | |
+st9: | |
+ if ( ++p == pe ) | |
+ goto _test_eof9; | |
+case 9: | |
+ switch( (*p) ) { | |
+ case 46: goto st4; | |
+ case 69: goto st6; | |
+ case 101: goto st6; | |
+ } | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st9; | |
+ goto st0; | |
+ } | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof10: cs = 10; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof9: cs = 9; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 274 "parser.rl" | |
+ | |
+ if (cs >= JSON_float_first_final) { | |
+ long len = p - json->memo; | |
+ *result = rb_Float(rb_str_new(json->memo, len)); | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+ | |
+#line 964 "parser.c" | |
+static const int JSON_array_start = 1; | |
+static const int JSON_array_first_final = 17; | |
+static const int JSON_array_error = 0; | |
+ | |
+static const int JSON_array_en_main = 1; | |
+ | |
+#line 310 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ if (json->max_nesting && json->current_nesting > json->max_nesting) { | |
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); | |
+ } | |
+ *result = rb_ary_new(); | |
+ | |
+ | |
+#line 984 "parser.c" | |
+ { | |
+ cs = JSON_array_start; | |
+ } | |
+#line 322 "parser.rl" | |
+ | |
+#line 990 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ if ( (*p) == 91 ) | |
+ goto st2; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ switch( (*p) ) { | |
+ case 13: goto st2; | |
+ case 32: goto st2; | |
+ case 34: goto tr2; | |
+ case 45: goto tr2; | |
+ case 47: goto st13; | |
+ case 73: goto tr2; | |
+ case 78: goto tr2; | |
+ case 91: goto tr2; | |
+ case 93: goto tr4; | |
+ case 102: goto tr2; | |
+ case 110: goto tr2; | |
+ case 116: goto tr2; | |
+ case 123: goto tr2; | |
+ } | |
+ if ( (*p) > 10 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto tr2; | |
+ } else if ( (*p) >= 9 ) | |
+ goto st2; | |
+ goto st0; | |
+tr2: | |
+#line 291 "parser.rl" | |
+ { | |
+ VALUE v = Qnil; | |
+ char *np = JSON_parse_value(json, p, pe, &v); | |
+ if (np == NULL) { | |
+ p--; {p++; cs = 3; goto _out;} | |
+ } else { | |
+ rb_ary_push(*result, v); | |
+ {p = (( np))-1;} | |
+ } | |
+ } | |
+ goto st3; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+#line 1045 "parser.c" | |
+ switch( (*p) ) { | |
+ case 13: goto st3; | |
+ case 32: goto st3; | |
+ case 44: goto st4; | |
+ case 47: goto st9; | |
+ case 93: goto tr4; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st3; | |
+ goto st0; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ switch( (*p) ) { | |
+ case 13: goto st4; | |
+ case 32: goto st4; | |
+ case 34: goto tr2; | |
+ case 45: goto tr2; | |
+ case 47: goto st5; | |
+ case 73: goto tr2; | |
+ case 78: goto tr2; | |
+ case 91: goto tr2; | |
+ case 102: goto tr2; | |
+ case 110: goto tr2; | |
+ case 116: goto tr2; | |
+ case 123: goto tr2; | |
+ } | |
+ if ( (*p) > 10 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto tr2; | |
+ } else if ( (*p) >= 9 ) | |
+ goto st4; | |
+ goto st0; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ switch( (*p) ) { | |
+ case 42: goto st6; | |
+ case 47: goto st8; | |
+ } | |
+ goto st0; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ if ( (*p) == 42 ) | |
+ goto st7; | |
+ goto st6; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ switch( (*p) ) { | |
+ case 42: goto st7; | |
+ case 47: goto st4; | |
+ } | |
+ goto st6; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+ if ( (*p) == 10 ) | |
+ goto st4; | |
+ goto st8; | |
+st9: | |
+ if ( ++p == pe ) | |
+ goto _test_eof9; | |
+case 9: | |
+ switch( (*p) ) { | |
+ case 42: goto st10; | |
+ case 47: goto st12; | |
+ } | |
+ goto st0; | |
+st10: | |
+ if ( ++p == pe ) | |
+ goto _test_eof10; | |
+case 10: | |
+ if ( (*p) == 42 ) | |
+ goto st11; | |
+ goto st10; | |
+st11: | |
+ if ( ++p == pe ) | |
+ goto _test_eof11; | |
+case 11: | |
+ switch( (*p) ) { | |
+ case 42: goto st11; | |
+ case 47: goto st3; | |
+ } | |
+ goto st10; | |
+st12: | |
+ if ( ++p == pe ) | |
+ goto _test_eof12; | |
+case 12: | |
+ if ( (*p) == 10 ) | |
+ goto st3; | |
+ goto st12; | |
+tr4: | |
+#line 302 "parser.rl" | |
+ { p--; {p++; cs = 17; goto _out;} } | |
+ goto st17; | |
+st17: | |
+ if ( ++p == pe ) | |
+ goto _test_eof17; | |
+case 17: | |
+#line 1152 "parser.c" | |
+ goto st0; | |
+st13: | |
+ if ( ++p == pe ) | |
+ goto _test_eof13; | |
+case 13: | |
+ switch( (*p) ) { | |
+ case 42: goto st14; | |
+ case 47: goto st16; | |
+ } | |
+ goto st0; | |
+st14: | |
+ if ( ++p == pe ) | |
+ goto _test_eof14; | |
+case 14: | |
+ if ( (*p) == 42 ) | |
+ goto st15; | |
+ goto st14; | |
+st15: | |
+ if ( ++p == pe ) | |
+ goto _test_eof15; | |
+case 15: | |
+ switch( (*p) ) { | |
+ case 42: goto st15; | |
+ case 47: goto st2; | |
+ } | |
+ goto st14; | |
+st16: | |
+ if ( ++p == pe ) | |
+ goto _test_eof16; | |
+case 16: | |
+ if ( (*p) == 10 ) | |
+ goto st2; | |
+ goto st16; | |
+ } | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof9: cs = 9; goto _test_eof; | |
+ _test_eof10: cs = 10; goto _test_eof; | |
+ _test_eof11: cs = 11; goto _test_eof; | |
+ _test_eof12: cs = 12; goto _test_eof; | |
+ _test_eof17: cs = 17; goto _test_eof; | |
+ _test_eof13: cs = 13; goto _test_eof; | |
+ _test_eof14: cs = 14; goto _test_eof; | |
+ _test_eof15: cs = 15; goto _test_eof; | |
+ _test_eof16: cs = 16; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 323 "parser.rl" | |
+ | |
+ if(cs >= JSON_array_first_final) { | |
+ return p + 1; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+} | |
+ | |
+static VALUE json_string_unescape(char *p, char *pe) | |
+{ | |
+ VALUE result = rb_str_buf_new(pe - p + 1); | |
+ | |
+ while (p < pe) { | |
+ if (*p == '\\') { | |
+ p++; | |
+ if (p >= pe) return Qnil; /* raise an exception later, \ at end */ | |
+ switch (*p) { | |
+ case '"': | |
+ case '\\': | |
+ rb_str_buf_cat(result, p, 1); | |
+ p++; | |
+ break; | |
+ case 'b': | |
+ rb_str_buf_cat2(result, "\b"); | |
+ p++; | |
+ break; | |
+ case 'f': | |
+ rb_str_buf_cat2(result, "\f"); | |
+ p++; | |
+ break; | |
+ case 'n': | |
+ rb_str_buf_cat2(result, "\n"); | |
+ p++; | |
+ break; | |
+ case 'r': | |
+ rb_str_buf_cat2(result, "\r"); | |
+ p++; | |
+ break; | |
+ case 't': | |
+ rb_str_buf_cat2(result, "\t"); | |
+ p++; | |
+ break; | |
+ case 'u': | |
+ if (p > pe - 4) { | |
+ return Qnil; | |
+ } else { | |
+ p = JSON_convert_UTF16_to_UTF8(result, p, pe, strictConversion); | |
+ } | |
+ break; | |
+ default: | |
+ rb_str_buf_cat(result, p, 1); | |
+ p++; | |
+ break; | |
+ } | |
+ } else { | |
+ char *q = p; | |
+ while (*q != '\\' && q < pe) q++; | |
+ rb_str_buf_cat(result, p, q - p); | |
+ p = q; | |
+ } | |
+ } | |
+ return result; | |
+} | |
+ | |
+ | |
+#line 1273 "parser.c" | |
+static const int JSON_string_start = 1; | |
+static const int JSON_string_first_final = 8; | |
+static const int JSON_string_error = 0; | |
+ | |
+static const int JSON_string_en_main = 1; | |
+ | |
+#line 401 "parser.rl" | |
+ | |
+ | |
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ *result = rb_str_new("", 0); | |
+ | |
+#line 1289 "parser.c" | |
+ { | |
+ cs = JSON_string_start; | |
+ } | |
+#line 409 "parser.rl" | |
+ json->memo = p; | |
+ | |
+#line 1296 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+case 1: | |
+ if ( (*p) == 34 ) | |
+ goto st2; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ switch( (*p) ) { | |
+ case 34: goto tr2; | |
+ case 92: goto st3; | |
+ } | |
+ if ( 0 <= (*p) && (*p) <= 31 ) | |
+ goto st0; | |
+ goto st2; | |
+tr2: | |
+#line 393 "parser.rl" | |
+ { | |
+ *result = json_string_unescape(json->memo + 1, p); | |
+ if (NIL_P(*result)) { p--; {p++; cs = 8; goto _out;} } else {p = (( p + 1))-1;} | |
+ } | |
+#line 398 "parser.rl" | |
+ { p--; {p++; cs = 8; goto _out;} } | |
+ goto st8; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+#line 1333 "parser.c" | |
+ goto st0; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+ if ( (*p) == 117 ) | |
+ goto st4; | |
+ if ( 0 <= (*p) && (*p) <= 31 ) | |
+ goto st0; | |
+ goto st2; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ if ( (*p) < 65 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st5; | |
+ } else if ( (*p) > 70 ) { | |
+ if ( 97 <= (*p) && (*p) <= 102 ) | |
+ goto st5; | |
+ } else | |
+ goto st5; | |
+ goto st0; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ if ( (*p) < 65 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st6; | |
+ } else if ( (*p) > 70 ) { | |
+ if ( 97 <= (*p) && (*p) <= 102 ) | |
+ goto st6; | |
+ } else | |
+ goto st6; | |
+ goto st0; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ if ( (*p) < 65 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st7; | |
+ } else if ( (*p) > 70 ) { | |
+ if ( 97 <= (*p) && (*p) <= 102 ) | |
+ goto st7; | |
+ } else | |
+ goto st7; | |
+ goto st0; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ if ( (*p) < 65 ) { | |
+ if ( 48 <= (*p) && (*p) <= 57 ) | |
+ goto st2; | |
+ } else if ( (*p) > 70 ) { | |
+ if ( 97 <= (*p) && (*p) <= 102 ) | |
+ goto st2; | |
+ } else | |
+ goto st2; | |
+ goto st0; | |
+ } | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 411 "parser.rl" | |
+ | |
+ if (cs >= JSON_string_first_final) { | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+ | |
+#line 1419 "parser.c" | |
+static const int JSON_start = 1; | |
+static const int JSON_first_final = 10; | |
+static const int JSON_error = 0; | |
+ | |
+static const int JSON_en_main = 1; | |
+ | |
+#line 445 "parser.rl" | |
+ | |
+ | |
+/* | |
+ * Document-class: JSON::Ext::Parser | |
+ * | |
+ * This is the JSON parser implemented as a C extension. It can be configured | |
+ * to be used by setting | |
+ * | |
+ * JSON.parser = JSON::Ext::Parser | |
+ * | |
+ * with the method parser= in JSON. | |
+ * | |
+ */ | |
+ | |
+/* | |
+ * call-seq: new(source, opts => {}) | |
+ * | |
+ * Creates a new JSON::Ext::Parser instance for the string _source_. | |
+ * | |
+ * It will be configured by the _opts_ hash. _opts_ can have the following | |
+ * keys: | |
+ * * *max_nesting*: The maximum depth of nesting allowed in the parsed data | |
+ * structures. Disable depth checking with :max_nesting => false|nil|0; it | |
+ * defaults to 19. | |
+ * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in | |
+ * defiance of RFC 4627 to be parsed by the Parser. This option defaults to | |
+ * false. | |
+ * * *create_additions*: If set to false, the Parser doesn't create | |
+ * additions even if a matching class and create_id were found. This option | |
+ * defaults to true. | |
+ */ | |
+static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ char *ptr; | |
+ long len; | |
+ VALUE source, opts; | |
+ GET_STRUCT; | |
+ rb_scan_args(argc, argv, "11", &source, &opts); | |
+ source = StringValue(source); | |
+ ptr = RSTRING_PTR(source); | |
+ len = RSTRING_LEN(source); | |
+ if (len < 2) { | |
+ rb_raise(eParserError, "A JSON text must at least contain two octets!"); | |
+ } | |
+ if (!NIL_P(opts)) { | |
+ opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); | |
+ if (NIL_P(opts)) { | |
+ rb_raise(rb_eArgError, "opts needs to be like a hash"); | |
+ } else { | |
+ VALUE tmp = ID2SYM(i_max_nesting); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE max_nesting = rb_hash_aref(opts, tmp); | |
+ if (RTEST(max_nesting)) { | |
+ Check_Type(max_nesting, T_FIXNUM); | |
+ json->max_nesting = FIX2INT(max_nesting); | |
+ } else { | |
+ json->max_nesting = 0; | |
+ } | |
+ } else { | |
+ json->max_nesting = 19; | |
+ } | |
+ tmp = ID2SYM(i_allow_nan); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE allow_nan = rb_hash_aref(opts, tmp); | |
+ json->allow_nan = RTEST(allow_nan) ? 1 : 0; | |
+ } else { | |
+ json->allow_nan = 0; | |
+ } | |
+ tmp = ID2SYM(i_create_additions); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE create_additions = rb_hash_aref(opts, tmp); | |
+ if (RTEST(create_additions)) { | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } else { | |
+ json->create_id = Qnil; | |
+ } | |
+ } else { | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } | |
+ } | |
+ } else { | |
+ json->max_nesting = 19; | |
+ json->allow_nan = 0; | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } | |
+ json->current_nesting = 0; | |
+ /* | |
+ Convert these? | |
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } | |
+ */ | |
+ json->len = len; | |
+ json->source = ptr; | |
+ json->Vsource = source; | |
+ return self; | |
+} | |
+ | |
+/* | |
+ * call-seq: parse() | |
+ * | |
+ * Parses the current JSON text _source_ and returns the complete data | |
+ * structure as a result. | |
+ */ | |
+static VALUE cParser_parse(VALUE self) | |
+{ | |
+ char *p, *pe; | |
+ int cs = EVIL; | |
+ VALUE result = Qnil; | |
+ GET_STRUCT; | |
+ | |
+ | |
+#line 1549 "parser.c" | |
+ { | |
+ cs = JSON_start; | |
+ } | |
+#line 567 "parser.rl" | |
+ p = json->source; | |
+ pe = p + json->len; | |
+ | |
+#line 1557 "parser.c" | |
+ { | |
+ if ( p == pe ) | |
+ goto _test_eof; | |
+ switch ( cs ) | |
+ { | |
+st1: | |
+ if ( ++p == pe ) | |
+ goto _test_eof1; | |
+case 1: | |
+ switch( (*p) ) { | |
+ case 13: goto st1; | |
+ case 32: goto st1; | |
+ case 47: goto st2; | |
+ case 91: goto tr3; | |
+ case 123: goto tr4; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st1; | |
+ goto st0; | |
+st0: | |
+cs = 0; | |
+ goto _out; | |
+st2: | |
+ if ( ++p == pe ) | |
+ goto _test_eof2; | |
+case 2: | |
+ switch( (*p) ) { | |
+ case 42: goto st3; | |
+ case 47: goto st5; | |
+ } | |
+ goto st0; | |
+st3: | |
+ if ( ++p == pe ) | |
+ goto _test_eof3; | |
+case 3: | |
+ if ( (*p) == 42 ) | |
+ goto st4; | |
+ goto st3; | |
+st4: | |
+ if ( ++p == pe ) | |
+ goto _test_eof4; | |
+case 4: | |
+ switch( (*p) ) { | |
+ case 42: goto st4; | |
+ case 47: goto st1; | |
+ } | |
+ goto st3; | |
+st5: | |
+ if ( ++p == pe ) | |
+ goto _test_eof5; | |
+case 5: | |
+ if ( (*p) == 10 ) | |
+ goto st1; | |
+ goto st5; | |
+tr3: | |
+#line 434 "parser.rl" | |
+ { | |
+ char *np; | |
+ json->current_nesting = 1; | |
+ np = JSON_parse_array(json, p, pe, &result); | |
+ if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st10; | |
+tr4: | |
+#line 427 "parser.rl" | |
+ { | |
+ char *np; | |
+ json->current_nesting = 1; | |
+ np = JSON_parse_object(json, p, pe, &result); | |
+ if (np == NULL) { p--; {p++; cs = 10; goto _out;} } else {p = (( np))-1;} | |
+ } | |
+ goto st10; | |
+st10: | |
+ if ( ++p == pe ) | |
+ goto _test_eof10; | |
+case 10: | |
+#line 1634 "parser.c" | |
+ switch( (*p) ) { | |
+ case 13: goto st10; | |
+ case 32: goto st10; | |
+ case 47: goto st6; | |
+ } | |
+ if ( 9 <= (*p) && (*p) <= 10 ) | |
+ goto st10; | |
+ goto st0; | |
+st6: | |
+ if ( ++p == pe ) | |
+ goto _test_eof6; | |
+case 6: | |
+ switch( (*p) ) { | |
+ case 42: goto st7; | |
+ case 47: goto st9; | |
+ } | |
+ goto st0; | |
+st7: | |
+ if ( ++p == pe ) | |
+ goto _test_eof7; | |
+case 7: | |
+ if ( (*p) == 42 ) | |
+ goto st8; | |
+ goto st7; | |
+st8: | |
+ if ( ++p == pe ) | |
+ goto _test_eof8; | |
+case 8: | |
+ switch( (*p) ) { | |
+ case 42: goto st8; | |
+ case 47: goto st10; | |
+ } | |
+ goto st7; | |
+st9: | |
+ if ( ++p == pe ) | |
+ goto _test_eof9; | |
+case 9: | |
+ if ( (*p) == 10 ) | |
+ goto st10; | |
+ goto st9; | |
+ } | |
+ _test_eof1: cs = 1; goto _test_eof; | |
+ _test_eof2: cs = 2; goto _test_eof; | |
+ _test_eof3: cs = 3; goto _test_eof; | |
+ _test_eof4: cs = 4; goto _test_eof; | |
+ _test_eof5: cs = 5; goto _test_eof; | |
+ _test_eof10: cs = 10; goto _test_eof; | |
+ _test_eof6: cs = 6; goto _test_eof; | |
+ _test_eof7: cs = 7; goto _test_eof; | |
+ _test_eof8: cs = 8; goto _test_eof; | |
+ _test_eof9: cs = 9; goto _test_eof; | |
+ | |
+ _test_eof: {} | |
+ _out: {} | |
+ } | |
+#line 570 "parser.rl" | |
+ | |
+ if (cs >= JSON_first_final && p == pe) { | |
+ return result; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+} | |
+ | |
+inline static JSON_Parser *JSON_allocate() | |
+{ | |
+ JSON_Parser *json = ALLOC(JSON_Parser); | |
+ MEMZERO(json, JSON_Parser, 1); | |
+ return json; | |
+} | |
+ | |
+static void JSON_mark(JSON_Parser *json) | |
+{ | |
+ rb_gc_mark_maybe(json->Vsource); | |
+ rb_gc_mark_maybe(json->create_id); | |
+} | |
+ | |
+static void JSON_free(JSON_Parser *json) | |
+{ | |
+ free(json); | |
+} | |
+ | |
+static VALUE cJSON_parser_s_allocate(VALUE klass) | |
+{ | |
+ JSON_Parser *json = JSON_allocate(); | |
+ return Data_Wrap_Struct(klass, JSON_mark, JSON_free, json); | |
+} | |
+ | |
+/* | |
+ * call-seq: source() | |
+ * | |
+ * Returns a copy of the current _source_ string that was used to construct | |
+ * this Parser. | |
+ */ | |
+static VALUE cParser_source(VALUE self) | |
+{ | |
+ GET_STRUCT; | |
+ return rb_str_dup(json->Vsource); | |
+} | |
+ | |
+void Init_parser() | |
+{ | |
+ rb_require("json/common"); | |
+ mJSON = rb_define_module("JSON"); | |
+ mExt = rb_define_module_under(mJSON, "Ext"); | |
+ cParser = rb_define_class_under(mExt, "Parser", rb_cObject); | |
+ eParserError = rb_path2class("JSON::ParserError"); | |
+ eNestingError = rb_path2class("JSON::NestingError"); | |
+ rb_define_alloc_func(cParser, cJSON_parser_s_allocate); | |
+ rb_define_method(cParser, "initialize", cParser_initialize, -1); | |
+ rb_define_method(cParser, "parse", cParser_parse, 0); | |
+ rb_define_method(cParser, "source", cParser_source, 0); | |
+ | |
+ CNaN = rb_const_get(mJSON, rb_intern("NaN")); | |
+ CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); | |
+ CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); | |
+ | |
+ i_json_creatable_p = rb_intern("json_creatable?"); | |
+ i_json_create = rb_intern("json_create"); | |
+ i_create_id = rb_intern("create_id"); | |
+ i_create_additions = rb_intern("create_additions"); | |
+ i_chr = rb_intern("chr"); | |
+ i_max_nesting = rb_intern("max_nesting"); | |
+ i_allow_nan = rb_intern("allow_nan"); | |
+} | |
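Taken together, cJSON_parser_s_allocate, cParser_initialize and cParser_parse implement what JSON::Ext::Parser.new(source, opts).parse does from Ruby. A compressed sketch of that call sequence from the C side (illustrative only: these functions are static, so this would only compile inside parser.c itself, and it assumes Init_parser has already run so cParser is set):

static VALUE parse_with_defaults(VALUE source)
{
    VALUE argv[1];
    VALUE parser = cJSON_parser_s_allocate(cParser); /* zeroed JSON_Parser behind a Data_Wrap_Struct */

    argv[0] = source;
    cParser_initialize(1, argv, parser);             /* defaults: max_nesting 19, allow_nan off */
    return cParser_parse(parser);                    /* top-level Hash or Array, or raises JSON::ParserError */
}

From Ruby, the same sequence is simply JSON::Ext::Parser.new(source).parse.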
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.rl b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.rl | |
new file mode 100644 | |
index 0000000..f6f830a | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/parser.rl | |
@@ -0,0 +1,638 @@ | |
+#include "ruby.h" | |
+#include "re.h" | |
+#include "st.h" | |
+#include "unicode.h" | |
+ | |
+#define EVIL 0x666 | |
+ | |
+static VALUE mJSON, mExt, cParser, eParserError, eNestingError; | |
+static VALUE CNaN, CInfinity, CMinusInfinity; | |
+ | |
+static ID i_json_creatable_p, i_json_create, i_create_id, i_create_additions, | |
+ i_chr, i_max_nesting, i_allow_nan; | |
+ | |
+#define MinusInfinity "-Infinity" | |
+ | |
+typedef struct JSON_ParserStruct { | |
+ VALUE Vsource; | |
+ char *source; | |
+ long len; | |
+ char *memo; | |
+ VALUE create_id; | |
+ int max_nesting; | |
+ int current_nesting; | |
+ int allow_nan; | |
+} JSON_Parser; | |
+ | |
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result); | |
+ | |
+#define GET_STRUCT \ | |
+ JSON_Parser *json; \ | |
+ Data_Get_Struct(self, JSON_Parser, json); | |
+ | |
+%%{ | |
+ machine JSON_common; | |
+ | |
+ cr = '\n'; | |
+ cr_neg = [^\n]; | |
+ ws = [ \t\r\n]; | |
+ c_comment = '/*' ( any* - (any* '*/' any* ) ) '*/'; | |
+ cpp_comment = '//' cr_neg* cr; | |
+ comment = c_comment | cpp_comment; | |
+ ignore = ws | comment; | |
+ name_separator = ':'; | |
+ value_separator = ','; | |
+ Vnull = 'null'; | |
+ Vfalse = 'false'; | |
+ Vtrue = 'true'; | |
+ VNaN = 'NaN'; | |
+ VInfinity = 'Infinity'; | |
+ VMinusInfinity = '-Infinity'; | |
+ begin_value = [nft"\-[{NI] | digit; | |
+ begin_object = '{'; | |
+ end_object = '}'; | |
+ begin_array = '['; | |
+ end_array = ']'; | |
+ begin_string = '"'; | |
+ begin_name = begin_string; | |
+ begin_number = digit | '-'; | |
+}%% | |
+ | |
+%%{ | |
+ machine JSON_object; | |
+ include JSON_common; | |
+ | |
+ write data; | |
+ | |
+ action parse_value { | |
+ VALUE v = Qnil; | |
+ char *np = JSON_parse_value(json, fpc, pe, &v); | |
+ if (np == NULL) { | |
+ fhold; fbreak; | |
+ } else { | |
+ rb_hash_aset(*result, last_name, v); | |
+ fexec np; | |
+ } | |
+ } | |
+ | |
+ action parse_name { | |
+ char *np = JSON_parse_string(json, fpc, pe, &last_name); | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+ a_pair = ignore* begin_name >parse_name | |
+ ignore* name_separator ignore* | |
+ begin_value >parse_value; | |
+ | |
+ main := begin_object | |
+ (a_pair (ignore* value_separator a_pair)*)? | |
+ ignore* end_object @exit; | |
+}%% | |
+ | |
+static char *JSON_parse_object(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ VALUE last_name = Qnil; | |
+ | |
+ if (json->max_nesting && json->current_nesting > json->max_nesting) { | |
+ rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); | |
+ } | |
+ | |
+ *result = rb_hash_new(); | |
+ | |
+ %% write init; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_object_first_final) { | |
+ if (RTEST(json->create_id)) { | |
+ VALUE klassname = rb_hash_aref(*result, json->create_id); | |
+ if (!NIL_P(klassname)) { | |
+ VALUE klass = rb_path2class(StringValueCStr(klassname)); | |
+ if RTEST(rb_funcall(klass, i_json_creatable_p, 0)) { | |
+ *result = rb_funcall(klass, i_json_create, 1, *result); | |
+ } | |
+ } | |
+ } | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+%%{ | |
+ machine JSON_value; | |
+ include JSON_common; | |
+ | |
+ write data; | |
+ | |
+ action parse_null { | |
+ *result = Qnil; | |
+ } | |
+ action parse_false { | |
+ *result = Qfalse; | |
+ } | |
+ action parse_true { | |
+ *result = Qtrue; | |
+ } | |
+ action parse_nan { | |
+ if (json->allow_nan) { | |
+ *result = CNaN; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 2); | |
+ } | |
+ } | |
+ action parse_infinity { | |
+ if (json->allow_nan) { | |
+ *result = CInfinity; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p - 8); | |
+ } | |
+ } | |
+ action parse_string { | |
+ char *np = JSON_parse_string(json, fpc, pe, result); | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ action parse_number { | |
+ char *np; | |
+ if(pe > fpc + 9 && !strncmp(MinusInfinity, fpc, 9)) { | |
+ if (json->allow_nan) { | |
+ *result = CMinusInfinity; | |
+ fexec p + 10; | |
+ fhold; fbreak; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+ } | |
+ np = JSON_parse_float(json, fpc, pe, result); | |
+ if (np != NULL) fexec np; | |
+ np = JSON_parse_integer(json, fpc, pe, result); | |
+ if (np != NULL) fexec np; | |
+ fhold; fbreak; | |
+ } | |
+ | |
+ action parse_array { | |
+ char *np; | |
+ json->current_nesting++; | |
+ np = JSON_parse_array(json, fpc, pe, result); | |
+ json->current_nesting--; | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ action parse_object { | |
+ char *np; | |
+ json->current_nesting++; | |
+ np = JSON_parse_object(json, fpc, pe, result); | |
+ json->current_nesting--; | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+main := ( | |
+ Vnull @parse_null | | |
+ Vfalse @parse_false | | |
+ Vtrue @parse_true | | |
+ VNaN @parse_nan | | |
+ VInfinity @parse_infinity | | |
+ begin_number >parse_number | | |
+ begin_string >parse_string | | |
+ begin_array >parse_array | | |
+ begin_object >parse_object | |
+ ) %*exit; | |
+}%% | |
+ | |
+static char *JSON_parse_value(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ %% write init; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_value_first_final) { | |
+ return p; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+%%{ | |
+ machine JSON_integer; | |
+ | |
+ write data; | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+ main := '-'? ('0' | [1-9][0-9]*) (^[0-9] @exit); | |
+}%% | |
+ | |
+static char *JSON_parse_integer(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ %% write init; | |
+ json->memo = p; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_integer_first_final) { | |
+ long len = p - json->memo; | |
+ *result = rb_Integer(rb_str_new(json->memo, len)); | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+%%{ | |
+ machine JSON_float; | |
+ include JSON_common; | |
+ | |
+ write data; | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+ main := '-'? ( | |
+ (('0' | [1-9][0-9]*) '.' [0-9]+ ([Ee] [+\-]?[0-9]+)?) | |
+ | (('0' | [1-9][0-9]*) ([Ee] [+\-]?[0-9]+)) | |
+ ) (^[0-9Ee.\-] @exit ); | |
+}%% | |
+ | |
+static char *JSON_parse_float(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ %% write init; | |
+ json->memo = p; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_float_first_final) { | |
+ long len = p - json->memo; | |
+ *result = rb_Float(rb_str_new(json->memo, len)); | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+%%{ | |
+ machine JSON_array; | |
+ include JSON_common; | |
+ | |
+ write data; | |
+ | |
+ action parse_value { | |
+ VALUE v = Qnil; | |
+ char *np = JSON_parse_value(json, fpc, pe, &v); | |
+ if (np == NULL) { | |
+ fhold; fbreak; | |
+ } else { | |
+ rb_ary_push(*result, v); | |
+ fexec np; | |
+ } | |
+ } | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+ next_element = value_separator ignore* begin_value >parse_value; | |
+ | |
+ main := begin_array ignore* | |
+ ((begin_value >parse_value ignore*) | |
+ (ignore* next_element ignore*)*)? | |
+ end_array @exit; | |
+}%% | |
+ | |
+static char *JSON_parse_array(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ if (json->max_nesting && json->current_nesting > json->max_nesting) { | |
+        rb_raise(eNestingError, "nesting of %d is too deep", json->current_nesting); | 
+ } | |
+ *result = rb_ary_new(); | |
+ | |
+ %% write init; | |
+ %% write exec; | |
+ | |
+ if(cs >= JSON_array_first_final) { | |
+ return p + 1; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+} | |
+ | |
+static VALUE json_string_unescape(char *p, char *pe) | |
+{ | |
+ VALUE result = rb_str_buf_new(pe - p + 1); | |
+ | |
+ while (p < pe) { | |
+ if (*p == '\\') { | |
+ p++; | |
+ if (p >= pe) return Qnil; /* raise an exception later, \ at end */ | |
+ switch (*p) { | |
+ case '"': | |
+ case '\\': | |
+ rb_str_buf_cat(result, p, 1); | |
+ p++; | |
+ break; | |
+ case 'b': | |
+ rb_str_buf_cat2(result, "\b"); | |
+ p++; | |
+ break; | |
+ case 'f': | |
+ rb_str_buf_cat2(result, "\f"); | |
+ p++; | |
+ break; | |
+ case 'n': | |
+ rb_str_buf_cat2(result, "\n"); | |
+ p++; | |
+ break; | |
+ case 'r': | |
+ rb_str_buf_cat2(result, "\r"); | |
+ p++; | |
+ break; | |
+ case 't': | |
+ rb_str_buf_cat2(result, "\t"); | |
+ p++; | |
+ break; | |
+ case 'u': | |
+ if (p > pe - 4) { | |
+ return Qnil; | |
+ } else { | |
+ p = JSON_convert_UTF16_to_UTF8(result, p, pe, strictConversion); | |
+ } | |
+ break; | |
+ default: | |
+ rb_str_buf_cat(result, p, 1); | |
+ p++; | |
+ break; | |
+ } | |
+ } else { | |
+ char *q = p; | |
+ while (*q != '\\' && q < pe) q++; | |
+ rb_str_buf_cat(result, p, q - p); | |
+ p = q; | |
+ } | |
+ } | |
+ return result; | |
+} | |
+ | |
+%%{ | |
+ machine JSON_string; | |
+ include JSON_common; | |
+ | |
+ write data; | |
+ | |
+ action parse_string { | |
+ *result = json_string_unescape(json->memo + 1, p); | |
+ if (NIL_P(*result)) { fhold; fbreak; } else fexec p + 1; | |
+ } | |
+ | |
+ action exit { fhold; fbreak; } | |
+ | |
+ main := '"' ((^(["\\] | 0..0x1f) | '\\'["\\/bfnrt] | '\\u'[0-9a-fA-F]{4} | '\\'^(["\\/bfnrtu]|0..0x1f))* %parse_string) '"' @exit; | |
+}%% | |
+ | |
+static char *JSON_parse_string(JSON_Parser *json, char *p, char *pe, VALUE *result) | |
+{ | |
+ int cs = EVIL; | |
+ | |
+ *result = rb_str_new("", 0); | |
+ %% write init; | |
+ json->memo = p; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_string_first_final) { | |
+ return p + 1; | |
+ } else { | |
+ return NULL; | |
+ } | |
+} | |
+ | |
+ | |
+%%{ | |
+ machine JSON; | |
+ | |
+ write data; | |
+ | |
+ include JSON_common; | |
+ | |
+ action parse_object { | |
+ char *np; | |
+ json->current_nesting = 1; | |
+ np = JSON_parse_object(json, fpc, pe, &result); | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ action parse_array { | |
+ char *np; | |
+ json->current_nesting = 1; | |
+ np = JSON_parse_array(json, fpc, pe, &result); | |
+ if (np == NULL) { fhold; fbreak; } else fexec np; | |
+ } | |
+ | |
+ main := ignore* ( | |
+ begin_object >parse_object | | |
+ begin_array >parse_array | |
+ ) ignore*; | |
+}%% | |
+ | |
+/* | |
+ * Document-class: JSON::Ext::Parser | |
+ * | |
+ * This is the JSON parser implemented as a C extension. It can be configured | |
+ * to be used by setting | |
+ * | |
+ * JSON.parser = JSON::Ext::Parser | |
+ * | |
+ * with the method parser= in JSON. | |
+ * | |
+ */ | |
+ | |
+/* | |
+ * call-seq: new(source, opts => {}) | |
+ * | |
+ * Creates a new JSON::Ext::Parser instance for the string _source_. | 
+ * | 
+ * It will be configured by the _opts_ hash. _opts_ can have the following | 
+ * keys: | 
+ * * *max_nesting*: The maximum depth of nesting allowed in the parsed data | 
+ *   structures. Disable depth checking with :max_nesting => false|nil|0; it | 
+ *   defaults to 19. | 
+ * * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in | 
+ *   defiance of RFC 4627 to be parsed by the Parser. This option defaults to | 
+ *   false. | 
+ * * *create_additions*: If set to false, the Parser doesn't create | 
+ *   additions even if a matching class and create_id was found. This option | 
+ *   defaults to true. | 
+ */ | |
+static VALUE cParser_initialize(int argc, VALUE *argv, VALUE self) | |
+{ | |
+ char *ptr; | |
+ long len; | |
+ VALUE source, opts; | |
+ GET_STRUCT; | |
+ rb_scan_args(argc, argv, "11", &source, &opts); | |
+ source = StringValue(source); | |
+ ptr = RSTRING_PTR(source); | |
+ len = RSTRING_LEN(source); | |
+ if (len < 2) { | |
+ rb_raise(eParserError, "A JSON text must at least contain two octets!"); | |
+ } | |
+ if (!NIL_P(opts)) { | |
+ opts = rb_convert_type(opts, T_HASH, "Hash", "to_hash"); | |
+ if (NIL_P(opts)) { | |
+ rb_raise(rb_eArgError, "opts needs to be like a hash"); | |
+ } else { | |
+ VALUE tmp = ID2SYM(i_max_nesting); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE max_nesting = rb_hash_aref(opts, tmp); | |
+ if (RTEST(max_nesting)) { | |
+ Check_Type(max_nesting, T_FIXNUM); | |
+ json->max_nesting = FIX2INT(max_nesting); | |
+ } else { | |
+ json->max_nesting = 0; | |
+ } | |
+ } else { | |
+ json->max_nesting = 19; | |
+ } | |
+ tmp = ID2SYM(i_allow_nan); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE allow_nan = rb_hash_aref(opts, tmp); | |
+ json->allow_nan = RTEST(allow_nan) ? 1 : 0; | |
+ } else { | |
+ json->allow_nan = 0; | |
+ } | |
+ tmp = ID2SYM(i_create_additions); | |
+ if (st_lookup(RHASH(opts)->tbl, tmp, 0)) { | |
+ VALUE create_additions = rb_hash_aref(opts, tmp); | |
+ if (RTEST(create_additions)) { | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } else { | |
+ json->create_id = Qnil; | |
+ } | |
+ } else { | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } | |
+ } | |
+ } else { | |
+ json->max_nesting = 19; | |
+ json->allow_nan = 0; | |
+ json->create_id = rb_funcall(mJSON, i_create_id, 0); | |
+ } | |
+ json->current_nesting = 0; | |
+ /* | |
+ Convert these? | |
+ if (len >= 4 && ptr[0] == 0 && ptr[1] == 0 && ptr[2] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[0] == 0 && ptr[2] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[1] == 0 && ptr[2] == 0 && ptr[3] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } else if (len >= 4 && ptr[1] == 0 && ptr[3] == 0) { | |
+ rb_raise(eParserError, "Only UTF8 octet streams are supported atm!"); | |
+ } | |
+ */ | |
+ json->len = len; | |
+ json->source = ptr; | |
+ json->Vsource = source; | |
+ return self; | |
+} | |
+ | |
+/* | |
+ * call-seq: parse() | |
+ * | |
+ * Parses the current JSON text _source_ and returns the complete data | |
+ * structure as a result. | |
+ */ | |
+static VALUE cParser_parse(VALUE self) | |
+{ | |
+ char *p, *pe; | |
+ int cs = EVIL; | |
+ VALUE result = Qnil; | |
+ GET_STRUCT; | |
+ | |
+ %% write init; | |
+ p = json->source; | |
+ pe = p + json->len; | |
+ %% write exec; | |
+ | |
+ if (cs >= JSON_first_final && p == pe) { | |
+ return result; | |
+ } else { | |
+ rb_raise(eParserError, "%u: unexpected token at '%s'", __LINE__, p); | |
+ } | |
+} | |
+ | |
+inline static JSON_Parser *JSON_allocate() | |
+{ | |
+ JSON_Parser *json = ALLOC(JSON_Parser); | |
+ MEMZERO(json, JSON_Parser, 1); | |
+ return json; | |
+} | |
+ | |
+static void JSON_mark(JSON_Parser *json) | |
+{ | |
+ rb_gc_mark_maybe(json->Vsource); | |
+ rb_gc_mark_maybe(json->create_id); | |
+} | |
+ | |
+static void JSON_free(JSON_Parser *json) | |
+{ | |
+ free(json); | |
+} | |
+ | |
+static VALUE cJSON_parser_s_allocate(VALUE klass) | |
+{ | |
+ JSON_Parser *json = JSON_allocate(); | |
+ return Data_Wrap_Struct(klass, JSON_mark, JSON_free, json); | |
+} | |
+ | |
+/* | |
+ * call-seq: source() | |
+ * | |
+ * Returns a copy of the current _source_ string, that was used to construct | |
+ * this Parser. | |
+ */ | |
+static VALUE cParser_source(VALUE self) | |
+{ | |
+ GET_STRUCT; | |
+ return rb_str_dup(json->Vsource); | |
+} | |
+ | |
+void Init_parser() | |
+{ | |
+ rb_require("json/common"); | |
+ mJSON = rb_define_module("JSON"); | |
+ mExt = rb_define_module_under(mJSON, "Ext"); | |
+ cParser = rb_define_class_under(mExt, "Parser", rb_cObject); | |
+ eParserError = rb_path2class("JSON::ParserError"); | |
+ eNestingError = rb_path2class("JSON::NestingError"); | |
+ rb_define_alloc_func(cParser, cJSON_parser_s_allocate); | |
+ rb_define_method(cParser, "initialize", cParser_initialize, -1); | |
+ rb_define_method(cParser, "parse", cParser_parse, 0); | |
+ rb_define_method(cParser, "source", cParser_source, 0); | |
+ | |
+ CNaN = rb_const_get(mJSON, rb_intern("NaN")); | |
+ CInfinity = rb_const_get(mJSON, rb_intern("Infinity")); | |
+ CMinusInfinity = rb_const_get(mJSON, rb_intern("MinusInfinity")); | |
+ | |
+ i_json_creatable_p = rb_intern("json_creatable?"); | |
+ i_json_create = rb_intern("json_create"); | |
+ i_create_id = rb_intern("create_id"); | |
+ i_create_additions = rb_intern("create_additions"); | |
+ i_chr = rb_intern("chr"); | |
+ i_max_nesting = rb_intern("max_nesting"); | |
+ i_allow_nan = rb_intern("allow_nan"); | |
+} | |
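
The Document-class comment above spells out the three options JSON::Ext::Parser#initialize understands (max_nesting, allow_nan, create_additions). A minimal sketch of how they are used, assuming the json gem from these fixtures is on the load path; the input text and option values are only illustrative:

  require 'json'

  # JSON.parser is whichever Parser class got wired in (Ext or Pure).
  parser = JSON.parser.new('{"a": [1, 2.5, null]}',
                           :max_nesting => 2,          # object + array = depth 2
                           :allow_nan => true,         # permit NaN/Infinity literals
                           :create_additions => false) # skip the json_class hooks
  parser.parse  # => {"a"=>[1, 2.5, nil]}
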
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.c b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.c | |
new file mode 100644 | |
index 0000000..a16ff26 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.c | |
@@ -0,0 +1,154 @@ | |
+#include "unicode.h" | |
+ | |
+/* | |
+ * Copyright 2001-2004 Unicode, Inc. | |
+ * | |
+ * Disclaimer | |
+ * | |
+ * This source code is provided as is by Unicode, Inc. No claims are | |
+ * made as to fitness for any particular purpose. No warranties of any | |
+ * kind are expressed or implied. The recipient agrees to determine | |
+ * applicability of information provided. If this file has been | |
+ * purchased on magnetic or optical media from Unicode, Inc., the | |
+ * sole remedy for any claim will be exchange of defective media | |
+ * within 90 days of receipt. | |
+ * | |
+ * Limitations on Rights to Redistribute This Code | |
+ * | |
+ * Unicode, Inc. hereby grants the right to freely use the information | |
+ * supplied in this file in the creation of products supporting the | |
+ * Unicode Standard, and to make copies of this file in any form | |
+ * for internal or external distribution as long as this notice | |
+ * remains attached. | |
+ */ | |
+ | |
+/* | |
+ * Index into the table below with the first byte of a UTF-8 sequence to | |
+ * get the number of trailing bytes that are supposed to follow it. | |
+ * Note that *legal* UTF-8 values can't have 4- or 5-byte sequences. The table is | 
+ * left as-is for anyone who may want to do such conversion, which was | |
+ * allowed in earlier algorithms. | |
+ */ | |
+static const char trailingBytesForUTF8[256] = { | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, | |
+ 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, | |
+ 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 3,3,3,3,3,3,3,3,4,4,4,4,5,5,5,5 | |
+}; | |
+ | |
+/* | |
+ * Magic values subtracted from a buffer value during UTF8 conversion. | |
+ * This table contains as many values as there might be trailing bytes | |
+ * in a UTF-8 sequence. | |
+ */ | |
+static const UTF32 offsetsFromUTF8[6] = { 0x00000000UL, 0x00003080UL, 0x000E2080UL, | |
+ 0x03C82080UL, 0xFA082080UL, 0x82082080UL }; | |
+ | |
+/* | |
+ * Once the bits are split out into bytes of UTF-8, this is a mask OR-ed | |
+ * into the first byte, depending on how many bytes follow. There are | |
+ * as many entries in this table as there are UTF-8 sequence types. | |
+ * (I.e., one byte sequence, two byte... etc.). Remember that sequences | 
+ * for *legal* UTF-8 will be 4 or fewer bytes total. | |
+ */ | |
+static const UTF8 firstByteMark[7] = { 0x00, 0x00, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC }; | |
+ | |
+char *JSON_convert_UTF16_to_UTF8 ( | |
+ VALUE buffer, | |
+ char *source, | |
+ char *sourceEnd, | |
+ ConversionFlags flags) | |
+{ | |
+ UTF16 *tmp, *tmpPtr, *tmpEnd; | |
+ char buf[5]; | |
+ long n = 0, i; | |
+ char *p = source - 1; | |
+ | |
+ while (p < sourceEnd && p[0] == '\\' && p[1] == 'u') { | |
+ p += 6; | |
+ n++; | |
+ } | |
+ p = source + 1; | |
+ buf[4] = 0; | |
+ tmpPtr = tmp = ALLOC_N(UTF16, n); | |
+ tmpEnd = tmp + n; | |
+ for (i = 0; i < n; i++) { | |
+ buf[0] = *p++; | |
+ buf[1] = *p++; | |
+ buf[2] = *p++; | |
+ buf[3] = *p++; | |
+ tmpPtr[i] = strtol(buf, NULL, 16); | |
+ p += 2; | |
+ } | |
+ | |
+ while (tmpPtr < tmpEnd) { | |
+ UTF32 ch; | |
+ unsigned short bytesToWrite = 0; | |
+ const UTF32 byteMask = 0xBF; | |
+ const UTF32 byteMark = 0x80; | |
+ ch = *tmpPtr++; | |
+ /* If we have a surrogate pair, convert to UTF32 first. */ | |
+ if (ch >= UNI_SUR_HIGH_START && ch <= UNI_SUR_HIGH_END) { | |
+ /* If the 16 bits following the high surrogate are in the source | |
+ * buffer... */ | |
+ if (tmpPtr < tmpEnd) { | |
+ UTF32 ch2 = *tmpPtr; | |
+ /* If it's a low surrogate, convert to UTF32. */ | |
+ if (ch2 >= UNI_SUR_LOW_START && ch2 <= UNI_SUR_LOW_END) { | |
+ ch = ((ch - UNI_SUR_HIGH_START) << halfShift) | |
+ + (ch2 - UNI_SUR_LOW_START) + halfBase; | |
+ ++tmpPtr; | |
+ } else if (flags == strictConversion) { /* it's an unpaired high surrogate */ | |
+ free(tmp); | |
+ rb_raise(rb_path2class("JSON::ParserError"), | |
+ "source sequence is illegal/malformed near %s", source); | |
+ } | |
+ } else { /* We don't have the 16 bits following the high surrogate. */ | |
+ free(tmp); | |
+ rb_raise(rb_path2class("JSON::ParserError"), | |
+ "partial character in source, but hit end near %s", source); | |
+ break; | |
+ } | |
+ } else if (flags == strictConversion) { | |
+ /* UTF-16 surrogate values are illegal in UTF-32 */ | |
+ if (ch >= UNI_SUR_LOW_START && ch <= UNI_SUR_LOW_END) { | |
+ free(tmp); | |
+ rb_raise(rb_path2class("JSON::ParserError"), | |
+ "source sequence is illegal/malformed near %s", source); | |
+ } | |
+ } | |
+ /* Figure out how many bytes the result will require */ | |
+ if (ch < (UTF32) 0x80) { | |
+ bytesToWrite = 1; | |
+ } else if (ch < (UTF32) 0x800) { | |
+ bytesToWrite = 2; | |
+ } else if (ch < (UTF32) 0x10000) { | |
+ bytesToWrite = 3; | |
+ } else if (ch < (UTF32) 0x110000) { | |
+ bytesToWrite = 4; | |
+ } else { | |
+ bytesToWrite = 3; | |
+ ch = UNI_REPLACEMENT_CHAR; | |
+ } | |
+ | |
+ buf[0] = 0; | |
+ buf[1] = 0; | |
+ buf[2] = 0; | |
+ buf[3] = 0; | |
+ p = buf + bytesToWrite; | |
+ switch (bytesToWrite) { /* note: everything falls through. */ | |
+ case 4: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; | |
+ case 3: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; | |
+ case 2: *--p = (UTF8) ((ch | byteMark) & byteMask); ch >>= 6; | |
+ case 1: *--p = (UTF8) (ch | firstByteMark[bytesToWrite]); | |
+ } | |
+ rb_str_buf_cat(buffer, p, bytesToWrite); | |
+ } | |
+ free(tmp); | |
+ source += 5 + (n - 1) * 6; | |
+ return source; | |
+} | |
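
JSON_convert_UTF16_to_UTF8 above reads consecutive \uXXXX escapes, folds UTF-16 surrogate pairs into a single code point, and appends the resulting UTF-8 bytes to the Ruby string buffer. A small sketch of the observable effect from Ruby, assuming the gem is loaded; \ud834\udd1e is the surrogate pair for U+1D11E (musical G clef), which decodes to the UTF-8 bytes F0 9D 84 9E:

  require 'json'

  decoded = JSON.parse('["\ud834\udd1e"]').first
  decoded.unpack('C*').map { |b| format('%02X', b) }
  # => ["F0", "9D", "84", "9E"]
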
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.h b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.h | |
new file mode 100644 | |
index 0000000..155da0c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/ext/json/ext/parser/unicode.h | |
@@ -0,0 +1,58 @@ | |
+ | |
+#ifndef _PARSER_UNICODE_H_ | |
+#define _PARSER_UNICODE_H_ | |
+ | |
+#include "ruby.h" | |
+ | |
+typedef unsigned long UTF32; /* at least 32 bits */ | |
+typedef unsigned short UTF16; /* at least 16 bits */ | |
+typedef unsigned char UTF8; /* typically 8 bits */ | |
+ | |
+#define UNI_REPLACEMENT_CHAR (UTF32)0x0000FFFD | |
+#define UNI_MAX_BMP (UTF32)0x0000FFFF | |
+#define UNI_MAX_UTF16 (UTF32)0x0010FFFF | |
+#define UNI_MAX_UTF32 (UTF32)0x7FFFFFFF | |
+#define UNI_MAX_LEGAL_UTF32 (UTF32)0x0010FFFF | |
+ | |
+#define UNI_SUR_HIGH_START (UTF32)0xD800 | |
+#define UNI_SUR_HIGH_END (UTF32)0xDBFF | |
+#define UNI_SUR_LOW_START (UTF32)0xDC00 | |
+#define UNI_SUR_LOW_END (UTF32)0xDFFF | |
+ | |
+static const int halfShift = 10; /* used for shifting by 10 bits */ | |
+ | |
+static const UTF32 halfBase = 0x0010000UL; | |
+static const UTF32 halfMask = 0x3FFUL; | |
+ | |
+typedef enum { | |
+ conversionOK = 0, /* conversion successful */ | |
+ sourceExhausted, /* partial character in source, but hit end */ | |
+ targetExhausted, /* insuff. room in target for conversion */ | |
+ sourceIllegal /* source sequence is illegal/malformed */ | |
+} ConversionResult; | |
+ | |
+typedef enum { | |
+ strictConversion = 0, | |
+ lenientConversion | |
+} ConversionFlags; | |
+ | |
+char *JSON_convert_UTF16_to_UTF8 ( | |
+ VALUE buffer, | |
+ char *source, | |
+ char *sourceEnd, | |
+ ConversionFlags flags); | |
+ | |
+#ifndef RARRAY_PTR | |
+#define RARRAY_PTR(ARRAY) RARRAY(ARRAY)->ptr | |
+#endif | |
+#ifndef RARRAY_LEN | |
+#define RARRAY_LEN(ARRAY) RARRAY(ARRAY)->len | |
+#endif | |
+#ifndef RSTRING_PTR | |
+#define RSTRING_PTR(string) RSTRING(string)->ptr | |
+#endif | |
+#ifndef RSTRING_LEN | |
+#define RSTRING_LEN(string) RSTRING(string)->len | |
+#endif | |
+ | |
+#endif | |
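
The constants above encode the usual surrogate-pair arithmetic: a high surrogate in D800..DBFF and a low surrogate in DC00..DFFF are combined via halfShift (10) and halfBase (0x10000). A worked example of the same computation in Ruby, using the same pair as in the sketch above:

  hi, lo = 0xD834, 0xDD1E
  code_point = ((hi - 0xD800) << 10) + (lo - 0xDC00) + 0x10000
  format('U+%X', code_point)  # => "U+1D11E"
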
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/install.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/install.rb | |
new file mode 100755 | |
index 0000000..adf77a0 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/install.rb | |
@@ -0,0 +1,26 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'rbconfig' | |
+require 'fileutils' | |
+include FileUtils::Verbose | |
+ | |
+include Config | |
+ | |
+bindir = CONFIG["bindir"] | |
+cd 'bin' do | |
+ filename = 'edit_json.rb' | |
+ #install(filename, bindir) | |
+end | |
+sitelibdir = CONFIG["sitelibdir"] | |
+cd 'lib' do | |
+ install('json.rb', sitelibdir) | |
+ mkdir_p File.join(sitelibdir, 'json') | |
+ for file in Dir['json/**/*.{rb,xpm}'] | |
+ d = File.join(sitelibdir, file) | |
+ mkdir_p File.dirname(d) | |
+ install(file, d) | |
+ end | |
+ install(File.join('json', 'editor.rb'), File.join(sitelibdir,'json')) | |
+ install(File.join('json', 'json.xpm'), File.join(sitelibdir,'json')) | |
+end | |
+warn " *** Installed PURE ruby library." | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json.rb | |
new file mode 100644 | |
index 0000000..3b0b711 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json.rb | |
@@ -0,0 +1,235 @@ | |
+require 'json/common' | |
+# = json - JSON for Ruby | |
+# | |
+# == Description | |
+# | |
+# This is an implementation of the JSON specification according to RFC 4627 | 
+# (http://www.ietf.org/rfc/rfc4627.txt). Starting from version 1.0.0 on there | |
+# will be two variants available: | |
+# | |
+# * A pure ruby variant, that relies on the iconv and the stringscan | |
+# extensions, which are both part of the ruby standard library. | |
+# * The quite a bit faster C extension variant, which is in parts implemented | |
+# in C and comes with its own unicode conversion functions and a parser | |
+# generated by the ragel state machine compiler | |
+# (http://www.cs.queensu.ca/~thurston/ragel). | |
+# | |
+# Both variants of the JSON generator escape all non-ASCII and control | 
+# characters with \uXXXX escape sequences, and support UTF-16 surrogate pairs | |
+# in order to be able to generate the whole range of unicode code points. This | |
+# means that generated JSON text is encoded as UTF-8 (because ASCII is a subset | |
+# of UTF-8) and at the same time avoids decoding problems for receiving | |
+# endpoints, that don't expect UTF-8 encoded texts. On the negative side this | |
+# may lead to slightly longer strings than necessary. | 
+# | |
+# All strings, that are to be encoded as JSON strings, should be UTF-8 byte | |
+# sequences on the Ruby side. To encode raw binary strings, that aren't UTF-8 | |
+# encoded, please use the to_json_raw_object method of String (which produces | |
+# an object, that contains a byte array) and decode the result on the receiving | |
+# endpoint. | |
+# | |
+# == Author | |
+# | |
+# Florian Frank <mailto:[email protected]> | |
+# | |
+# == License | |
+# | |
+# This software is distributed under the same license as Ruby itself, see | |
+# http://www.ruby-lang.org/en/LICENSE.txt. | |
+# | |
+# == Download | |
+# | |
+# The latest version of this library can be downloaded at | |
+# | |
+# * http://rubyforge.org/frs?group_id=953 | |
+# | |
+# Online Documentation should be located at | |
+# | |
+# * http://json.rubyforge.org | |
+# | |
+# == Usage | |
+# | |
+# To use JSON you can | |
+# require 'json' | |
+# to load the installed variant (either the extension 'json' or the pure | |
+# variant 'json_pure'). If you have installed the extension variant, you can | |
+# pick either the extension variant or the pure variant by typing | |
+# require 'json/ext' | |
+# or | |
+# require 'json/pure' | |
+# | |
+# You can choose to load a set of common additions to ruby core's objects if | |
+# you | |
+# require 'json/add/core' | |
+# | |
+# After requiring this you can, e. g., serialise/deserialise Ruby ranges: | |
+# | |
+# JSON JSON(1..10) # => 1..10 | |
+# | |
+# To find out how to add JSON support to other or your own classes, read the | |
+# Examples section below. | |
+# | |
+# To get the best compatibility with rails' JSON implementation, you can | 
+# require 'json/add/rails' | |
+# | |
+# Both of the additions attempt to require 'json' (like above) first, if it has | |
+# not been required yet. | |
+# | |
+# == Speed Comparisons | |
+# | |
+# I have created some benchmark results (see the benchmarks subdir of the | |
+# package) for the JSON-Parser to estimate the speed up in the C extension: | |
+# | |
+# JSON::Pure::Parser:: 28.90 calls/second | |
+# JSON::Ext::Parser:: 505.50 calls/second | |
+# | |
+# This is ca. <b>17.5</b> times the speed of the pure Ruby implementation. | |
+# | |
+# I have benchmarked the JSON-Generator as well. This generates a few more | |
+# values, because there are different modes, that also influence the achieved | |
+# speed: | |
+# | |
+# * JSON::Pure::Generator: | |
+# generate:: 35.06 calls/second | |
+# pretty_generate:: 34.00 calls/second | |
+# fast_generate:: 41.06 calls/second | |
+# | |
+# * JSON::Ext::Generator: | |
+# generate:: 492.11 calls/second | |
+# pretty_generate:: 348.85 calls/second | |
+# fast_generate:: 541.60 calls/second | |
+# | |
+# * Speedup Ext/Pure: | |
+# generate safe:: 14.0 times | |
+# generate pretty:: 10.3 times | |
+# generate fast:: 13.2 times | |
+# | |
+# The rails framework includes a generator as well, though it seems to be rather | 
+# slow: I measured only 23.87 calls/second, which is slower than any of my pure | 
+# generator results. Here is a comparison of the different speedups with the Rails | 
+# measurement as the divisor: | |
+# | |
+# * Speedup Pure/Rails: | |
+# generate safe:: 1.5 times | |
+# generate pretty:: 1.4 times | |
+# generate fast:: 1.7 times | |
+# | |
+# * Speedup Ext/Rails: | |
+# generate safe:: 20.6 times | |
+# generate pretty:: 14.6 times | |
+# generate fast:: 22.7 times | |
+# | |
+# To achieve the fastest JSON text output, you can use the | |
+# fast_generate/fast_unparse methods. Beware, that this will disable the | |
+# checking for circular Ruby data structures, which may cause JSON to go into | |
+# an infinite loop. | |
+# | |
+# == Examples | |
+# | |
+# To create a JSON text from a ruby data structure, you | |
+# can call JSON.generate (or JSON.unparse) like that: | |
+# | |
+# json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] | |
+# # => "[1,2,{\"a\":3.141},false,true,null,\"4..10\"]" | |
+# | |
+# To create a valid JSON text you have to make sure that the output is | 
+# embedded in either a JSON array [] or a JSON object {}. The easiest way to do | 
+# this is by putting your values in a Ruby Array or Hash instance. | 
+# | |
+# To get back a ruby data structure from a JSON text, you have to call | |
+# JSON.parse on it: | |
+# | |
+# JSON.parse json | |
+# # => [1, 2, {"a"=>3.141}, false, true, nil, "4..10"] | |
+# | |
+# Note that the range from the original data structure is now a simple | 
+# string. The reason is that JSON doesn't support ranges | 
+# or arbitrary classes. In this case the json library falls back to calling | 
+# Object#to_json, which is the same as #to_s.to_json. | |
+# | |
+# It's possible to add JSON serialization support to arbitrary classes by | 
+# simply implementing a more specialized version of the #to_json method, which | 
+# should return a JSON object (a hash converted to JSON with #to_json) like | |
+# this (don't forget the *a for all the arguments): | |
+# | |
+# class Range | |
+# def to_json(*a) | |
+# { | |
+# 'json_class' => self.class.name, # = 'Range' | |
+# 'data' => [ first, last, exclude_end? ] | |
+# }.to_json(*a) | |
+# end | |
+# end | |
+# | |
+# The hash key 'json_class' is the class, that will be asked to deserialise the | |
+# JSON representation later. In this case it's 'Range', but any namespace of | |
+# the form 'A::B' or '::A::B' will do. All other keys are arbitrary and can be | |
+# used to store the necessary data to configure the object to be deserialised. | |
+# | |
+# If the key 'json_class' is found in a JSON object, the JSON parser checks | 
+# if the given class responds to the json_create class method. If so, it is | |
+# called with the JSON object converted to a Ruby hash. So a range can | |
+# be deserialised by implementing Range.json_create like this: | |
+# | |
+# class Range | |
+# def self.json_create(o) | |
+# new(*o['data']) | |
+# end | |
+# end | |
+# | |
+# Now it is possible to serialise/deserialise ranges as well: | 
+# | |
+# json = JSON.generate [1, 2, {"a"=>3.141}, false, true, nil, 4..10] | |
+# # => "[1,2,{\"a\":3.141},false,true,null,{\"json_class\":\"Range\",\"data\":[4,10,false]}]" | |
+# JSON.parse json | |
+# # => [1, 2, {"a"=>3.141}, false, true, nil, 4..10] | |
+# | |
+# JSON.generate always creates the shortest possible string representation of a | |
+# ruby data structure in one line. This is good for data storage or network | 
+# protocols, but not so good for humans to read. Fortunately there's also | 
+# JSON.pretty_generate (or JSON.pretty_unparse) that creates a more | 
+# readable output: | |
+# | |
+# puts JSON.pretty_generate([1, 2, {"a"=>3.141}, false, true, nil, 4..10]) | |
+# [ | |
+# 1, | |
+# 2, | |
+# { | |
+# "a": 3.141 | |
+# }, | |
+# false, | |
+# true, | |
+# null, | |
+# { | |
+# "json_class": "Range", | |
+# "data": [ | |
+# 4, | |
+# 10, | |
+# false | |
+# ] | |
+# } | |
+# ] | |
+# | |
+# There are also the methods Kernel#j for unparse, and Kernel#jj for | |
+# pretty_unparse output to the console, which work analogously to Core Ruby's p | 
+# and the pp library's pp methods. | |
+# | |
+# The script tools/server.rb contains a small example if you want to test how | 
+# receiving a JSON object from a webrick server in your browser with the | 
+# javascript prototype library (http://www.prototypejs.org) works. | 
+# | |
+module JSON | |
+ require 'json/version' | |
+ | |
+ if VARIANT_BINARY | |
+ require 'json/ext' | |
+ else | |
+ begin | |
+ require 'json/ext' | |
+ rescue LoadError | |
+ require 'json/pure' | |
+ end | |
+ end | |
+ | |
+ JSON_LOADED = true | |
+end | |
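
The require logic at the bottom of json.rb prefers the C extension and silently falls back to the pure-Ruby variant when the extension is unavailable. A quick way to see which variant was actually loaded, assuming the gem is installed as in this fixture:

  require 'json'

  JSON.parser     # => JSON::Ext::Parser    (or JSON::Pure::Parser on fallback)
  JSON.generator  # => JSON::Ext::Generator (or JSON::Pure::Generator on fallback)
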
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Array.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Array.xpm | |
new file mode 100644 | |
index 0000000..27c4801 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Array.xpm | |
@@ -0,0 +1,21 @@ | |
+/* XPM */ | |
+static char * Array_xpm[] = { | |
+"16 16 2 1", | |
+" c None", | |
+". c #000000", | |
+" ", | |
+" ", | |
+" ", | |
+" .......... ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" .......... ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/FalseClass.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/FalseClass.xpm | |
new file mode 100644 | |
index 0000000..25ce608 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/FalseClass.xpm | |
@@ -0,0 +1,21 @@ | |
+/* XPM */ | |
+static char * False_xpm[] = { | |
+"16 16 2 1", | |
+" c None", | |
+". c #FF0000", | |
+" ", | |
+" ", | |
+" ", | |
+" ...... ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" ...... ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Hash.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Hash.xpm | |
new file mode 100644 | |
index 0000000..cd8f6f7 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Hash.xpm | |
@@ -0,0 +1,21 @@ | |
+/* XPM */ | |
+static char * Hash_xpm[] = { | |
+"16 16 2 1", | |
+" c None", | |
+". c #000000", | |
+" ", | |
+" ", | |
+" ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" ......... ", | |
+" . . ", | |
+" . . ", | |
+" ......... ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Key.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Key.xpm | |
new file mode 100644 | |
index 0000000..9fd7281 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Key.xpm | |
@@ -0,0 +1,73 @@ | |
+/* XPM */ | |
+static char * Key_xpm[] = { | |
+"16 16 54 1", | |
+" c None", | |
+". c #110007", | |
+"+ c #0E0900", | |
+"@ c #000013", | |
+"# c #070600", | |
+"$ c #F6F006", | |
+"% c #ECE711", | |
+"& c #E5EE00", | |
+"* c #16021E", | |
+"= c #120900", | |
+"- c #EDF12B", | |
+"; c #000033", | |
+"> c #0F0000", | |
+", c #FFFE03", | |
+"' c #E6E500", | |
+") c #16021B", | |
+"! c #F7F502", | |
+"~ c #000E00", | |
+"{ c #130000", | |
+"] c #FFF000", | |
+"^ c #FFE711", | |
+"/ c #140005", | |
+"( c #190025", | |
+"_ c #E9DD27", | |
+": c #E7DC04", | |
+"< c #FFEC09", | |
+"[ c #FFE707", | |
+"} c #FFDE10", | |
+"| c #150021", | |
+"1 c #160700", | |
+"2 c #FAF60E", | |
+"3 c #EFE301", | |
+"4 c #FEF300", | |
+"5 c #E7E000", | |
+"6 c #FFFF08", | |
+"7 c #0E0206", | |
+"8 c #040000", | |
+"9 c #03052E", | |
+"0 c #041212", | |
+"a c #070300", | |
+"b c #F2E713", | |
+"c c #F9DE13", | |
+"d c #36091E", | |
+"e c #00001C", | |
+"f c #1F0010", | |
+"g c #FFF500", | |
+"h c #DEDE00", | |
+"i c #050A00", | |
+"j c #FAF14A", | |
+"k c #F5F200", | |
+"l c #040404", | |
+"m c #1A0D00", | |
+"n c #EDE43D", | |
+"o c #ECE007", | |
+" ", | |
+" ", | |
+" .+@ ", | |
+" #$%&* ", | |
+" =-;>,') ", | |
+" >!~{]^/ ", | |
+" (_:<[}| ", | |
+" 1234567 ", | |
+" 890abcd ", | |
+" efghi ", | |
+" >jkl ", | |
+" mnol ", | |
+" >kl ", | |
+" ll ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/NilClass.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/NilClass.xpm | |
new file mode 100644 | |
index 0000000..3509f06 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/NilClass.xpm | |
@@ -0,0 +1,21 @@ | |
+/* XPM */ | |
+static char * False_xpm[] = { | |
+"16 16 2 1", | |
+" c None", | |
+". c #000000", | |
+" ", | |
+" ", | |
+" ", | |
+" ... ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" . . ", | |
+" ... ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Numeric.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Numeric.xpm | |
new file mode 100644 | |
index 0000000..e071e2e | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/Numeric.xpm | |
@@ -0,0 +1,28 @@ | |
+/* XPM */ | |
+static char * Numeric_xpm[] = { | |
+"16 16 9 1", | |
+" c None", | |
+". c #FF0000", | |
+"+ c #0000FF", | |
+"@ c #0023DB", | |
+"# c #00EA14", | |
+"$ c #00FF00", | |
+"% c #004FAF", | |
+"& c #0028D6", | |
+"* c #00F20C", | |
+" ", | |
+" ", | |
+" ", | |
+" ... +++@#$$$$ ", | |
+" .+ %& $$ ", | |
+" . + $ ", | |
+" . + $$ ", | |
+" . ++$$$$ ", | |
+" . + $$ ", | |
+" . + $ ", | |
+" . + $ ", | |
+" . + $ $$ ", | |
+" .....++++*$$ ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/String.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/String.xpm | |
new file mode 100644 | |
index 0000000..f79a89c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/String.xpm | |
@@ -0,0 +1,96 @@ | |
+/* XPM */ | |
+static char * String_xpm[] = { | |
+"16 16 77 1", | |
+" c None", | |
+". c #000000", | |
+"+ c #040404", | |
+"@ c #080806", | |
+"# c #090606", | |
+"$ c #EEEAE1", | |
+"% c #E7E3DA", | |
+"& c #E0DBD1", | |
+"* c #D4B46F", | |
+"= c #0C0906", | |
+"- c #E3C072", | |
+"; c #E4C072", | |
+"> c #060505", | |
+", c #0B0A08", | |
+"' c #D5B264", | |
+") c #D3AF5A", | |
+"! c #080602", | |
+"~ c #E1B863", | |
+"{ c #DDB151", | |
+"] c #DBAE4A", | |
+"^ c #DDB152", | |
+"/ c #DDB252", | |
+"( c #070705", | |
+"_ c #0C0A07", | |
+": c #D3A33B", | |
+"< c #020201", | |
+"[ c #DAAA41", | |
+"} c #040302", | |
+"| c #E4D9BF", | |
+"1 c #0B0907", | |
+"2 c #030201", | |
+"3 c #020200", | |
+"4 c #C99115", | |
+"5 c #080704", | |
+"6 c #DBC8A2", | |
+"7 c #E7D7B4", | |
+"8 c #E0CD9E", | |
+"9 c #080601", | |
+"0 c #040400", | |
+"a c #010100", | |
+"b c #0B0B08", | |
+"c c #DCBF83", | |
+"d c #DCBC75", | |
+"e c #DEB559", | |
+"f c #040301", | |
+"g c #BC8815", | |
+"h c #120E07", | |
+"i c #060402", | |
+"j c #0A0804", | |
+"k c #D4A747", | |
+"l c #D6A12F", | |
+"m c #0E0C05", | |
+"n c #C8C1B0", | |
+"o c #1D1B15", | |
+"p c #D7AD51", | |
+"q c #070502", | |
+"r c #080804", | |
+"s c #BC953B", | |
+"t c #C4BDAD", | |
+"u c #0B0807", | |
+"v c #DBAC47", | |
+"w c #1B150A", | |
+"x c #B78A2C", | |
+"y c #D8A83C", | |
+"z c #D4A338", | |
+"A c #0F0B03", | |
+"B c #181105", | |
+"C c #C59325", | |
+"D c #C18E1F", | |
+"E c #060600", | |
+"F c #CC992D", | |
+"G c #B98B25", | |
+"H c #B3831F", | |
+"I c #C08C1C", | |
+"J c #060500", | |
+"K c #0E0C03", | |
+"L c #0D0A00", | |
+" ", | |
+" .+@# ", | |
+" .$%&*= ", | |
+" .-;>,')! ", | |
+" .~. .{]. ", | |
+" .^/. (_:< ", | |
+" .[.}|$12 ", | |
+" 345678}90 ", | |
+" a2bcdefgh ", | |
+" ijkl.mno ", | |
+" <pq. rstu ", | |
+" .]v. wx= ", | |
+" .yzABCDE ", | |
+" .FGHIJ ", | |
+" 0KL0 ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/TrueClass.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/TrueClass.xpm | |
new file mode 100644 | |
index 0000000..143eef4 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/TrueClass.xpm | |
@@ -0,0 +1,21 @@ | |
+/* XPM */ | |
+static char * TrueClass_xpm[] = { | |
+"16 16 2 1", | |
+" c None", | |
+". c #0BF311", | |
+" ", | |
+" ", | |
+" ", | |
+" ......... ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" . ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/core.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/core.rb | |
new file mode 100644 | |
index 0000000..5a56ed7 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/core.rb | |
@@ -0,0 +1,135 @@ | |
+# This file contains implementations of ruby core's custom objects for | |
+# serialisation/deserialisation. | |
+ | |
+unless Object.const_defined?(:JSON) and ::JSON.const_defined?(:JSON_LOADED) and | |
+ ::JSON::JSON_LOADED | |
+ require 'json' | |
+end | |
+require 'date' | |
+ | |
+class Time | |
+ def self.json_create(object) | |
+ if usec = object.delete('u') # used to be tv_usec -> tv_nsec | |
+ object['n'] = usec * 1000 | |
+ end | |
+ if respond_to?(:tv_nsec) | |
+ at(*object.values_at('s', 'n')) | |
+ else | |
+ at(object['s'], object['n'] / 1000) | |
+ end | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 's' => tv_sec, | |
+ 'n' => respond_to?(:tv_nsec) ? tv_nsec : tv_usec * 1000 | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class Date | |
+ def self.json_create(object) | |
+ civil(*object.values_at('y', 'm', 'd', 'sg')) | |
+ end | |
+ | |
+ alias start sg unless method_defined?(:start) | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'y' => year, | |
+ 'm' => month, | |
+ 'd' => day, | |
+ 'sg' => start, | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class DateTime | |
+ def self.json_create(object) | |
+ args = object.values_at('y', 'm', 'd', 'H', 'M', 'S') | |
+ of_a, of_b = object['of'].split('/') | |
+ if of_b and of_b != '0' | |
+ args << Rational(of_a.to_i, of_b.to_i) | |
+ else | |
+ args << of_a | |
+ end | |
+ args << object['sg'] | |
+ civil(*args) | |
+ end | |
+ | |
+ alias start sg unless method_defined?(:start) | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'y' => year, | |
+ 'm' => month, | |
+ 'd' => day, | |
+ 'H' => hour, | |
+ 'M' => min, | |
+ 'S' => sec, | |
+ 'of' => offset.to_s, | |
+ 'sg' => start, | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class Range | |
+ def self.json_create(object) | |
+ new(*object['a']) | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'a' => [ first, last, exclude_end? ] | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class Struct | |
+ def self.json_create(object) | |
+ new(*object['v']) | |
+ end | |
+ | |
+ def to_json(*args) | |
+ klass = self.class.name | |
+ klass.empty? and raise JSON::JSONError, "Only named structs are supported!" | |
+ { | |
+ 'json_class' => klass, | |
+ 'v' => values, | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class Exception | |
+ def self.json_create(object) | |
+ result = new(object['m']) | |
+ result.set_backtrace object['b'] | |
+ result | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'm' => message, | |
+ 'b' => backtrace, | |
+ }.to_json(*args) | |
+ end | |
+end | |
+ | |
+class Regexp | |
+ def self.json_create(object) | |
+ new(object['s'], object['o']) | |
+ end | |
+ | |
+ def to_json(*) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'o' => options, | |
+ 's' => source, | |
+ }.to_json | |
+ end | |
+end | |
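
The core additions above give Time, Date, DateTime, Range, Struct, Exception and Regexp matching to_json/json_create pairs, so those objects can round-trip through JSON whenever create_additions is enabled (the default). A brief sketch, assuming json/add/core is reachable on the load path; hash key order in the generated text may vary:

  require 'json/add/core'

  json = JSON.generate(1..10)
  # => "{\"json_class\":\"Range\",\"a\":[1,10,false]}"
  JSON.parse(json)  # => 1..10
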
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/rails.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/rails.rb | |
new file mode 100644 | |
index 0000000..e86ed1a | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/add/rails.rb | |
@@ -0,0 +1,58 @@ | |
+# This file contains implementations of rails custom objects for | |
+# serialisation/deserialisation. | |
+ | |
+unless Object.const_defined?(:JSON) and ::JSON.const_defined?(:JSON_LOADED) and | |
+ ::JSON::JSON_LOADED | |
+ require 'json' | |
+end | |
+ | |
+class Object | |
+ def self.json_create(object) | |
+ obj = new | |
+ for key, value in object | |
+ next if key == 'json_class' | |
+      obj.instance_variable_set "@#{key}", value | 
+ end | |
+ obj | |
+ end | |
+ | |
+ def to_json(*a) | |
+ result = { | |
+ 'json_class' => self.class.name | |
+ } | |
+ instance_variables.inject(result) do |r, name| | |
+ r[name[1..-1]] = instance_variable_get name | |
+ r | |
+ end | |
+ result.to_json(*a) | |
+ end | |
+end | |
+ | |
+class Symbol | |
+ def to_json(*a) | |
+ to_s.to_json(*a) | |
+ end | |
+end | |
+ | |
+module Enumerable | |
+ def to_json(*a) | |
+ to_a.to_json(*a) | |
+ end | |
+end | |
+ | |
+# class Regexp | |
+# def to_json(*) | |
+# inspect | |
+# end | |
+# end | |
+# | |
+# The above rails definition has some problems: | |
+# | |
+# 1. { 'foo' => /bar/ }.to_json # => "{foo: /bar/}" | |
+# This isn't valid JSON, because the regular expression syntax is not | |
+# defined in RFC 4627. (And unquoted strings are disallowed there, too.) | |
+# Though it is valid Javascript. | |
+# | |
+# 2. { 'foo' => /bar/mix }.to_json # => "{foo: /bar/mix}" | |
+# This isn't even valid Javascript. | |
+ | |
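
The rails additions above serialise Symbols via to_s and any Enumerable via to_a, and give Object an instance-variable-based to_json/json_create pair. A short sketch of the first two, assuming json/add/rails is on the load path:

  require 'json/add/rails'

  :merb.to_json   # => "\"merb\""   (Symbol serialised as its string form)
  (1..3).to_json  # => "[1,2,3]"    (Enumerable serialised as an array)
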
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/common.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/common.rb | |
new file mode 100644 | |
index 0000000..499fcc0 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/common.rb | |
@@ -0,0 +1,354 @@ | |
+require 'json/version' | |
+ | |
+module JSON | |
+ class << self | |
+    # If _object_ is string-like, parse the string and return the parsed result | 
+ # as a Ruby data structure. Otherwise generate a JSON text from the Ruby | |
+ # data structure object and return it. | |
+ # | |
+ # The _opts_ argument is passed through to generate/parse respectively, see | |
+ # generate and parse for their documentation. | |
+ def [](object, opts = {}) | |
+ if object.respond_to? :to_str | |
+        JSON.parse(object.to_str, opts) | 
+      else | 
+        JSON.generate(object, opts) | 
+ end | |
+ end | |
+ | |
+ # Returns the JSON parser class, that is used by JSON. This might be either | |
+ # JSON::Ext::Parser or JSON::Pure::Parser. | |
+ attr_reader :parser | |
+ | |
+ # Set the JSON parser class _parser_ to be used by JSON. | |
+ def parser=(parser) # :nodoc: | |
+ @parser = parser | |
+ remove_const :Parser if const_defined? :Parser | |
+ const_set :Parser, parser | |
+ end | |
+ | |
+ # Return the constant located at _path_. The format of _path_ has to be | |
+ # either ::A::B::C or A::B::C. In any case A has to be located at the top | |
+    # level (absolute namespace path?). If no constant exists at | 
+    # the given path, an ArgumentError is raised. | 
+ def deep_const_get(path) # :nodoc: | |
+ path = path.to_s | |
+ path.split(/::/).inject(Object) do |p, c| | |
+ case | |
+ when c.empty? then p | |
+ when p.const_defined?(c) then p.const_get(c) | |
+ else raise ArgumentError, "can't find const #{path}" | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Set the module _generator_ to be used by JSON. | |
+ def generator=(generator) # :nodoc: | |
+ @generator = generator | |
+ generator_methods = generator::GeneratorMethods | |
+ for const in generator_methods.constants | |
+ klass = deep_const_get(const) | |
+ modul = generator_methods.const_get(const) | |
+ klass.class_eval do | |
+ instance_methods(false).each do |m| | |
+ m.to_s == 'to_json' and remove_method m | |
+ end | |
+ include modul | |
+ end | |
+ end | |
+ self.state = generator::State | |
+ const_set :State, self.state | |
+ end | |
+ | |
+    # Returns the JSON generator module that is used by JSON. This might be | 
+ # either JSON::Ext::Generator or JSON::Pure::Generator. | |
+ attr_reader :generator | |
+ | |
+ # Returns the JSON generator state class, that is used by JSON. This might | |
+ # be either JSON::Ext::Generator::State or JSON::Pure::Generator::State. | |
+ attr_accessor :state | |
+ | |
+    # This is the create identifier that is used to decide if the _json_create_ | 
+ # hook of a class should be called. It defaults to 'json_class'. | |
+ attr_accessor :create_id | |
+ end | |
+ self.create_id = 'json_class' | |
+ | |
+ NaN = (-1.0) ** 0.5 | |
+ | |
+ Infinity = 1.0/0 | |
+ | |
+ MinusInfinity = -Infinity | |
+ | |
+ # The base exception for JSON errors. | |
+ class JSONError < StandardError; end | |
+ | |
+ # This exception is raised, if a parser error occurs. | |
+ class ParserError < JSONError; end | |
+ | |
+ # This exception is raised, if the nesting of parsed datastructures is too | |
+ # deep. | |
+ class NestingError < ParserError; end | |
+ | |
+ # This exception is raised, if a generator or unparser error occurs. | |
+ class GeneratorError < JSONError; end | |
+ # For backwards compatibility | |
+ UnparserError = GeneratorError | |
+ | |
+ # If a circular data structure is encountered while unparsing | |
+ # this exception is raised. | |
+ class CircularDatastructure < GeneratorError; end | |
+ | |
+ # This exception is raised, if the required unicode support is missing on the | |
+ # system. Usually this means, that the iconv library is not installed. | |
+ class MissingUnicodeSupport < JSONError; end | |
+ | |
+ module_function | |
+ | |
+ # Parse the JSON string _source_ into a Ruby data structure and return it. | |
+ # | |
+ # _opts_ can have the following | |
+ # keys: | |
+ # * *max_nesting*: The maximum depth of nesting allowed in the parsed data | |
+ # structures. Disable depth checking with :max_nesting => false, it defaults | |
+ # to 19. | |
+ # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in | |
+ # defiance of RFC 4627 to be parsed by the Parser. This option defaults | |
+ # to false. | |
+ # * *create_additions*: If set to false, the Parser doesn't create | |
+  #   additions even if a matching class and create_id was found. This option | 
+ # defaults to true. | |
+ def parse(source, opts = {}) | |
+ JSON.parser.new(source, opts).parse | |
+ end | |
+ | |
+ # Parse the JSON string _source_ into a Ruby data structure and return it. | |
+  # The bang version of the parse method defaults to the more dangerous values | 
+ # for the _opts_ hash, so be sure only to parse trusted _source_ strings. | |
+ # | |
+ # _opts_ can have the following keys: | |
+ # * *max_nesting*: The maximum depth of nesting allowed in the parsed data | |
+ # structures. Enable depth checking with :max_nesting => anInteger. The parse! | |
+  #   method defaults to not doing max depth checking: this can be dangerous | 
+  #   if someone wants to fill up your stack. | 
+ # * *allow_nan*: If set to true, allow NaN, Infinity, and -Infinity in | |
+ # defiance of RFC 4627 to be parsed by the Parser. This option defaults | |
+ # to true. | |
+ # * *create_additions*: If set to false, the Parser doesn't create | |
+  #   additions even if a matching class and create_id was found. This option | 
+ # defaults to true. | |
+ def parse!(source, opts = {}) | |
+ opts = { | |
+ :max_nesting => false, | |
+ :allow_nan => true | |
+ }.update(opts) | |
+ JSON.parser.new(source, opts).parse | |
+ end | |
+ | |
+ # Unparse the Ruby data structure _obj_ into a single line JSON string and | |
+ # return it. _state_ is | |
+ # * a JSON::State object, | |
+ # * or a Hash like object (responding to to_hash), | |
+ # * an object convertible into a hash by a to_h method, | |
+ # that is used as or to configure a State object. | |
+ # | |
+ # It defaults to a state object, that creates the shortest possible JSON text | |
+ # in one line, checks for circular data structures and doesn't allow NaN, | |
+ # Infinity, and -Infinity. | |
+ # | |
+ # A _state_ hash can have the following keys: | |
+ # * *indent*: a string used to indent levels (default: ''), | |
+  # * *space*: a string that is put after a : or , delimiter (default: ''), | 
+ # * *space_before*: a string that is put before a : pair delimiter (default: ''), | |
+ # * *object_nl*: a string that is put at the end of a JSON object (default: ''), | |
+ # * *array_nl*: a string that is put at the end of a JSON array (default: ''), | |
+ # * *check_circular*: true if checking for circular data structures | |
+ # should be done (the default), false otherwise. | |
+ # * *allow_nan*: true if NaN, Infinity, and -Infinity should be | |
+ # generated, otherwise an exception is thrown, if these values are | |
+  #   encountered. This option defaults to false. | 
+ # * *max_nesting*: The maximum depth of nesting allowed in the data | |
+ # structures from which JSON is to be generated. Disable depth checking | |
+ # with :max_nesting => false, it defaults to 19. | |
+ # | |
+ # See also the fast_generate for the fastest creation method with the least | |
+ # amount of sanity checks, and the pretty_generate method for some | |
+ # defaults for a pretty output. | |
+ def generate(obj, state = nil) | |
+ if state | |
+ state = State.from_state(state) | |
+ else | |
+ state = State.new | |
+ end | |
+ obj.to_json(state) | |
+ end | |
+ | |
+ # :stopdoc: | |
+ # I want to deprecate these later, so I'll first be silent about them, and | |
+ # later delete them. | |
+ alias unparse generate | |
+ module_function :unparse | |
+ # :startdoc: | |
+ | |
+ # Unparse the Ruby data structure _obj_ into a single line JSON string and | |
+ # return it. This method disables the checks for circles in Ruby objects, and | |
+ # also generates NaN, Infinity, and, -Infinity float values. | |
+ # | |
+ # *WARNING*: Be careful not to pass any Ruby data structures with circles as | |
+ # _obj_ argument, because this will cause JSON to go into an infinite loop. | |
+ def fast_generate(obj) | |
+ obj.to_json(nil) | |
+ end | |
+ | |
+ # :stopdoc: | |
+ # I want to deprecate these later, so I'll first be silent about them, and later delete them. | |
+ alias fast_unparse fast_generate | |
+ module_function :fast_unparse | |
+ # :startdoc: | |
+ | |
+ # Unparse the Ruby data structure _obj_ into a JSON string and return it. The | |
+ # returned string is a prettier form of the string returned by #unparse. | |
+ # | |
+ # The _opts_ argument can be used to configure the generator, see the | |
+ # generate method for a more detailed explanation. | |
+ def pretty_generate(obj, opts = nil) | |
+ state = JSON.state.new( | |
+ :indent => ' ', | |
+ :space => ' ', | |
+ :object_nl => "\n", | |
+ :array_nl => "\n", | |
+ :check_circular => true | |
+ ) | |
+ if opts | |
+ if opts.respond_to? :to_hash | |
+ opts = opts.to_hash | |
+ elsif opts.respond_to? :to_h | |
+ opts = opts.to_h | |
+ else | |
+ raise TypeError, "can't convert #{opts.class} into Hash" | |
+ end | |
+ state.configure(opts) | |
+ end | |
+ obj.to_json(state) | |
+ end | |
+ | |
+ # :stopdoc: | |
+ # I want to deprecate these later, so I'll first be silent about them, and later delete them. | |
+ alias pretty_unparse pretty_generate | |
+ module_function :pretty_unparse | |
+ # :startdoc: | |
+ | |
+ # Load a ruby data structure from a JSON _source_ and return it. A source can | |
+ # either be a string-like object, an IO like object, or an object responding | |
+ # to the read method. If _proc_ was given, it will be called with any nested | |
+ # Ruby object as an argument recursively in depth first order. | |
+ # | |
+ # This method is part of the implementation of the load/dump interface of | |
+ # Marshal and YAML. | |
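+ # | 
+ # For example: | 
+ # | 
+ #   JSON.load('{"a": [1, 2]}')  # => {"a"=>[1, 2]} | 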
+ def load(source, proc = nil) | |
+ if source.respond_to? :to_str | |
+ source = source.to_str | |
+ elsif source.respond_to? :to_io | |
+ source = source.to_io.read | |
+ else | |
+ source = source.read | |
+ end | |
+ result = parse(source, :max_nesting => false, :allow_nan => true) | |
+ recurse_proc(result, &proc) if proc | |
+ result | |
+ end | |
+ | |
+ def recurse_proc(result, &proc) | |
+ case result | |
+ when Array | |
+ result.each { |x| recurse_proc x, &proc } | |
+ proc.call result | |
+ when Hash | |
+ result.each { |x, y| recurse_proc x, &proc; recurse_proc y, &proc } | |
+ proc.call result | |
+ else | |
+ proc.call result | |
+ end | |
+ end | |
+ private :recurse_proc | |
+ module_function :recurse_proc | |
+ | |
+ alias restore load | |
+ module_function :restore | |
+ | |
+ # Dumps _obj_ as a JSON string, i.e. calls generate on the object and returns | |
+ # the result. | |
+ # | |
+ # If anIO (an IO-like object or an object that responds to the write method) | 
+ # was given, the resulting JSON is written to it. | 
+ # | |
+ # If the number of nested arrays or objects exceeds _limit_ an ArgumentError | |
+ # exception is raised. This argument is similar (but not exactly the | |
+ # same!) to the _limit_ argument in Marshal.dump. | |
+ # | |
+ # This method is part of the implementation of the load/dump interface of | |
+ # Marshal and YAML. | |
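+ # | 
+ # For example: | 
+ # | 
+ #   JSON.dump({"a" => 1})          # => "{\"a\":1}" | 
+ #   JSON.dump({"a" => 1}, STDOUT)  # writes {"a":1} to STDOUT and returns STDOUT | 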
+ def dump(obj, anIO = nil, limit = nil) | |
+ if anIO and limit.nil? | |
+ anIO = anIO.to_io if anIO.respond_to?(:to_io) | |
+ unless anIO.respond_to?(:write) | |
+ limit = anIO | |
+ anIO = nil | |
+ end | |
+ end | |
+ limit ||= 0 | |
+ result = generate(obj, :allow_nan => true, :max_nesting => limit) | |
+ if anIO | |
+ anIO.write result | |
+ anIO | |
+ else | |
+ result | |
+ end | |
+ rescue JSON::NestingError | |
+ raise ArgumentError, "exceed depth limit" | |
+ end | |
+end | |
+ | |
+module ::Kernel | |
+ # Outputs _objs_ to STDOUT as JSON strings in the shortest form, that is in | |
+ # one line. | |
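+ # | 
+ # For example: | 
+ # | 
+ #   j "foo", [1, 2]   # prints "foo" and [1,2] on separate lines | 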
+ def j(*objs) | |
+ objs.each do |obj| | |
+ puts JSON::generate(obj, :allow_nan => true, :max_nesting => false) | |
+ end | |
+ nil | |
+ end | |
+ | |
+ # Outputs _objs_ to STDOUT as JSON strings in a pretty format, with | 
+ # indentation and over many lines. | 
+ def jj(*objs) | |
+ objs.each do |obj| | |
+ puts JSON::pretty_generate(obj, :allow_nan => true, :max_nesting => false) | |
+ end | |
+ nil | |
+ end | |
+ | |
+ # If _object_ is string-like, parse the string and return the parsed result as | 
+ # a Ruby data structure. Otherwise generate a JSON text from the Ruby data | |
+ # structure object and return it. | |
+ # | |
+ # The _opts_ argument is passed through to generate/parse respectively, see | |
+ # generate and parse for their documentation. | |
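+ # | 
+ # For example: | 
+ # | 
+ #   JSON('[1, 2]')   # => [1, 2] | 
+ #   JSON([1, 2])     # => "[1,2]" | 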
+ def JSON(object, opts = {}) | |
+ if object.respond_to? :to_str | |
+ JSON.parse(object.to_str, opts) | |
+ else | |
+ JSON.generate(object, opts) | |
+ end | |
+ end | |
+end | |
+ | |
+class ::Class | |
+ # Returns true if this class can be used to create an instance | 
+ # from a serialised JSON string. The class has to implement a class | 
+ # method _json_create_ that expects a hash as its first parameter, which | 
+ # includes the required data. | 
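+ # | 
+ # For example, assuming a hypothetical Point class that defines a | 
+ # Point.json_create class method: | 
+ # | 
+ #   Point.json_creatable?  # => true | 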
+ def json_creatable? | |
+ respond_to?(:json_create) | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/editor.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/editor.rb | |
new file mode 100644 | |
index 0000000..12a7f94 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/editor.rb | |
@@ -0,0 +1,1362 @@ | |
+# To use the GUI JSON editor, start the edit_json.rb executable script. It | |
+# requires ruby-gtk to be installed. | |
+ | |
+require 'gtk2' | |
+require 'iconv' | |
+require 'json' | |
+require 'rbconfig' | |
+require 'open-uri' | |
+ | |
+module JSON | |
+ module Editor | |
+ include Gtk | |
+ | |
+ # Beginning of the editor window title | |
+ TITLE = 'JSON Editor'.freeze | |
+ | |
+ # Columns constants | |
+ ICON_COL, TYPE_COL, CONTENT_COL = 0, 1, 2 | |
+ | |
+ # JSON primitive types (Containers) | |
+ CONTAINER_TYPES = %w[Array Hash].sort | |
+ # All JSON primitive types | |
+ ALL_TYPES = (%w[TrueClass FalseClass Numeric String NilClass] + | |
+ CONTAINER_TYPES).sort | |
+ | |
+ # The Nodes necessary for the tree representation of a JSON document | |
+ ALL_NODES = (ALL_TYPES + %w[Key]).sort | |
+ | |
+ DEFAULT_DIALOG_KEY_PRESS_HANDLER = lambda do |dialog, event| | |
+ case event.keyval | |
+ when Gdk::Keyval::GDK_Return | |
+ dialog.response Dialog::RESPONSE_ACCEPT | |
+ when Gdk::Keyval::GDK_Escape | |
+ dialog.response Dialog::RESPONSE_REJECT | |
+ end | |
+ end | |
+ | |
+ # Returns the Gdk::Pixbuf of the icon named _name_ from the icon cache. | |
+ def Editor.fetch_icon(name) | |
+ @icon_cache ||= {} | |
+ unless @icon_cache.key?(name) | |
+ path = File.dirname(__FILE__) | |
+ @icon_cache[name] = Gdk::Pixbuf.new(File.join(path, name + '.xpm')) | |
+ end | |
+ @icon_cache[name] | |
+ end | |
+ | |
+ # Opens an error dialog on top of _window_ showing the error message | |
+ # _text_. | |
+ def Editor.error_dialog(window, text) | |
+ dialog = MessageDialog.new(window, Dialog::MODAL, | |
+ MessageDialog::ERROR, | |
+ MessageDialog::BUTTONS_CLOSE, text) | |
+ dialog.show_all | |
+ dialog.run | |
+ rescue TypeError | |
+ dialog = MessageDialog.new(Editor.window, Dialog::MODAL, | |
+ MessageDialog::ERROR, | |
+ MessageDialog::BUTTONS_CLOSE, text) | |
+ dialog.show_all | |
+ dialog.run | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ | |
+ # Opens a yes/no question dialog on top of _window_ showing the question | 
+ # _text_. If yes was answered, _true_ is returned, otherwise | 
+ # _false_. | 
+ def Editor.question_dialog(window, text) | |
+ dialog = MessageDialog.new(window, Dialog::MODAL, | |
+ MessageDialog::QUESTION, | |
+ MessageDialog::BUTTONS_YES_NO, text) | |
+ dialog.show_all | |
+ dialog.run do |response| | |
+ return Gtk::Dialog::RESPONSE_YES === response | |
+ end | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ | |
+ # Convert the tree model starting from Gtk::TreeIter _iter_ into a Ruby | |
+ # data structure and return it. | |
+ def Editor.model2data(iter) | |
+ return nil if iter.nil? | |
+ case iter.type | |
+ when 'Hash' | |
+ hash = {} | |
+ iter.each { |c| hash[c.content] = Editor.model2data(c.first_child) } | |
+ hash | |
+ when 'Array' | |
+ array = Array.new(iter.n_children) | |
+ iter.each_with_index { |c, i| array[i] = Editor.model2data(c) } | |
+ array | |
+ when 'Key' | |
+ iter.content | |
+ when 'String' | |
+ iter.content | |
+ when 'Numeric' | |
+ content = iter.content | |
+ if /\./.match(content) | |
+ content.to_f | |
+ else | |
+ content.to_i | |
+ end | |
+ when 'TrueClass' | |
+ true | |
+ when 'FalseClass' | |
+ false | |
+ when 'NilClass' | |
+ nil | |
+ else | |
+ fail "Unknown type found in model: #{iter.type}" | |
+ end | |
+ end | |
+ | |
+ # Convert the Ruby data structure _data_ into tree model data for Gtk and | 
+ # return the whole model. If the parameter _model_ wasn't given, a new | 
+ # Gtk::TreeStore is created as the model. The _parent_ parameter specifies | 
+ # the parent node (iter, a Gtk::TreeIter instance) to which the data is | 
+ # appended; alternatively, the result of the yielded block is used as the iter. | 
+ def Editor.data2model(data, model = nil, parent = nil) | |
+ model ||= TreeStore.new(Gdk::Pixbuf, String, String) | |
+ iter = if block_given? | |
+ yield model | |
+ else | |
+ model.append(parent) | |
+ end | |
+ case data | |
+ when Hash | |
+ iter.type = 'Hash' | |
+ data.sort.each do |key, value| | |
+ pair_iter = model.append(iter) | |
+ pair_iter.type = 'Key' | |
+ pair_iter.content = key.to_s | |
+ Editor.data2model(value, model, pair_iter) | |
+ end | |
+ when Array | |
+ iter.type = 'Array' | |
+ data.each do |value| | |
+ Editor.data2model(value, model, iter) | |
+ end | |
+ when Numeric | |
+ iter.type = 'Numeric' | |
+ iter.content = data.to_s | |
+ when String, true, false, nil | |
+ iter.type = data.class.name | |
+ iter.content = data.nil? ? 'null' : data.to_s | |
+ else | |
+ iter.type = 'String' | |
+ iter.content = data.to_s | |
+ end | |
+ model | |
+ end | |
+ | |
+ # The Gtk::TreeIter class is reopened and some auxiliary methods are added. | |
+ class Gtk::TreeIter | |
+ include Enumerable | |
+ | |
+ # Traverse each of this Gtk::TreeIter instance's children | |
+ # and yield to them. | |
+ def each | |
+ n_children.times { |i| yield nth_child(i) } | |
+ end | |
+ | |
+ # Recursively traverse all nodes of this Gtk::TreeIter's subtree | |
+ # (including self) and yield to them. | |
+ def recursive_each(&block) | |
+ yield self | |
+ each do |i| | |
+ i.recursive_each(&block) | |
+ end | |
+ end | |
+ | |
+ # Remove the subtree of this Gtk::TreeIter instance from the | |
+ # model _model_. | |
+ def remove_subtree(model) | |
+ while current = first_child | |
+ model.remove(current) | |
+ end | |
+ end | |
+ | |
+ # Returns the type of this node. | |
+ def type | |
+ self[TYPE_COL] | |
+ end | |
+ | |
+ # Sets the type of this node to _value_. This implies setting | |
+ # the respective icon accordingly. | |
+ def type=(value) | |
+ self[TYPE_COL] = value | |
+ self[ICON_COL] = Editor.fetch_icon(value) | |
+ end | |
+ | |
+ # Returns the content of this node. | |
+ def content | |
+ self[CONTENT_COL] | |
+ end | |
+ | |
+ # Sets the content of this node to _value_. | |
+ def content=(value) | |
+ self[CONTENT_COL] = value | |
+ end | |
+ end | |
+ | |
+ # This module bundles some methods that can be used to create a menu. It | 
+ # should be included in the class in question. | 
+ module MenuExtension | |
+ include Gtk | |
+ | |
+ # Creates a Menu, that includes MenuExtension. _treeview_ is the | |
+ # Gtk::TreeView, on which it operates. | |
+ def initialize(treeview) | |
+ @treeview = treeview | |
+ @menu = Menu.new | |
+ end | |
+ | |
+ # Returns the Gtk::TreeView of this menu. | |
+ attr_reader :treeview | |
+ | |
+ # Returns the menu. | |
+ attr_reader :menu | |
+ | |
+ # Adds a Gtk::SeparatorMenuItem to this instance's #menu. | |
+ def add_separator | |
+ menu.append SeparatorMenuItem.new | |
+ end | |
+ | |
+ # Adds a Gtk::MenuItem to this instance's #menu. _label_ is the label | 
+ # string, _klass_ is the item type, and _callback_ is the procedure that | 
+ # is called if the _item_ is activated. | 
+ def add_item(label, keyval = nil, klass = MenuItem, &callback) | |
+ label = "#{label} (C-#{keyval.chr})" if keyval | |
+ item = klass.new(label) | |
+ item.signal_connect(:activate, &callback) | |
+ if keyval | |
+ self.signal_connect(:'key-press-event') do |item, event| | |
+ if event.state & Gdk::Window::ModifierType::CONTROL_MASK != 0 and | |
+ event.keyval == keyval | |
+ callback.call item | |
+ end | |
+ end | |
+ end | |
+ menu.append item | |
+ item | |
+ end | |
+ | |
+ # This method should be implemented in subclasses to create the #menu of | |
+ # this instance. It has to be called after an instance of this class is | |
+ # created, to build the menu. | |
+ def create | |
+ raise NotImplementedError | |
+ end | |
+ | |
+ def method_missing(*a, &b) | |
+ treeview.__send__(*a, &b) | |
+ end | |
+ end | |
+ | |
+ # This class creates the popup menu that opens when clicking on the | 
+ # treeview. | 
+ class PopUpMenu | |
+ include MenuExtension | |
+ | |
+ # Change the type or content of the selected node. | |
+ def change_node(item) | |
+ if current = selection.selected | |
+ parent = current.parent | |
+ old_type, old_content = current.type, current.content | |
+ if ALL_TYPES.include?(old_type) | |
+ @clipboard_data = Editor.model2data(current) | |
+ type, content = ask_for_element(parent, current.type, | |
+ current.content) | |
+ if type | |
+ current.type, current.content = type, content | |
+ current.remove_subtree(model) | |
+ toplevel.display_status("Changed a node in tree.") | |
+ window.change | |
+ end | |
+ else | |
+ toplevel.display_status( | |
+ "Cannot change node of type #{old_type} in tree!") | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Cut the selected node and its subtree, and save it into the | |
+ # clipboard. | |
+ def cut_node(item) | |
+ if current = selection.selected | |
+ if current and current.type == 'Key' | |
+ @clipboard_data = { | |
+ current.content => Editor.model2data(current.first_child) | |
+ } | |
+ else | |
+ @clipboard_data = Editor.model2data(current) | |
+ end | |
+ model.remove(current) | |
+ window.change | |
+ toplevel.display_status("Cut a node from tree.") | |
+ end | |
+ end | |
+ | |
+ # Copy the selected node and its subtree, and save it into the | |
+ # clipboard. | |
+ def copy_node(item) | |
+ if current = selection.selected | |
+ if current and current.type == 'Key' | |
+ @clipboard_data = { | |
+ current.content => Editor.model2data(current.first_child) | |
+ } | |
+ else | |
+ @clipboard_data = Editor.model2data(current) | |
+ end | |
+ window.change | |
+ toplevel.display_status("Copied a node from tree.") | |
+ end | |
+ end | |
+ | |
+ # Paste the data in the clipboard into the selected Array or Hash by | |
+ # appending it. | |
+ def paste_node_appending(item) | |
+ if current = selection.selected | |
+ if @clipboard_data | |
+ case current.type | |
+ when 'Array' | |
+ Editor.data2model(@clipboard_data, model, current) | |
+ expand_collapse(current) | |
+ when 'Hash' | |
+ if @clipboard_data.is_a? Hash | |
+ parent = current.parent | |
+ hash = Editor.model2data(current) | |
+ model.remove(current) | |
+ hash.update(@clipboard_data) | |
+ Editor.data2model(hash, model, parent) | |
+ if parent | |
+ expand_collapse(parent) | |
+ elsif @expanded | |
+ expand_all | |
+ end | |
+ window.change | |
+ else | |
+ toplevel.display_status( | |
+ "Cannot paste non-#{current.type} data into '#{current.type}'!") | |
+ end | |
+ else | |
+ toplevel.display_status( | |
+ "Cannot paste node below '#{current.type}'!") | |
+ end | |
+ else | |
+ toplevel.display_status("Nothing to paste in clipboard!") | |
+ end | |
+ else | |
+ toplevel.display_status("Append a node into the root first!") | |
+ end | |
+ end | |
+ | |
+ # Paste the data in the clipboard into the selected Array inserting it | |
+ # before the selected element. | |
+ def paste_node_inserting_before(item) | |
+ if current = selection.selected | |
+ if @clipboard_data | |
+ parent = current.parent or return | |
+ parent_type = parent.type | |
+ if parent_type == 'Array' | |
+ selected_index = parent.each_with_index do |c, i| | |
+ break i if c == current | |
+ end | |
+ Editor.data2model(@clipboard_data, model, parent) do |m| | |
+ m.insert_before(parent, current) | |
+ end | |
+ expand_collapse(current) | |
+ toplevel.display_status("Inserted an element to " + | |
+ "'#{parent_type}' before index #{selected_index}.") | |
+ window.change | |
+ else | |
+ toplevel.display_status( | |
+ "Cannot insert node below '#{parent_type}'!") | |
+ end | |
+ else | |
+ toplevel.display_status("Nothing to paste in clipboard!") | |
+ end | |
+ else | |
+ toplevel.display_status("Append a node into the root first!") | |
+ end | |
+ end | |
+ | |
+ # Append a new node to the selected Hash or Array. | |
+ def append_new_node(item) | |
+ if parent = selection.selected | |
+ parent_type = parent.type | |
+ case parent_type | |
+ when 'Hash' | |
+ key, type, content = ask_for_hash_pair(parent) | |
+ key or return | |
+ iter = create_node(parent, 'Key', key) | |
+ iter = create_node(iter, type, content) | |
+ toplevel.display_status( | |
+ "Added a (key, value)-pair to '#{parent_type}'.") | |
+ window.change | |
+ when 'Array' | |
+ type, content = ask_for_element(parent) | |
+ type or return | |
+ iter = create_node(parent, type, content) | |
+ window.change | |
+ toplevel.display_status("Appendend an element to '#{parent_type}'.") | |
+ else | |
+ toplevel.display_status("Cannot append to '#{parent_type}'!") | |
+ end | |
+ else | |
+ type, content = ask_for_element | |
+ type or return | |
+ iter = create_node(nil, type, content) | |
+ window.change | |
+ end | |
+ end | |
+ | |
+ # Insert a new node into an Array before the selected element. | |
+ def insert_new_node(item) | |
+ if current = selection.selected | |
+ parent = current.parent or return | |
+ parent_parent = parent.parent | |
+ parent_type = parent.type | |
+ if parent_type == 'Array' | |
+ selected_index = parent.each_with_index do |c, i| | |
+ break i if c == current | |
+ end | |
+ type, content = ask_for_element(parent) | |
+ type or return | |
+ iter = model.insert_before(parent, current) | |
+ iter.type, iter.content = type, content | |
+ toplevel.display_status("Inserted an element to " + | |
+ "'#{parent_type}' before index #{selected_index}.") | |
+ window.change | |
+ else | |
+ toplevel.display_status( | |
+ "Cannot insert node below '#{parent_type}'!") | |
+ end | |
+ else | |
+ toplevel.display_status("Append a node into the root first!") | |
+ end | |
+ end | |
+ | |
+ # Recursively collapse/expand a subtree starting from the selected node. | |
+ def collapse_expand(item) | |
+ if current = selection.selected | |
+ if row_expanded?(current.path) | |
+ collapse_row(current.path) | |
+ else | |
+ expand_row(current.path, true) | |
+ end | |
+ else | |
+ toplevel.display_status("Append a node into the root first!") | |
+ end | |
+ end | |
+ | |
+ # Create the menu. | |
+ def create | |
+ add_item("Change node", ?n, &method(:change_node)) | |
+ add_separator | |
+ add_item("Cut node", ?X, &method(:cut_node)) | |
+ add_item("Copy node", ?C, &method(:copy_node)) | |
+ add_item("Paste node (appending)", ?A, &method(:paste_node_appending)) | |
+ add_item("Paste node (inserting before)", ?I, | |
+ &method(:paste_node_inserting_before)) | |
+ add_separator | |
+ add_item("Append new node", ?a, &method(:append_new_node)) | |
+ add_item("Insert new node before", ?i, &method(:insert_new_node)) | |
+ add_separator | |
+ add_item("Collapse/Expand node (recursively)", ?e, | |
+ &method(:collapse_expand)) | |
+ | |
+ menu.show_all | |
+ signal_connect(:button_press_event) do |widget, event| | |
+ if event.kind_of? Gdk::EventButton and event.button == 3 | |
+ menu.popup(nil, nil, event.button, event.time) | |
+ end | |
+ end | |
+ signal_connect(:popup_menu) do | |
+ menu.popup(nil, nil, 0, Gdk::Event::CURRENT_TIME) | |
+ end | |
+ end | |
+ end | |
+ | |
+ # This class creates the File pulldown menu. | |
+ class FileMenu | |
+ include MenuExtension | |
+ | |
+ # Clear the model and filename, but ask to save the JSON document, if | 
+ # unsaved changes have occurred. | 
+ def new(item) | |
+ window.clear | |
+ end | |
+ | |
+ # Open a file and load it into the editor. Ask to save the JSON document | 
+ # first, if unsaved changes have occurred. | 
+ def open(item) | |
+ window.file_open | |
+ end | |
+ | |
+ def open_location(item) | |
+ window.location_open | |
+ end | |
+ | |
+ # Revert the current JSON document in the editor to the saved version. | |
+ def revert(item) | |
+ window.instance_eval do | |
+ @filename and file_open(@filename) | |
+ end | |
+ end | |
+ | |
+ # Save the current JSON document. | |
+ def save(item) | |
+ window.file_save | |
+ end | |
+ | |
+ # Save the current JSON document under the given filename. | |
+ def save_as(item) | |
+ window.file_save_as | |
+ end | |
+ | |
+ # Quit the editor, after asking to save any unsaved changes first. | |
+ def quit(item) | |
+ window.quit | |
+ end | |
+ | |
+ # Create the menu. | |
+ def create | |
+ title = MenuItem.new('File') | |
+ title.submenu = menu | |
+ add_item('New', &method(:new)) | |
+ add_item('Open', ?o, &method(:open)) | |
+ add_item('Open location', ?l, &method(:open_location)) | |
+ add_item('Revert', &method(:revert)) | |
+ add_separator | |
+ add_item('Save', ?s, &method(:save)) | |
+ add_item('Save As', ?S, &method(:save_as)) | |
+ add_separator | |
+ add_item('Quit', ?q, &method(:quit)) | |
+ title | |
+ end | |
+ end | |
+ | |
+ # This class creates the Edit pulldown menu. | |
+ class EditMenu | |
+ include MenuExtension | |
+ | |
+ # Copy data from model into primary clipboard. | |
+ def copy(item) | |
+ data = Editor.model2data(model.iter_first) | |
+ json = JSON.pretty_generate(data, :max_nesting => false) | |
+ c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) | |
+ c.text = json | |
+ end | |
+ | |
+ # Copy JSON text from the primary clipboard into the model. | 
+ def paste(item) | |
+ c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) | |
+ if json = c.wait_for_text | |
+ window.ask_save if @changed | |
+ begin | |
+ window.edit json | |
+ rescue JSON::ParserError | |
+ window.clear | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Find a string in all nodes' contents and select the found node in the | |
+ # treeview. | |
+ def find(item) | |
+ @search = ask_for_find_term(@search) or return | |
+ iter = model.get_iter('0') or return | |
+ iter.recursive_each do |i| | |
+ if @iter | |
+ if @iter != i | |
+ next | |
+ else | |
+ @iter = nil | |
+ next | |
+ end | |
+ elsif @search.match(i[CONTENT_COL]) | |
+ set_cursor(i.path, nil, false) | |
+ @iter = i | |
+ break | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Repeat the last search given by #find. | |
+ def find_again(item) | |
+ @search or return | |
+ iter = model.get_iter('0') | |
+ iter.recursive_each do |i| | |
+ if @iter | |
+ if @iter != i | |
+ next | |
+ else | |
+ @iter = nil | |
+ next | |
+ end | |
+ elsif @search.match(i[CONTENT_COL]) | |
+ set_cursor(i.path, nil, false) | |
+ @iter = i | |
+ break | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Sort (Reverse sort) all elements of the selected array by the given | |
+ # expression. _x_ is the element in question. | |
+ def sort(item) | |
+ if current = selection.selected | |
+ if current.type == 'Array' | |
+ parent = current.parent | |
+ ary = Editor.model2data(current) | |
+ order, reverse = ask_for_order | |
+ order or return | |
+ begin | |
+ block = eval "lambda { |x| #{order} }" | |
+ if reverse | |
+ ary.sort! { |a,b| block[b] <=> block[a] } | |
+ else | |
+ ary.sort! { |a,b| block[a] <=> block[b] } | |
+ end | |
+ rescue => e | |
+ Editor.error_dialog(self, "Failed to sort Array with #{order}: #{e}!") | |
+ else | |
+ Editor.data2model(ary, model, parent) do |m| | |
+ m.insert_before(parent, current) | |
+ end | |
+ model.remove(current) | |
+ expand_collapse(parent) | |
+ window.change | |
+ toplevel.display_status("Array has been sorted.") | |
+ end | |
+ else | |
+ toplevel.display_status("Only Array nodes can be sorted!") | |
+ end | |
+ else | |
+ toplevel.display_status("Select an Array to sort first!") | |
+ end | |
+ end | |
+ | |
+ # Create the menu. | |
+ def create | |
+ title = MenuItem.new('Edit') | |
+ title.submenu = menu | |
+ add_item('Copy', ?c, &method(:copy)) | |
+ add_item('Paste', ?v, &method(:paste)) | |
+ add_separator | |
+ add_item('Find', ?f, &method(:find)) | |
+ add_item('Find Again', ?g, &method(:find_again)) | |
+ add_separator | |
+ add_item('Sort', ?S, &method(:sort)) | |
+ title | |
+ end | |
+ end | |
+ | |
+ class OptionsMenu | |
+ include MenuExtension | |
+ | |
+ # Collapse/Expand all nodes by default. | |
+ def collapsed_nodes(item) | |
+ if expanded | |
+ self.expanded = false | |
+ collapse_all | |
+ else | |
+ self.expanded = true | |
+ expand_all | |
+ end | |
+ end | |
+ | |
+ # Toggle pretty saving mode on/off. | |
+ def pretty_saving(item) | |
+ @pretty_item.toggled | |
+ window.change | |
+ end | |
+ | |
+ attr_reader :pretty_item | |
+ | |
+ # Create the menu. | |
+ def create | |
+ title = MenuItem.new('Options') | |
+ title.submenu = menu | |
+ add_item('Collapsed nodes', nil, CheckMenuItem, &method(:collapsed_nodes)) | |
+ @pretty_item = add_item('Pretty saving', nil, CheckMenuItem, | |
+ &method(:pretty_saving)) | |
+ @pretty_item.active = true | |
+ window.unchange | |
+ title | |
+ end | |
+ end | |
+ | |
+ # This class inherits from Gtk::TreeView, to configure it and to add a lot | |
+ # of behaviour to it. | |
+ class JSONTreeView < Gtk::TreeView | |
+ include Gtk | |
+ | |
+ # Creates a JSONTreeView instance; the parameter _window_ is | 
+ # a MainWindow instance and is used for self delegation. | 
+ def initialize(window) | |
+ @window = window | |
+ super(TreeStore.new(Gdk::Pixbuf, String, String)) | |
+ self.selection.mode = SELECTION_BROWSE | |
+ | |
+ @expanded = false | |
+ self.headers_visible = false | |
+ add_columns | |
+ add_popup_menu | |
+ end | |
+ | |
+ # Returns the MainWindow instance of this JSONTreeView. | |
+ attr_reader :window | |
+ | |
+ # Returns true if nodes are autoexpanding, false otherwise. | 
+ attr_accessor :expanded | |
+ | |
+ private | |
+ | |
+ def add_columns | |
+ cell = CellRendererPixbuf.new | |
+ column = TreeViewColumn.new('Icon', cell, | |
+ 'pixbuf' => ICON_COL | |
+ ) | |
+ append_column(column) | |
+ | |
+ cell = CellRendererText.new | |
+ column = TreeViewColumn.new('Type', cell, | |
+ 'text' => TYPE_COL | |
+ ) | |
+ append_column(column) | |
+ | |
+ cell = CellRendererText.new | |
+ cell.editable = true | |
+ column = TreeViewColumn.new('Content', cell, | |
+ 'text' => CONTENT_COL | |
+ ) | |
+ cell.signal_connect(:edited, &method(:cell_edited)) | |
+ append_column(column) | |
+ end | |
+ | |
+ def unify_key(iter, key) | |
+ return unless iter.type == 'Key' | |
+ parent = iter.parent | |
+ if parent.any? { |c| c != iter and c.content == key } | |
+ old_key = key | |
+ i = 0 | |
+ begin | |
+ key = sprintf("%s.%d", old_key, i += 1) | |
+ end while parent.any? { |c| c != iter and c.content == key } | |
+ end | |
+ iter.content = key | |
+ end | |
+ | |
+ def cell_edited(cell, path, value) | |
+ iter = model.get_iter(path) | |
+ case iter.type | |
+ when 'Key' | |
+ unify_key(iter, value) | |
+ toplevel.display_status('Key has been changed.') | |
+ when 'FalseClass' | |
+ value.downcase! | |
+ if value == 'true' | |
+ iter.type, iter.content = 'TrueClass', 'true' | |
+ end | |
+ when 'TrueClass' | |
+ value.downcase! | |
+ if value == 'false' | |
+ iter.type, iter.content = 'FalseClass', 'false' | |
+ end | |
+ when 'Numeric' | |
+ iter.content = (Integer(value) rescue Float(value) rescue 0).to_s | |
+ when 'String' | |
+ iter.content = value | |
+ when 'Hash', 'Array' | |
+ return | |
+ else | |
+ fail "Unknown type found in model: #{iter.type}" | |
+ end | |
+ window.change | |
+ end | |
+ | |
+ def configure_value(value, type) | |
+ value.editable = false | |
+ case type | |
+ when 'Array', 'Hash' | |
+ value.text = '' | |
+ when 'TrueClass' | |
+ value.text = 'true' | |
+ when 'FalseClass' | |
+ value.text = 'false' | |
+ when 'NilClass' | |
+ value.text = 'null' | |
+ when 'Numeric', 'String' | |
+ value.text ||= '' | |
+ value.editable = true | |
+ else | |
+ raise ArgumentError, "unknown type '#{type}' encountered" | |
+ end | |
+ end | |
+ | |
+ def add_popup_menu | |
+ menu = PopUpMenu.new(self) | |
+ menu.create | |
+ end | |
+ | |
+ public | |
+ | |
+ # Create a _type_ node with content _content_, and add it to _parent_ | |
+ # in the model. If _parent_ is nil, create a new model and put it into | |
+ # the editor treeview. | |
+ def create_node(parent, type, content) | |
+ iter = if parent | |
+ model.append(parent) | |
+ else | |
+ new_model = Editor.data2model(nil) | |
+ toplevel.view_new_model(new_model) | |
+ new_model.iter_first | |
+ end | |
+ iter.type, iter.content = type, content | |
+ expand_collapse(parent) if parent | |
+ iter | |
+ end | |
+ | |
+ # Ask for a (key, value) pair to be added to the Hash node _parent_. | 
+ def ask_for_hash_pair(parent) | |
+ key_input = type_input = value_input = nil | |
+ | |
+ dialog = Dialog.new("New (key, value) pair for Hash", nil, nil, | |
+ [ Stock::OK, Dialog::RESPONSE_ACCEPT ], | |
+ [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] | |
+ ) | |
+ dialog.width_request = 640 | |
+ | |
+ hbox = HBox.new(false, 5) | |
+ hbox.pack_start(Label.new("Key:"), false) | |
+ hbox.pack_start(key_input = Entry.new) | |
+ key_input.text = @key || '' | |
+ dialog.vbox.pack_start(hbox, false) | |
+ key_input.signal_connect(:activate) do | |
+ if parent.any? { |c| c.content == key_input.text } | |
+ toplevel.display_status('Key already exists in Hash!') | |
+ key_input.text = '' | |
+ else | |
+ toplevel.display_status('Key has been changed.') | |
+ end | |
+ end | |
+ | |
+ hbox = HBox.new(false, 5) | |
+ hbox.pack_start(Label.new("Type:"), false) | |
+ hbox.pack_start(type_input = ComboBox.new(true)) | |
+ ALL_TYPES.each { |t| type_input.append_text(t) } | |
+ type_input.active = @type || 0 | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ type_input.signal_connect(:changed) do | |
+ value_input.editable = false | |
+ case ALL_TYPES[type_input.active] | |
+ when 'Array', 'Hash' | |
+ value_input.text = '' | |
+ when 'TrueClass' | |
+ value_input.text = 'true' | |
+ when 'FalseClass' | |
+ value_input.text = 'false' | |
+ when 'NilClass' | |
+ value_input.text = 'null' | |
+ else | |
+ value_input.text = '' | |
+ value_input.editable = true | |
+ end | |
+ end | |
+ | |
+ hbox = HBox.new(false, 5) | |
+ hbox.pack_start(Label.new("Value:"), false) | |
+ hbox.pack_start(value_input = Entry.new) | |
+ value_input.width_chars = 60 | |
+ value_input.text = @value || '' | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) | |
+ dialog.show_all | |
+ self.focus = dialog | |
+ dialog.run do |response| | |
+ if response == Dialog::RESPONSE_ACCEPT | |
+ @key = key_input.text | |
+ type = ALL_TYPES[@type = type_input.active] | |
+ content = value_input.text | |
+ return @key, type, content | |
+ end | |
+ end | |
+ return | |
+ ensure | |
+ dialog.destroy | |
+ end | |
+ | |
+ # Ask for an element to be appended to _parent_. | 
+ def ask_for_element(parent = nil, default_type = nil, value_text = @content) | |
+ type_input = value_input = nil | |
+ | |
+ dialog = Dialog.new( | |
+ "New element into #{parent ? parent.type : 'root'}", | |
+ nil, nil, | |
+ [ Stock::OK, Dialog::RESPONSE_ACCEPT ], | |
+ [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] | |
+ ) | |
+ hbox = HBox.new(false, 5) | |
+ hbox.pack_start(Label.new("Type:"), false) | |
+ hbox.pack_start(type_input = ComboBox.new(true)) | |
+ default_active = 0 | |
+ types = parent ? ALL_TYPES : CONTAINER_TYPES | |
+ types.each_with_index do |t, i| | |
+ type_input.append_text(t) | |
+ if t == default_type | |
+ default_active = i | |
+ end | |
+ end | |
+ type_input.active = default_active | |
+ dialog.vbox.pack_start(hbox, false) | |
+ type_input.signal_connect(:changed) do | |
+ configure_value(value_input, types[type_input.active]) | |
+ end | |
+ | |
+ hbox = HBox.new(false, 5) | |
+ hbox.pack_start(Label.new("Value:"), false) | |
+ hbox.pack_start(value_input = Entry.new) | |
+ value_input.width_chars = 60 | |
+ value_input.text = value_text if value_text | |
+ configure_value(value_input, types[type_input.active]) | |
+ | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) | |
+ dialog.show_all | |
+ self.focus = dialog | |
+ dialog.run do |response| | |
+ if response == Dialog::RESPONSE_ACCEPT | |
+ type = types[type_input.active] | |
+ @content = case type | |
+ when 'Numeric' | |
+ Integer(value_input.text) rescue Float(value_input.text) rescue 0 | |
+ else | |
+ value_input.text | |
+ end.to_s | |
+ return type, @content | |
+ end | |
+ end | |
+ return | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ | |
+ # Ask for an order criterion for sorting, using _x_ for the element in | 
+ # question. Returns the order criterion, and true/false for reverse | 
+ # sorting. | 
+ def ask_for_order | |
+ dialog = Dialog.new( | |
+ "Give an order criterium for 'x'.", | |
+ nil, nil, | |
+ [ Stock::OK, Dialog::RESPONSE_ACCEPT ], | |
+ [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] | |
+ ) | |
+ hbox = HBox.new(false, 5) | |
+ | |
+ hbox.pack_start(Label.new("Order:"), false) | |
+ hbox.pack_start(order_input = Entry.new) | |
+ order_input.text = @order || 'x' | |
+ order_input.width_chars = 60 | |
+ | |
+ hbox.pack_start(reverse_checkbox = CheckButton.new('Reverse'), false) | |
+ | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) | |
+ dialog.show_all | |
+ self.focus = dialog | |
+ dialog.run do |response| | |
+ if response == Dialog::RESPONSE_ACCEPT | |
+ return @order = order_input.text, reverse_checkbox.active? | |
+ end | |
+ end | |
+ return | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ | |
+ # Ask for a find term to search for in the tree. Returns the term as a | 
+ # Regexp. | 
+ def ask_for_find_term(search = nil) | |
+ dialog = Dialog.new( | |
+ "Find a node matching regex in tree.", | |
+ nil, nil, | |
+ [ Stock::OK, Dialog::RESPONSE_ACCEPT ], | |
+ [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] | |
+ ) | |
+ hbox = HBox.new(false, 5) | |
+ | |
+ hbox.pack_start(Label.new("Regex:"), false) | |
+ hbox.pack_start(regex_input = Entry.new) | |
+ hbox.pack_start(icase_checkbox = CheckButton.new('Icase'), false) | |
+ regex_input.width_chars = 60 | |
+ if search | |
+ regex_input.text = search.source | |
+ icase_checkbox.active = search.casefold? | |
+ end | |
+ | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) | |
+ dialog.show_all | |
+ self.focus = dialog | |
+ dialog.run do |response| | |
+ if response == Dialog::RESPONSE_ACCEPT | |
+ begin | |
+ return Regexp.new(regex_input.text, icase_checkbox.active? ? Regexp::IGNORECASE : 0) | |
+ rescue => e | |
+ Editor.error_dialog(self, "Evaluation of regex /#{regex_input.text}/ failed: #{e}!") | |
+ return | |
+ end | |
+ end | |
+ end | |
+ return | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ | |
+ # Expand or collapse row pointed to by _iter_ according | |
+ # to the #expanded attribute. | |
+ def expand_collapse(iter) | |
+ if expanded | |
+ expand_row(iter.path, true) | |
+ else | |
+ collapse_row(iter.path) | |
+ end | |
+ end | |
+ end | |
+ | |
+ # The editor main window | |
+ class MainWindow < Gtk::Window | |
+ include Gtk | |
+ | |
+ def initialize(encoding) | |
+ @changed = false | |
+ @encoding = encoding | |
+ super(TOPLEVEL) | |
+ display_title | |
+ set_default_size(800, 600) | |
+ signal_connect(:delete_event) { quit } | |
+ | |
+ vbox = VBox.new(false, 0) | |
+ add(vbox) | |
+ #vbox.border_width = 0 | |
+ | |
+ @treeview = JSONTreeView.new(self) | |
+ @treeview.signal_connect(:'cursor-changed') do | |
+ display_status('') | |
+ end | |
+ | |
+ menu_bar = create_menu_bar | |
+ vbox.pack_start(menu_bar, false, false, 0) | |
+ | |
+ sw = ScrolledWindow.new(nil, nil) | |
+ sw.shadow_type = SHADOW_ETCHED_IN | |
+ sw.set_policy(POLICY_AUTOMATIC, POLICY_AUTOMATIC) | |
+ vbox.pack_start(sw, true, true, 0) | |
+ sw.add(@treeview) | |
+ | |
+ @status_bar = Statusbar.new | |
+ vbox.pack_start(@status_bar, false, false, 0) | |
+ | |
+ @filename ||= nil | |
+ if @filename | |
+ data = read_data(@filename) | |
+ view_new_model Editor.data2model(data) | |
+ end | |
+ | |
+ signal_connect(:button_release_event) do |_,event| | |
+ if event.button == 2 | |
+ c = Gtk::Clipboard.get(Gdk::Selection::PRIMARY) | |
+ if url = c.wait_for_text | |
+ location_open url | |
+ end | |
+ false | |
+ else | |
+ true | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Creates the menu bar with the pulldown menus and returns it. | |
+ def create_menu_bar | |
+ menu_bar = MenuBar.new | |
+ @file_menu = FileMenu.new(@treeview) | |
+ menu_bar.append @file_menu.create | |
+ @edit_menu = EditMenu.new(@treeview) | |
+ menu_bar.append @edit_menu.create | |
+ @options_menu = OptionsMenu.new(@treeview) | |
+ menu_bar.append @options_menu.create | |
+ menu_bar | |
+ end | |
+ | |
+ # Sets editor status to changed, to indicate that the edited data | 
+ # contains unsaved changes. | 
+ def change | |
+ @changed = true | |
+ display_title | |
+ end | |
+ | |
+ # Sets editor status to unchanged, to indicate that the edited data | 
+ # doesn't contain unsaved changes. | 
+ def unchange | |
+ @changed = false | |
+ display_title | |
+ end | |
+ | |
+ # Puts a new model _model_ into the Gtk::TreeView to be edited. | |
+ def view_new_model(model) | |
+ @treeview.model = model | |
+ @treeview.expanded = true | |
+ @treeview.expand_all | |
+ unchange | |
+ end | |
+ | |
+ # Displays _text_ in the status bar. | |
+ def display_status(text) | |
+ @cid ||= nil | |
+ @status_bar.pop(@cid) if @cid | |
+ @cid = @status_bar.get_context_id('dummy') | |
+ @status_bar.push(@cid, text) | |
+ end | |
+ | |
+ # Opens a dialog asking if changes should be saved to a file. | 
+ def ask_save | |
+ if Editor.question_dialog(self, | |
+ "Unsaved changes to JSON model. Save?") | |
+ if @filename | |
+ file_save | |
+ else | |
+ file_save_as | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Quit this editor, that is, leave this editor's main loop. | |
+ def quit | |
+ ask_save if @changed | |
+ if Gtk.main_level > 0 | |
+ destroy | |
+ Gtk.main_quit | |
+ end | |
+ nil | |
+ end | |
+ | |
+ # Display the new title according to the editor's current state. | |
+ def display_title | |
+ title = TITLE.dup | |
+ title << ": #@filename" if @filename | |
+ title << " *" if @changed | |
+ self.title = title | |
+ end | |
+ | |
+ # Clear the current model, after asking to save all unsaved changes. | |
+ def clear | |
+ ask_save if @changed | |
+ @filename = nil | |
+ self.view_new_model nil | |
+ end | |
+ | |
+ def check_pretty_printed(json) | |
+ pretty = !!((nl_index = json.index("\n")) && nl_index != json.size - 1) | |
+ @options_menu.pretty_item.active = pretty | |
+ end | |
+ private :check_pretty_printed | |
+ | |
+ # Open the data at the location _uri_, if given. Otherwise open a dialog | |
+ # to ask for the _uri_. | |
+ def location_open(uri = nil) | |
+ uri = ask_for_location unless uri | |
+ uri or return | |
+ ask_save if @changed | |
+ data = load_location(uri) or return | |
+ view_new_model Editor.data2model(data) | |
+ end | |
+ | |
+ # Open the file _filename_ or call the #select_file method to ask for a | |
+ # filename. | |
+ def file_open(filename = nil) | |
+ filename = select_file('Open as a JSON file') unless filename | |
+ data = load_file(filename) or return | |
+ view_new_model Editor.data2model(data) | |
+ end | |
+ | |
+ # Edit the string _json_ in the editor. | |
+ def edit(json) | |
+ if json.respond_to? :read | |
+ json = json.read | |
+ end | |
+ data = parse_json json | |
+ view_new_model Editor.data2model(data) | |
+ end | |
+ | |
+ # Save the current file. | |
+ def file_save | |
+ if @filename | |
+ store_file(@filename) | |
+ else | |
+ file_save_as | |
+ end | |
+ end | |
+ | |
+ # Save the current file under a filename selected via a dialog. | 
+ def file_save_as | |
+ filename = select_file('Save as a JSON file') | |
+ store_file(filename) | |
+ end | |
+ | |
+ # Store the current JSON document to _path_. | |
+ def store_file(path) | |
+ if path | |
+ data = Editor.model2data(@treeview.model.iter_first) | |
+ File.open(path + '.tmp', 'wb') do |output| | |
+ data or break | |
+ if @options_menu.pretty_item.active? | |
+ output.puts JSON.pretty_generate(data, :max_nesting => false) | |
+ else | |
+ output.write JSON.generate(data, :max_nesting => false) | |
+ end | |
+ end | |
+ File.rename path + '.tmp', path | |
+ @filename = path | |
+ toplevel.display_status("Saved data to '#@filename'.") | |
+ unchange | |
+ end | |
+ rescue SystemCallError => e | |
+ Editor.error_dialog(self, "Failed to store JSON file: #{e}!") | |
+ end | |
+ | |
+ # Load the file named _filename_ into the editor as a JSON document. | |
+ def load_file(filename) | |
+ if filename | |
+ if File.directory?(filename) | |
+ Editor.error_dialog(self, "Try to select a JSON file!") | |
+ nil | |
+ else | |
+ @filename = filename | |
+ if data = read_data(filename) | |
+ toplevel.display_status("Loaded data from '#@filename'.") | |
+ end | |
+ display_title | |
+ data | |
+ end | |
+ end | |
+ end | |
+ | |
+ # Load the data at location _uri_ into the editor as a JSON document. | |
+ def load_location(uri) | |
+ data = read_data(uri) or return | |
+ @filename = nil | |
+ toplevel.display_status("Loaded data from '#{uri}'.") | |
+ display_title | |
+ data | |
+ end | |
+ | |
+ def parse_json(json) | |
+ check_pretty_printed(json) | |
+ if @encoding && !/^utf8$/i.match(@encoding) | |
+ iconverter = Iconv.new('utf8', @encoding) | |
+ json = iconverter.iconv(json) | |
+ end | |
+ JSON::parse(json, :max_nesting => false, :create_additions => false) | |
+ end | |
+ private :parse_json | |
+ | |
+ # Read a JSON document from the file named _filename_, parse it into a | 
+ # Ruby data structure, and return the data. | 
+ def read_data(filename) | |
+ open(filename) do |f| | |
+ json = f.read | |
+ return parse_json(json) | |
+ end | |
+ rescue => e | |
+ Editor.error_dialog(self, "Failed to parse JSON file: #{e}!") | |
+ return | |
+ end | |
+ | |
+ # Open a file selection dialog, displaying _message_, and return the | 
+ # selected filename or nil, if no file was selected. | 
+ def select_file(message) | |
+ filename = nil | |
+ fs = FileSelection.new(message) | |
+ fs.set_modal(true) | |
+ @default_dir = File.join(Dir.pwd, '') unless @default_dir | |
+ fs.set_filename(@default_dir) | |
+ fs.set_transient_for(self) | |
+ fs.signal_connect(:destroy) { Gtk.main_quit } | |
+ fs.ok_button.signal_connect(:clicked) do | |
+ filename = fs.filename | |
+ @default_dir = File.join(File.dirname(filename), '') | |
+ fs.destroy | |
+ Gtk.main_quit | |
+ end | |
+ fs.cancel_button.signal_connect(:clicked) do | |
+ fs.destroy | |
+ Gtk.main_quit | |
+ end | |
+ fs.show_all | |
+ Gtk.main | |
+ filename | |
+ end | |
+ | |
+ # Ask for a location URI to load data from. Returns the URI as a string. | 
+ def ask_for_location | |
+ dialog = Dialog.new( | |
+ "Load data from location...", | |
+ nil, nil, | |
+ [ Stock::OK, Dialog::RESPONSE_ACCEPT ], | |
+ [ Stock::CANCEL, Dialog::RESPONSE_REJECT ] | |
+ ) | |
+ hbox = HBox.new(false, 5) | |
+ | |
+ hbox.pack_start(Label.new("Location:"), false) | |
+ hbox.pack_start(location_input = Entry.new) | |
+ location_input.width_chars = 60 | |
+ location_input.text = @location || '' | |
+ | |
+ dialog.vbox.pack_start(hbox, false) | |
+ | |
+ dialog.signal_connect(:'key-press-event', &DEFAULT_DIALOG_KEY_PRESS_HANDLER) | |
+ dialog.show_all | |
+ dialog.run do |response| | |
+ if response == Dialog::RESPONSE_ACCEPT | |
+ return @location = location_input.text | |
+ end | |
+ end | |
+ return | |
+ ensure | |
+ dialog.destroy if dialog | |
+ end | |
+ end | |
+ | |
+ class << self | |
+ # Starts a JSON Editor. If a block was given, it yields | 
+ # the JSON::Editor::MainWindow instance. | 
+ def start(encoding = 'utf8') # :yield: window | |
+ Gtk.init | |
+ @window = Editor::MainWindow.new(encoding) | |
+ @window.icon_list = [ Editor.fetch_icon('json') ] | |
+ yield @window if block_given? | |
+ @window.show_all | |
+ Gtk.main | |
+ end | |
+ | |
+ # Edit the string _json_ with encoding _encoding_ in the editor. | |
+ def edit(json, encoding = 'utf8') | |
+ start(encoding) do |window| | |
+ window.edit json | |
+ end | |
+ end | |
+ | |
+ attr_reader :window | |
+ end | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/ext.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/ext.rb | |
new file mode 100644 | |
index 0000000..ff4fa42 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/ext.rb | |
@@ -0,0 +1,13 @@ | |
+require 'json/common' | |
+ | |
+module JSON | |
+ # This module holds all the modules/classes that implement JSON's | |
+ # functionality as C extensions. | |
+ module Ext | |
+ require 'json/ext/parser' | |
+ require 'json/ext/generator' | |
+ $DEBUG and warn "Using c extension for JSON." | |
+ JSON.parser = Parser | |
+ JSON.generator = Generator | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/json.xpm b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/json.xpm | |
new file mode 100644 | |
index 0000000..2cb626b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/json.xpm | |
@@ -0,0 +1,1499 @@ | |
+/* XPM */ | |
+static char * json_xpm[] = { | |
+"64 64 1432 2", | |
+" c None", | |
+". c #641839", | |
+"+ c #CF163C", | |
+"@ c #D31C3B", | |
+"# c #E11A38", | |
+"$ c #5F242D", | |
+"% c #320C22", | |
+"& c #9B532D", | |
+"* c #F32E34", | |
+"= c #820F33", | |
+"- c #4B0F34", | |
+"; c #8E1237", | |
+"> c #944029", | |
+", c #961325", | |
+"' c #A00C24", | |
+") c #872C23", | |
+"! c #694021", | |
+"~ c #590D1F", | |
+"{ c #420528", | |
+"] c #D85A2D", | |
+"^ c #7E092B", | |
+"/ c #0E0925", | |
+"( c #0D081F", | |
+"_ c #0F081E", | |
+": c #12071F", | |
+"< c #360620", | |
+"[ c #682A21", | |
+"} c #673F21", | |
+"| c #780E21", | |
+"1 c #A82320", | |
+"2 c #8D1D1F", | |
+"3 c #970127", | |
+"4 c #0D0123", | |
+"5 c #0D0324", | |
+"6 c #3B1E28", | |
+"7 c #C28429", | |
+"8 c #0C0523", | |
+"9 c #0C041E", | |
+"0 c #0E031A", | |
+"a c #11031A", | |
+"b c #13031B", | |
+"c c #13031C", | |
+"d c #11031D", | |
+"e c #19051E", | |
+"f c #390E20", | |
+"g c #9C0C20", | |
+"h c #C00721", | |
+"i c #980320", | |
+"j c #14031E", | |
+"k c #CD9F32", | |
+"l c #C29F2E", | |
+"m c #0F0325", | |
+"n c #0D0321", | |
+"o c #0E0324", | |
+"p c #D08329", | |
+"q c #9D1B27", | |
+"r c #1C0320", | |
+"s c #0D011A", | |
+"t c #120117", | |
+"u c #130017", | |
+"v c #150018", | |
+"w c #160119", | |
+"x c #17021A", | |
+"y c #15021B", | |
+"z c #11021E", | |
+"A c #0F021F", | |
+"B c #8C1821", | |
+"C c #CF4522", | |
+"D c #831821", | |
+"E c #BA7033", | |
+"F c #EDB339", | |
+"G c #C89733", | |
+"H c #280727", | |
+"I c #0F051F", | |
+"J c #0E0420", | |
+"K c #591F27", | |
+"L c #E47129", | |
+"M c #612224", | |
+"N c #0C021D", | |
+"O c #120018", | |
+"P c #140017", | |
+"Q c #170017", | |
+"R c #190018", | |
+"S c #1B0019", | |
+"T c #1B011A", | |
+"U c #18011B", | |
+"V c #15011C", | |
+"W c #12031E", | |
+"X c #460A21", | |
+"Y c #A13823", | |
+"Z c #784323", | |
+"` c #5A0C21", | |
+" . c #BC4530", | |
+".. c #EB5B38", | |
+"+. c #CE4E3B", | |
+"@. c #DD9334", | |
+"#. c #751A27", | |
+"$. c #11071E", | |
+"%. c #0F041C", | |
+"&. c #1E0824", | |
+"*. c #955A28", | |
+"=. c #9A5027", | |
+"-. c #1E0321", | |
+";. c #11011A", | |
+">. c #140018", | |
+",. c #180018", | |
+"'. c #1F001A", | |
+"). c #20001B", | |
+"!. c #1E001A", | |
+"~. c #1B001A", | |
+"{. c #16021B", | |
+"]. c #16041E", | |
+"^. c #220622", | |
+"/. c #5F3525", | |
+"(. c #DE5724", | |
+"_. c #611021", | |
+":. c #0F0925", | |
+"<. c #D1892E", | |
+"[. c #F27036", | |
+"}. c #EC633B", | |
+"|. c #DA293C", | |
+"1. c #E64833", | |
+"2. c #912226", | |
+"3. c #11081C", | |
+"4. c #110419", | |
+"5. c #0F041E", | |
+"6. c #451425", | |
+"7. c #BF6F28", | |
+"8. c #332225", | |
+"9. c #0E021E", | |
+"0. c #13001B", | |
+"a. c #17001A", | |
+"b. c #1C001B", | |
+"c. c #21001C", | |
+"d. c #23001C", | |
+"e. c #21001B", | |
+"f. c #19021A", | |
+"g. c #17041E", | |
+"h. c #150721", | |
+"i. c #602424", | |
+"j. c #D51223", | |
+"k. c #540820", | |
+"l. c #D04D2D", | |
+"m. c #EA8933", | |
+"n. c #875637", | |
+"o. c #88543A", | |
+"p. c #E5923A", | |
+"q. c #891931", | |
+"r. c #130B25", | |
+"s. c #10051B", | |
+"t. c #110217", | |
+"u. c #12021A", | |
+"v. c #761826", | |
+"w. c #E2A728", | |
+"x. c #300224", | |
+"y. c #10011E", | |
+"z. c #16001B", | |
+"A. c #1B001B", | |
+"B. c #21001A", | |
+"C. c #1E0019", | |
+"D. c #1D0019", | |
+"E. c #1A011A", | |
+"F. c #17031C", | |
+"G. c #120720", | |
+"H. c #4E0822", | |
+"I. c #670721", | |
+"J. c #C07630", | |
+"K. c #F59734", | |
+"L. c #BE1B35", | |
+"M. c #0E1435", | |
+"N. c #522037", | |
+"O. c #DB8039", | |
+"P. c #D45933", | |
+"Q. c #420927", | |
+"R. c #0F041D", | |
+"S. c #140118", | |
+"T. c #13021D", | |
+"U. c #100423", | |
+"V. c #7B6227", | |
+"W. c #C04326", | |
+"X. c #0E0020", | |
+"Y. c #13001D", | |
+"Z. c #18001B", | |
+"`. c #1E001B", | |
+" + c #22001C", | |
+".+ c #22001B", | |
+"++ c #1B011B", | |
+"@+ c #16041D", | |
+"#+ c #130520", | |
+"$+ c #860521", | |
+"%+ c #710520", | |
+"&+ c #670A2A", | |
+"*+ c #A66431", | |
+"=+ c #E97536", | |
+"-+ c #F8833A", | |
+";+ c #F77A3A", | |
+">+ c #C45337", | |
+",+ c #0A1C35", | |
+"'+ c #993638", | |
+")+ c #F7863B", | |
+"!+ c #F49736", | |
+"~+ c #94462B", | |
+"{+ c #0E031F", | |
+"]+ c #130119", | |
+"^+ c #160018", | |
+"/+ c #16011B", | |
+"(+ c #15021F", | |
+"_+ c #120123", | |
+":+ c #A65C28", | |
+"<+ c #5C4D23", | |
+"[+ c #0F001F", | |
+"}+ c #14001D", | |
+"|+ c #1A001B", | |
+"1+ c #1F001B", | |
+"2+ c #24001D", | |
+"3+ c #25001D", | |
+"4+ c #24001C", | |
+"5+ c #1F001C", | |
+"6+ c #1A011C", | |
+"7+ c #16021E", | |
+"8+ c #3F0421", | |
+"9+ c #BC0522", | |
+"0+ c #1C041E", | |
+"a+ c #7F5531", | |
+"b+ c #E68A38", | |
+"c+ c #F8933E", | |
+"d+ c #FA7942", | |
+"e+ c #FB7543", | |
+"f+ c #FA6F41", | |
+"g+ c #F1793D", | |
+"h+ c #7D3B3A", | |
+"i+ c #28263B", | |
+"j+ c #D45441", | |
+"k+ c #F8A238", | |
+"l+ c #996B2D", | |
+"m+ c #0E0421", | |
+"n+ c #12011A", | |
+"o+ c #180019", | |
+"p+ c #17001C", | |
+"q+ c #12001F", | |
+"r+ c #4C2B2A", | |
+"s+ c #DB8130", | |
+"t+ c #540023", | |
+"u+ c #0F0120", | |
+"v+ c #16011C", | |
+"w+ c #22001D", | |
+"x+ c #25001F", | |
+"y+ c #26001F", | |
+"z+ c #25001E", | |
+"A+ c #24001E", | |
+"B+ c #1D001C", | |
+"C+ c #18011D", | |
+"D+ c #16031F", | |
+"E+ c #3C0522", | |
+"F+ c #9B0821", | |
+"G+ c #13041E", | |
+"H+ c #F6462E", | |
+"I+ c #E6AB37", | |
+"J+ c #E7A03E", | |
+"K+ c #FA9F44", | |
+"L+ c #FB8A48", | |
+"M+ c #FD7A4A", | |
+"N+ c #FD794A", | |
+"O+ c #FD7748", | |
+"P+ c #FD7E45", | |
+"Q+ c #FD8343", | |
+"R+ c #FB5D42", | |
+"S+ c #6E3A40", | |
+"T+ c #EE8A37", | |
+"U+ c #7E252B", | |
+"V+ c #100520", | |
+"W+ c #13011A", | |
+"X+ c #170019", | |
+"Y+ c #15001C", | |
+"Z+ c #0F0020", | |
+"`+ c #564427", | |
+" @ c #E0BA29", | |
+".@ c #5E2B25", | |
+"+@ c #10011F", | |
+"@@ c #17011C", | |
+"#@ c #1E001D", | |
+"$@ c #23001F", | |
+"%@ c #250020", | |
+"&@ c #24001F", | |
+"*@ c #23001E", | |
+"=@ c #21001E", | |
+"-@ c #1B001C", | |
+";@ c #17021D", | |
+">@ c #14041E", | |
+",@ c #AC0B25", | |
+"'@ c #5E1420", | |
+")@ c #F28635", | |
+"!@ c #C2733E", | |
+"~@ c #984C44", | |
+"{@ c #EA9148", | |
+"]@ c #FB844B", | |
+"^@ c #FD7E4C", | |
+"/@ c #FE7E4C", | |
+"(@ c #FE7E4B", | |
+"_@ c #FE7749", | |
+":@ c #FD7148", | |
+"<@ c #FB7D46", | |
+"[@ c #F89641", | |
+"}@ c #B95634", | |
+"|@ c #0D0927", | |
+"1@ c #11041D", | |
+"2@ c #150119", | |
+"3@ c #180017", | |
+"4@ c #16001A", | |
+"5@ c #13001E", | |
+"6@ c #110023", | |
+"7@ c #944C29", | |
+"8@ c #EE6229", | |
+"9@ c #3D0324", | |
+"0@ c #12021F", | |
+"a@ c #19011D", | |
+"b@ c #21001F", | |
+"c@ c #22001F", | |
+"d@ c #20001E", | |
+"e@ c #1F001D", | |
+"f@ c #1C001C", | |
+"g@ c #19011C", | |
+"h@ c #3D1621", | |
+"i@ c #B53622", | |
+"j@ c #31061F", | |
+"k@ c #841D34", | |
+"l@ c #F2703F", | |
+"m@ c #C14445", | |
+"n@ c #E67349", | |
+"o@ c #FB8E4B", | |
+"p@ c #FD834C", | |
+"q@ c #FE834D", | |
+"r@ c #FE834C", | |
+"s@ c #FE804C", | |
+"t@ c #FD814B", | |
+"u@ c #FB7D49", | |
+"v@ c #F79B43", | |
+"w@ c #AF1234", | |
+"x@ c #0D0625", | |
+"y@ c #13021C", | |
+"z@ c #1A0019", | |
+"A@ c #190019", | |
+"B@ c #410225", | |
+"C@ c #D39729", | |
+"D@ c #AA5927", | |
+"E@ c #0E0422", | |
+"F@ c #15021E", | |
+"G@ c #1A011D", | |
+"H@ c #1D001D", | |
+"I@ c #15031D", | |
+"J@ c #240820", | |
+"K@ c #A01023", | |
+"L@ c #670B21", | |
+"M@ c #3D0D33", | |
+"N@ c #E63C3E", | |
+"O@ c #EF7C45", | |
+"P@ c #F59048", | |
+"Q@ c #FB944A", | |
+"R@ c #FD904A", | |
+"S@ c #FE8E4B", | |
+"T@ c #FE854A", | |
+"U@ c #FE854B", | |
+"V@ c #FE884C", | |
+"W@ c #FC954B", | |
+"X@ c #F8AB45", | |
+"Y@ c #C37A35", | |
+"Z@ c #0D0425", | |
+"`@ c #13011B", | |
+" # c #170018", | |
+".# c #1A0018", | |
+"+# c #1C0019", | |
+"@# c #15001B", | |
+"## c #100120", | |
+"$# c #311F25", | |
+"%# c #E68E28", | |
+"&# c #7A1425", | |
+"*# c #130321", | |
+"=# c #17011E", | |
+"-# c #1A001D", | |
+";# c #19001B", | |
+"># c #16021C", | |
+",# c #130521", | |
+"'# c #6F3123", | |
+")# c #6D3022", | |
+"!# c #C89433", | |
+"~# c #EA7E3E", | |
+"{# c #DB2943", | |
+"]# c #EF7745", | |
+"^# c #FB8544", | |
+"/# c #FD9A43", | |
+"(# c #FE9941", | |
+"_# c #FE9D43", | |
+":# c #FEA548", | |
+"<# c #FEAE49", | |
+"[# c #FCB944", | |
+"}# c #CA9F35", | |
+"|# c #0E0225", | |
+"1# c #11001B", | |
+"2# c #160019", | |
+"3# c #12011B", | |
+"4# c #0F0220", | |
+"5# c #351D26", | |
+"6# c #D85B28", | |
+"7# c #6C0F26", | |
+"8# c #190121", | |
+"9# c #1B001E", | |
+"0# c #1A001C", | |
+"a# c #1D001B", | |
+"b# c #130220", | |
+"c# c #703A23", | |
+"d# c #713A23", | |
+"e# c #140327", | |
+"f# c #411B36", | |
+"g# c #C8713E", | |
+"h# c #7A3A3F", | |
+"i# c #CE2C3C", | |
+"j# c #E77338", | |
+"k# c #9C6535", | |
+"l# c #9C6233", | |
+"m# c #9C6332", | |
+"n# c #9C6A35", | |
+"o# c #C37D3C", | |
+"p# c #FEAC41", | |
+"q# c #FEC23E", | |
+"r# c #826330", | |
+"s# c #100122", | |
+"t# c #120019", | |
+"u# c #150017", | |
+"v# c #190017", | |
+"w# c #1B0018", | |
+"x# c #12001A", | |
+"y# c #10021F", | |
+"z# c #1A0326", | |
+"A# c #5F292A", | |
+"B# c #7B4E29", | |
+"C# c #3C0E25", | |
+"D# c #1A0020", | |
+"E# c #14021F", | |
+"F# c #723B23", | |
+"G# c #14001A", | |
+"H# c #58042A", | |
+"I# c #A28337", | |
+"J# c #C8813B", | |
+"K# c #B14B38", | |
+"L# c #761231", | |
+"M# c #5A132A", | |
+"N# c #0D0726", | |
+"O# c #0C0623", | |
+"P# c #0B0723", | |
+"Q# c #0B0A26", | |
+"R# c #321C2D", | |
+"S# c #C45B33", | |
+"T# c #FEBB33", | |
+"U# c #13052A", | |
+"V# c #13011F", | |
+"W# c #160017", | |
+"X# c #15001A", | |
+"Y# c #12001D", | |
+"Z# c #94062A", | |
+"`# c #630D2C", | |
+" $ c #85292B", | |
+".$ c #AA5E29", | |
+"+$ c #1F0123", | |
+"@$ c #19011F", | |
+"#$ c #1E001C", | |
+"$$ c #15031F", | |
+"%$ c #712122", | |
+"&$ c #712223", | |
+"*$ c #14011B", | |
+"=$ c #110321", | |
+"-$ c #AF0C2B", | |
+";$ c #E7D534", | |
+">$ c #EAC934", | |
+",$ c #84582D", | |
+"'$ c #1B0824", | |
+")$ c #11041E", | |
+"!$ c #10021B", | |
+"~$ c #100119", | |
+"{$ c #100218", | |
+"]$ c #0F041A", | |
+"^$ c #0E0720", | |
+"/$ c #2C1026", | |
+"($ c #D8A328", | |
+"_$ c #140322", | |
+":$ c #160016", | |
+"<$ c #14001F", | |
+"[$ c #120024", | |
+"}$ c #100128", | |
+"|$ c #3C032F", | |
+"1$ c #2C062E", | |
+"2$ c #29022B", | |
+"3$ c #A31D29", | |
+"4$ c #976A25", | |
+"5$ c #1A0321", | |
+"6$ c #17031E", | |
+"7$ c #1B021D", | |
+"8$ c #20001C", | |
+"9$ c #14041F", | |
+"0$ c #703422", | |
+"a$ c #6F3522", | |
+"b$ c #8D0328", | |
+"c$ c #920329", | |
+"d$ c #0F0326", | |
+"e$ c #100321", | |
+"f$ c #11021B", | |
+"g$ c #130117", | |
+"h$ c #140016", | |
+"i$ c #150015", | |
+"j$ c #140015", | |
+"k$ c #130116", | |
+"l$ c #120219", | |
+"m$ c #11031C", | |
+"n$ c #12031D", | |
+"o$ c #170016", | |
+"p$ c #160020", | |
+"q$ c #250029", | |
+"r$ c #670033", | |
+"s$ c #DCA238", | |
+"t$ c #F5C736", | |
+"u$ c #9A732E", | |
+"v$ c #110227", | |
+"w$ c #110324", | |
+"x$ c #811924", | |
+"y$ c #A04323", | |
+"z$ c #250721", | |
+"A$ c #1A041F", | |
+"B$ c #1E011D", | |
+"C$ c #1C011C", | |
+"D$ c #18031D", | |
+"E$ c #130721", | |
+"F$ c #6F3623", | |
+"G$ c #6B3622", | |
+"H$ c #1A001A", | |
+"I$ c #14011F", | |
+"J$ c #12011E", | |
+"K$ c #11011C", | |
+"L$ c #140117", | |
+"M$ c #170015", | |
+"N$ c #150016", | |
+"O$ c #120119", | |
+"P$ c #11011B", | |
+"Q$ c #11001A", | |
+"R$ c #130018", | |
+"S$ c #170118", | |
+"T$ c #170119", | |
+"U$ c #18021E", | |
+"V$ c #1A0126", | |
+"W$ c #6F2332", | |
+"X$ c #E5563B", | |
+"Y$ c #F1B83F", | |
+"Z$ c #F6CC38", | |
+"`$ c #9D7A2D", | |
+" % c #130123", | |
+".% c #130320", | |
+"+% c #2A0721", | |
+"@% c #B00E24", | |
+"#% c #7D0B23", | |
+"$% c #1F0522", | |
+"%% c #1E0220", | |
+"&% c #1D011E", | |
+"*% c #1A031E", | |
+"=% c #15051F", | |
+"-% c #241322", | |
+";% c #A32F23", | |
+">% c #670E21", | |
+",% c #1C001A", | |
+"'% c #19001A", | |
+")% c #180016", | |
+"!% c #160118", | |
+"~% c #140219", | |
+"{% c #11021C", | |
+"]% c #10021E", | |
+"^% c #0F011D", | |
+"/% c #170117", | |
+"(% c #160219", | |
+"_% c #17041D", | |
+":% c #190523", | |
+"<% c #8C042E", | |
+"[% c #B65838", | |
+"}% c #E9D73F", | |
+"|% c #EED43E", | |
+"1% c #D85538", | |
+"2% c #493129", | |
+"3% c #130120", | |
+"4% c #15021D", | |
+"5% c #330822", | |
+"6% c #8A0825", | |
+"7% c #3C0424", | |
+"8% c #1E0322", | |
+"9% c #1C0321", | |
+"0% c #180421", | |
+"a% c #130822", | |
+"b% c #AF2D24", | |
+"c% c #BC5623", | |
+"d% c #2F071F", | |
+"e% c #1A041C", | |
+"f% c #1C031C", | |
+"g% c #1D011C", | |
+"h% c #160117", | |
+"i% c #150419", | |
+"j% c #12081D", | |
+"k% c #0F0923", | |
+"l% c #A77027", | |
+"m% c #A60525", | |
+"n% c #11021A", | |
+"o% c #130218", | |
+"p% c #150319", | |
+"q% c #16061D", | |
+"r% c #180923", | |
+"s% c #9C1D2B", | |
+"t% c #A32636", | |
+"u% c #A66E3B", | |
+"v% c #4B2E3C", | |
+"w% c #412C36", | |
+"x% c #36012D", | |
+"y% c #140123", | |
+"z% c #17001E", | |
+"A% c #19011B", | |
+"B% c #1A0421", | |
+"C% c #340425", | |
+"D% c #9E0326", | |
+"E% c #1F0424", | |
+"F% c #1C0524", | |
+"G% c #180724", | |
+"H% c #A91024", | |
+"I% c #D55D24", | |
+"J% c #90071E", | |
+"K% c #3C051D", | |
+"L% c #1C021C", | |
+"M% c #1C011A", | |
+"N% c #1D001A", | |
+"O% c #160116", | |
+"P% c #150216", | |
+"Q% c #140217", | |
+"R% c #140618", | |
+"S% c #120D1D", | |
+"T% c #231925", | |
+"U% c #B16A2E", | |
+"V% c #FDAC34", | |
+"W% c #D58631", | |
+"X% c #280E2A", | |
+"Y% c #0D0A23", | |
+"Z% c #0F0920", | |
+"`% c #120C21", | |
+" & c #1F1026", | |
+".& c #A3352E", | |
+"+& c #EE9F36", | |
+"@& c #5D2A3C", | |
+"#& c #960D3C", | |
+"$& c #970638", | |
+"%& c #A00330", | |
+"&& c #4D0126", | |
+"*& c #1C001F", | |
+"=& c #280120", | |
+"-& c #290223", | |
+";& c #1F0425", | |
+">& c #260726", | |
+",& c #340A26", | |
+"'& c #850925", | |
+")& c #3A0823", | |
+"!& c #82071D", | |
+"~& c #5E071D", | |
+"{& c #18051C", | |
+"]& c #18021A", | |
+"^& c #190118", | |
+"/& c #160217", | |
+"(& c #150418", | |
+"_& c #130618", | |
+":& c #110718", | |
+"<& c #10081A", | |
+"[& c #110D1D", | |
+"}& c #291C24", | |
+"|& c #A73B2D", | |
+"1& c #FD6B36", | |
+"2& c #FD853C", | |
+"3& c #FD863B", | |
+"4& c #C24A35", | |
+"5& c #6B442F", | |
+"6& c #6D302D", | |
+"7& c #6E252E", | |
+"8& c #8E3B32", | |
+"9& c #DE7739", | |
+"0& c #F48E3F", | |
+"a& c #DD8D41", | |
+"b& c #854F3D", | |
+"c& c #7E2D35", | |
+"d& c #33082B", | |
+"e& c #1C0222", | |
+"f& c #20001F", | |
+"g& c #1F0222", | |
+"h& c #1A0524", | |
+"i& c #440C27", | |
+"j& c #BC1427", | |
+"k& c #20041B", | |
+"l& c #53061C", | |
+"m& c #25071B", | |
+"n& c #11061A", | |
+"o& c #130418", | |
+"p& c #140317", | |
+"q& c #150217", | |
+"r& c #160318", | |
+"s& c #12051B", | |
+"t& c #100C1D", | |
+"u& c #0E101E", | |
+"v& c #0C121F", | |
+"w& c #0C1321", | |
+"x& c #781725", | |
+"y& c #B25D2C", | |
+"z& c #FA6335", | |
+"A& c #FD633C", | |
+"B& c #FE6D42", | |
+"C& c #FE7C42", | |
+"D& c #FE813F", | |
+"E& c #FE873C", | |
+"F& c #FD743B", | |
+"G& c #FB683B", | |
+"H& c #FA7A3E", | |
+"I& c #F98242", | |
+"J& c #F97844", | |
+"K& c #F98943", | |
+"L& c #F79C3D", | |
+"M& c #A25133", | |
+"N& c #280B28", | |
+"O& c #1D021F", | |
+"P& c #1F011C", | |
+"Q& c #280321", | |
+"R& c #1C0724", | |
+"S& c #3F1C27", | |
+"T& c #D33C27", | |
+"U& c #0E061B", | |
+"V& c #0C091C", | |
+"W& c #0C0A1B", | |
+"X& c #0E091A", | |
+"Y& c #11081B", | |
+"Z& c #100A20", | |
+"`& c #0E0D23", | |
+" * c #551227", | |
+".* c #B21829", | |
+"+* c #C42329", | |
+"@* c #C62C29", | |
+"#* c #C55429", | |
+"$* c #E76F2B", | |
+"%* c #F14232", | |
+"&* c #F95E3A", | |
+"** c #FC6740", | |
+"=* c #FE6E45", | |
+"-* c #FE7246", | |
+";* c #FE7545", | |
+">* c #FE7744", | |
+",* c #FD7745", | |
+"'* c #FD7845", | |
+")* c #FD7847", | |
+"!* c #FD7948", | |
+"~* c #FD7B44", | |
+"{* c #FC7C3B", | |
+"]* c #6F3130", | |
+"^* c #140B24", | |
+"/* c #19031D", | |
+"(* c #1C011B", | |
+"_* c #5A011F", | |
+":* c #B70421", | |
+"<* c #380824", | |
+"[* c #3E2626", | |
+"}* c #9F5626", | |
+"|* c #13051E", | |
+"1* c #360A21", | |
+"2* c #361223", | |
+"3* c #371724", | |
+"4* c #381824", | |
+"5* c #3B1524", | |
+"6* c #3E1E26", | |
+"7* c #471A29", | |
+"8* c #DB252E", | |
+"9* c #ED2733", | |
+"0* c #EE5436", | |
+"a* c #F04237", | |
+"b* c #F33934", | |
+"c* c #F53D2F", | |
+"d* c #D7312B", | |
+"e* c #AF212B", | |
+"f* c #3A2C31", | |
+"g* c #F65F39", | |
+"h* c #FB6F41", | |
+"i* c #FD6D45", | |
+"j* c #FE7047", | |
+"k* c #FE7647", | |
+"l* c #FE7847", | |
+"m* c #FE7848", | |
+"n* c #FE7748", | |
+"o* c #FE7948", | |
+"p* c #FE7C48", | |
+"q* c #FE7C47", | |
+"r* c #FE7642", | |
+"s* c #FE7439", | |
+"t* c #6D332C", | |
+"u* c #100B21", | |
+"v* c #16031B", | |
+"w* c #2B001B", | |
+"x* c #22011F", | |
+"y* c #220521", | |
+"z* c #1B0A23", | |
+"A* c #421425", | |
+"B* c #951924", | |
+"C* c #381023", | |
+"D* c #E94028", | |
+"E* c #E7302B", | |
+"F* c #EF432D", | |
+"G* c #F4302E", | |
+"H* c #F32C30", | |
+"I* c #CB4432", | |
+"J* c #DD3235", | |
+"K* c #EF4B3A", | |
+"L* c #F0333E", | |
+"M* c #CC3D3F", | |
+"N* c #E4313C", | |
+"O* c #F34834", | |
+"P* c #D13E2C", | |
+"Q* c #431825", | |
+"R* c #0E1424", | |
+"S* c #3C202C", | |
+"T* c #F15537", | |
+"U* c #F97140", | |
+"V* c #FC6E45", | |
+"W* c #FE7547", | |
+"X* c #FE7947", | |
+"Y* c #FE7B48", | |
+"Z* c #FE7D48", | |
+"`* c #FE8047", | |
+" = c #FE7A42", | |
+".= c #FE7A38", | |
+"+= c #6D442B", | |
+"@= c #0F0B21", | |
+"#= c #15031A", | |
+"$= c #49001B", | |
+"%= c #2F001C", | |
+"&= c #21021E", | |
+"*= c #220620", | |
+"== c #1B0D23", | |
+"-= c #641625", | |
+";= c #951823", | |
+">= c #390F25", | |
+",= c #AC3A2A", | |
+"'= c #B6492E", | |
+")= c #ED7531", | |
+"!= c #F45A34", | |
+"~= c #F54C36", | |
+"{= c #C72D39", | |
+"]= c #DE283C", | |
+"^= c #F33B40", | |
+"/= c #F34142", | |
+"(= c #D0393F", | |
+"_= c #E72E39", | |
+":= c #DB3C2E", | |
+"<= c #461724", | |
+"[= c #0F0D1E", | |
+"}= c #140B1E", | |
+"|= c #341427", | |
+"1= c #CB4834", | |
+"2= c #F7743F", | |
+"3= c #FB7145", | |
+"4= c #FE7747", | |
+"5= c #FE7A47", | |
+"6= c #FF7B48", | |
+"7= c #FF7C48", | |
+"8= c #FE7F47", | |
+"9= c #FE8247", | |
+"0= c #FE8642", | |
+"a= c #FE8439", | |
+"b= c #6D442D", | |
+"c= c #0F0A21", | |
+"d= c #14031A", | |
+"e= c #20031D", | |
+"f= c #210821", | |
+"g= c #191024", | |
+"h= c #CC1C25", | |
+"i= c #961423", | |
+"j= c #2C162C", | |
+"k= c #BD242E", | |
+"l= c #EF2C31", | |
+"m= c #F54C34", | |
+"n= c #F34037", | |
+"o= c #F5353A", | |
+"p= c #F7413D", | |
+"q= c #F8423D", | |
+"r= c #F93A39", | |
+"s= c #F95731", | |
+"t= c #341425", | |
+"u= c #110A1D", | |
+"v= c #140619", | |
+"w= c #18051B", | |
+"x= c #200F26", | |
+"y= c #864833", | |
+"z= c #F8773F", | |
+"A= c #FC7445", | |
+"B= c #FF7E48", | |
+"C= c #FF7E49", | |
+"D= c #FF7D49", | |
+"E= c #FF7D48", | |
+"F= c #FE8347", | |
+"G= c #FE8743", | |
+"H= c #FE893B", | |
+"I= c #6E452F", | |
+"J= c #100E23", | |
+"K= c #14041A", | |
+"L= c #55041D", | |
+"M= c #540921", | |
+"N= c #161124", | |
+"O= c #CE6A25", | |
+"P= c #3F1129", | |
+"Q= c #170A29", | |
+"R= c #0F0F29", | |
+"S= c #15132B", | |
+"T= c #1E182D", | |
+"U= c #A82B3D", | |
+"V= c #CB6633", | |
+"W= c #CC6932", | |
+"X= c #CC3D2D", | |
+"Y= c #331225", | |
+"Z= c #0F091C", | |
+"`= c #120417", | |
+" - c #160216", | |
+".- c #190419", | |
+"+- c #210F26", | |
+"@- c #8C4934", | |
+"#- c #F97A40", | |
+"$- c #FC7545", | |
+"%- c #FF7B49", | |
+"&- c #FE7D46", | |
+"*- c #FE7E43", | |
+"=- c #FD7B3E", | |
+"-- c #FA6934", | |
+";- c #532328", | |
+">- c #130B1D", | |
+",- c #150519", | |
+"'- c #14041C", | |
+")- c #120920", | |
+"!- c #C43624", | |
+"~- c #A21E23", | |
+"{- c #F87C30", | |
+"]- c #C9302D", | |
+"^- c #300F2A", | |
+"/- c #591129", | |
+"(- c #171328", | |
+"_- c #171628", | |
+":- c #141829", | |
+"<- c #101A2B", | |
+"[- c #0F172B", | |
+"}- c #0F1226", | |
+"|- c #0E0C20", | |
+"1- c #100619", | |
+"2- c #140316", | |
+"3- c #19051B", | |
+"4- c #3C1428", | |
+"5- c #E04B36", | |
+"6- c #FA7B41", | |
+"7- c #FD7346", | |
+"8- c #FE7548", | |
+"9- c #FF7849", | |
+"0- c #FF7749", | |
+"a- c #FE7B47", | |
+"b- c #FE7945", | |
+"c- c #FC7740", | |
+"d- c #FA7E39", | |
+"e- c #C1432F", | |
+"f- c #131523", | |
+"g- c #130A1C", | |
+"h- c #420621", | |
+"i- c #D08423", | |
+"j- c #F87739", | |
+"k- c #C03D37", | |
+"l- c #962B34", | |
+"m- c #A14332", | |
+"n- c #E54B30", | |
+"o- c #9E3E2F", | |
+"p- c #7F262E", | |
+"q- c #922D2E", | |
+"r- c #9C4B2E", | |
+"s- c #65212C", | |
+"t- c #101628", | |
+"u- c #101022", | |
+"v- c #11091C", | |
+"w- c #130619", | |
+"x- c #160A1E", | |
+"y- c #43252C", | |
+"z- c #F66439", | |
+"A- c #FA6942", | |
+"B- c #FD6C47", | |
+"C- c #FE6E48", | |
+"D- c #FE6F48", | |
+"E- c #FE7049", | |
+"F- c #FE714A", | |
+"G- c #FE744A", | |
+"H- c #FE7846", | |
+"I- c #FD7243", | |
+"J- c #FC703E", | |
+"K- c #FA6C37", | |
+"L- c #81312B", | |
+"M- c #121123", | |
+"N- c #15071D", | |
+"O- c #16031A", | |
+"P- c #17021B", | |
+"Q- c #8F3D22", | |
+"R- c #F8393E", | |
+"S- c #E42A3D", | |
+"T- c #E7473B", | |
+"U- c #FB503B", | |
+"V- c #FB4F3A", | |
+"W- c #F95439", | |
+"X- c #ED4C38", | |
+"Y- c #F45938", | |
+"Z- c #FB6537", | |
+"`- c #EA5236", | |
+" ; c #CE6232", | |
+".; c #CD392C", | |
+"+; c #181425", | |
+"@; c #120F21", | |
+"#; c #130D20", | |
+"$; c #151225", | |
+"%; c #903431", | |
+"&; c #F8703D", | |
+"*; c #FB6344", | |
+"=; c #FD6748", | |
+"-; c #FE6849", | |
+";; c #FE6949", | |
+">; c #FE6A49", | |
+",; c #FE6C4A", | |
+"'; c #FE704A", | |
+"); c #FE734A", | |
+"!; c #FE7449", | |
+"~; c #FE7347", | |
+"{; c #FE7145", | |
+"]; c #FD6C42", | |
+"^; c #FD753D", | |
+"/; c #F36E35", | |
+"(; c #CB452C", | |
+"_; c #600D24", | |
+":; c #1C061F", | |
+"<; c #1E031F", | |
+"[; c #5B3821", | |
+"}; c #CE9822", | |
+"|; c #FA4341", | |
+"1; c #FB4341", | |
+"2; c #FC4541", | |
+"3; c #FC4542", | |
+"4; c #FC4143", | |
+"5; c #FC4D42", | |
+"6; c #FB5042", | |
+"7; c #FB5342", | |
+"8; c #FC5242", | |
+"9; c #FD4F40", | |
+"0; c #FD503E", | |
+"a; c #FB6339", | |
+"b; c #F45E33", | |
+"c; c #A12A2E", | |
+"d; c #401E2C", | |
+"e; c #452D2F", | |
+"f; c #F74F38", | |
+"g; c #FA5940", | |
+"h; c #FC6245", | |
+"i; c #FE6447", | |
+"j; c #FE6449", | |
+"k; c #FE6549", | |
+"l; c #FE6749", | |
+"m; c #FE6B49", | |
+"n; c #FE6D49", | |
+"o; c #FE6D48", | |
+"p; c #FE6D47", | |
+"q; c #FE6D45", | |
+"r; c #FE6C44", | |
+"s; c #FE6A42", | |
+"t; c #FE663C", | |
+"u; c #FC6233", | |
+"v; c #752129", | |
+"w; c #1F0922", | |
+"x; c #750520", | |
+"y; c #81061F", | |
+"z; c #FA3D42", | |
+"A; c #FB4142", | |
+"B; c #FD4543", | |
+"C; c #FD4844", | |
+"D; c #FD4A45", | |
+"E; c #FD4D45", | |
+"F; c #FD5045", | |
+"G; c #FD5345", | |
+"H; c #FE5346", | |
+"I; c #FE5445", | |
+"J; c #FD5444", | |
+"K; c #FC4F41", | |
+"L; c #FA513D", | |
+"M; c #F95339", | |
+"N; c #F63736", | |
+"O; c #F75737", | |
+"P; c #F95F3B", | |
+"Q; c #FB5840", | |
+"R; c #FD5F43", | |
+"S; c #FE6345", | |
+"T; c #FE6547", | |
+"U; c #FE6548", | |
+"V; c #FE6448", | |
+"W; c #FE6248", | |
+"X; c #FE6348", | |
+"Y; c #FE6748", | |
+"Z; c #FE6848", | |
+"`; c #FE6846", | |
+" > c #FE6A45", | |
+".> c #FE6D43", | |
+"+> c #FE703F", | |
+"@> c #FC6F36", | |
+"#> c #6F302B", | |
+"$> c #140A22", | |
+"%> c #FA3B42", | |
+"&> c #FC4243", | |
+"*> c #FD4744", | |
+"=> c #FE4A45", | |
+"-> c #FE4C47", | |
+";> c #FE4D47", | |
+">> c #FE5047", | |
+",> c #FE5347", | |
+"'> c #FE5447", | |
+")> c #FD5246", | |
+"!> c #FB503F", | |
+"~> c #FA543D", | |
+"{> c #9B3D3B", | |
+"]> c #A3433B", | |
+"^> c #F9683D", | |
+"/> c #FC6940", | |
+"(> c #FE6342", | |
+"_> c #FE6645", | |
+":> c #FE6646", | |
+"<> c #FE6147", | |
+"[> c #FE6048", | |
+"}> c #FE6148", | |
+"|> c #FE6746", | |
+"1> c #FE6A46", | |
+"2> c #FE6F45", | |
+"3> c #FE7441", | |
+"4> c #FC7D39", | |
+"5> c #6C422E", | |
+"6> c #0F0F23", | |
+"7> c #FA4142", | |
+"8> c #FC4643", | |
+"9> c #FE4D46", | |
+"0> c #FE4E47", | |
+"a> c #FE4F48", | |
+"b> c #FE5148", | |
+"c> c #FE5348", | |
+"d> c #FE5548", | |
+"e> c #FE5247", | |
+"f> c #FD5445", | |
+"g> c #FC5544", | |
+"h> c #F96041", | |
+"i> c #D33F3D", | |
+"j> c #392D39", | |
+"k> c #973C38", | |
+"l> c #F94E3A", | |
+"m> c #FD693E", | |
+"n> c #FE6C43", | |
+"o> c #FE6047", | |
+"p> c #FE5D47", | |
+"q> c #FE5E48", | |
+"r> c #FE6948", | |
+"s> c #FE6947", | |
+"t> c #FE6B47", | |
+"u> c #FE6E46", | |
+"v> c #FD6D43", | |
+"w> c #FB723D", | |
+"x> c #D54A33", | |
+"y> c #301C29", | |
+"z> c #FB4A42", | |
+"A> c #FD4B44", | |
+"B> c #FE4F47", | |
+"C> c #FE5048", | |
+"D> c #FE5648", | |
+"E> c #FE5848", | |
+"F> c #FE5747", | |
+"G> c #FE5547", | |
+"H> c #FC5945", | |
+"I> c #F95742", | |
+"J> c #F3543D", | |
+"K> c #A33336", | |
+"L> c #302032", | |
+"M> c #152433", | |
+"N> c #CD3E38", | |
+"O> c #FD5A3F", | |
+"P> c #FE6343", | |
+"Q> c #FE6446", | |
+"R> c #FE6247", | |
+"S> c #FE6A47", | |
+"T> c #FC6542", | |
+"U> c #FB6A3B", | |
+"V> c #FA6D34", | |
+"W> c #D73C2D", | |
+"X> c #442428", | |
+"Y> c #281323", | |
+"Z> c #FD4E42", | |
+"`> c #FD4D43", | |
+" , c #FE4D45", | |
+"., c #FE5248", | |
+"+, c #FE5947", | |
+"@, c #FE5C47", | |
+"#, c #FE5B47", | |
+"$, c #FE5A47", | |
+"%, c #FE5847", | |
+"&, c #FC5C45", | |
+"*, c #F95B43", | |
+"=, c #F3613F", | |
+"-, c #E74F37", | |
+";, c #8C2431", | |
+">, c #161E2F", | |
+",, c #CD4E33", | |
+"', c #FD503A", | |
+"), c #FE5D40", | |
+"!, c #FE6445", | |
+"~, c #FE6946", | |
+"{, c #FE6847", | |
+"], c #FE6747", | |
+"^, c #FD6644", | |
+"/, c #FD6241", | |
+"(, c #FD5B3D", | |
+"_, c #FE6739", | |
+":, c #FE6135", | |
+"<, c #AB4830", | |
+"[, c #733E2A", | |
+"}, c #161224", | |
+"|, c #FC4E42", | |
+"1, c #FE4D44", | |
+"2, c #FE4E46", | |
+"3, c #FE5147", | |
+"4, c #FE5E47", | |
+"5, c #FD5C46", | |
+"6, c #FA5B44", | |
+"7, c #F45441", | |
+"8, c #EB393A", | |
+"9, c #CC3433", | |
+"0, c #47212F", | |
+"a, c #59242F", | |
+"b, c #FC6734", | |
+"c, c #FC6F3A", | |
+"d, c #FC723E", | |
+"e, c #FD6540", | |
+"f, c #FE6442", | |
+"g, c #FE6643", | |
+"h, c #FE6944", | |
+"i, c #FE6546", | |
+"j, c #FE6444", | |
+"k, c #FE6143", | |
+"l, c #FE5E41", | |
+"m, c #FE613F", | |
+"n, c #FE683C", | |
+"o, c #FE7937", | |
+"p, c #A25030", | |
+"q, c #692629", | |
+"r, c #151122", | |
+"s, c #FA573F", | |
+"t, c #FB4D40", | |
+"u, c #FC4F43", | |
+"v, c #FE5246", | |
+"w, c #FF6347", | |
+"x, c #FE5F48", | |
+"y, c #F65942", | |
+"z, c #F0493D", | |
+"A, c #ED3736", | |
+"B, c #73262F", | |
+"C, c #10152C", | |
+"D, c #3B292F", | |
+"E, c #363034", | |
+"F, c #AC3938", | |
+"G, c #FC6B3B", | |
+"H, c #FD763C", | |
+"I, c #FE6D3F", | |
+"J, c #FE6341", | |
+"K, c #FE6642", | |
+"L, c #FE6745", | |
+"M, c #FE6245", | |
+"N, c #FE6244", | |
+"O, c #FE6841", | |
+"P, c #FF683B", | |
+"Q, c #EC7035", | |
+"R, c #D0412D", | |
+"S, c #3A1627", | |
+"T, c #CF3938", | |
+"U, c #F6543C", | |
+"V, c #FB5040", | |
+"W, c #FD5544", | |
+"X, c #FE5A48", | |
+"Y, c #FE5D48", | |
+"Z, c #FE5F47", | |
+"`, c #FF6147", | |
+" ' c #FD5C45", | |
+".' c #FB5B43", | |
+"+' c #FA5A42", | |
+"@' c #F76040", | |
+"#' c #F4623D", | |
+"$' c #F26D38", | |
+"%' c #EC4130", | |
+"&' c #380E2B", | |
+"*' c #13122C", | |
+"=' c #362D31", | |
+"-' c #353435", | |
+";' c #352E37", | |
+">' c #2D3337", | |
+",' c #CC5838", | |
+"'' c #CD6F3A", | |
+")' c #CE6E3D", | |
+"!' c #FE793F", | |
+"~' c #FD7541", | |
+"{' c #FD6243", | |
+"]' c #FE6545", | |
+"^' c #FF6543", | |
+"/' c #FF6240", | |
+"(' c #FE723B", | |
+"_' c #FE8034", | |
+":' c #442D2C", | |
+"<' c #311725", | |
+"[' c #222830", | |
+"}' c #B73B36", | |
+"|' c #F94C3D", | |
+"1' c #FD5543", | |
+"2' c #FE5B48", | |
+"3' c #FF5E47", | |
+"4' c #FE5C48", | |
+"5' c #FC5B44", | |
+"6' c #F95640", | |
+"7' c #C34E3D", | |
+"8' c #A45A3A", | |
+"9' c #F37438", | |
+"0' c #F28935", | |
+"a' c #AF422F", | |
+"b' c #240D2B", | |
+"c' c #88292F", | |
+"d' c #FA8E34", | |
+"e' c #FC7E38", | |
+"f' c #FC5939", | |
+"g' c #694A37", | |
+"h' c #693437", | |
+"i' c #382638", | |
+"j' c #142439", | |
+"k' c #9F483A", | |
+"l' c #C45E3C", | |
+"m' c #FD7240", | |
+"n' c #FF6645", | |
+"o' c #FF6245", | |
+"p' c #FF6045", | |
+"q' c #FF6146", | |
+"r' c #FF6246", | |
+"s' c #FF6446", | |
+"t' c #FF6545", | |
+"u' c #FE763F", | |
+"v' c #FE7237", | |
+"w' c #C65331", | |
+"x' c #3D272A", | |
+"y' c #0D1E2B", | |
+"z' c #683032", | |
+"A' c #F9453A", | |
+"B' c #FD5341", | |
+"C' c #FE5A46", | |
+"D' c #FF5A48", | |
+"E' c #FE5948", | |
+"F' c #FD5A47", | |
+"G' c #FC5D43", | |
+"H' c #F95B3D", | |
+"I' c #713F37", | |
+"J' c #1E2D32", | |
+"K' c #C44531", | |
+"L' c #EF7A2F", | |
+"M' c #6B2E2C", | |
+"N' c #0F0E2C", | |
+"O' c #F56633", | |
+"P' c #FA803A", | |
+"Q' c #FC673E", | |
+"R' c #FD673E", | |
+"S' c #FC6F3C", | |
+"T' c #FA6E3B", | |
+"U' c #C6633A", | |
+"V' c #A06739", | |
+"W' c #835638", | |
+"X' c #381F38", | |
+"Y' c #713B38", | |
+"Z' c #7B503C", | |
+"`' c #FE7741", | |
+" ) c #FE7344", | |
+".) c #FE6D46", | |
+"+) c #FF6946", | |
+"@) c #FF5E46", | |
+"#) c #FF5D46", | |
+"$) c #FF5D47", | |
+"%) c #FF5F48", | |
+"&) c #FF6248", | |
+"*) c #FE6941", | |
+"=) c #FC783C", | |
+"-) c #C46B35", | |
+";) c #892730", | |
+">) c #111629", | |
+",) c #1F2630", | |
+"') c #AD3939", | |
+")) c #FC5D41", | |
+"!) c #FE5946", | |
+"~) c #FF5848", | |
+"{) c #FE5549", | |
+"]) c #FC5E42", | |
+"^) c #FA673B", | |
+"/) c #DB7033", | |
+"() c #392E2B", | |
+"_) c #311A28", | |
+":) c #3C2127", | |
+"<) c #1D1027", | |
+"[) c #92102C", | |
+"}) c #F58336", | |
+"|) c #FA673E", | |
+"1) c #FD6642", | |
+"2) c #FD5A41", | |
+"3) c #FC6D41", | |
+"4) c #FC6D3F", | |
+"5) c #FD683E", | |
+"6) c #F38C39", | |
+"7) c #CE6535", | |
+"8) c #612E34", | |
+"9) c #1D2637", | |
+"0) c #71513E", | |
+"a) c #FF6847", | |
+"b) c #FF5F47", | |
+"c) c #FF5A46", | |
+"d) c #FF5847", | |
+"e) c #FF5748", | |
+"f) c #FF594A", | |
+"g) c #FF5E4B", | |
+"h) c #FE654C", | |
+"i) c #FE694B", | |
+"j) c #FE6B48", | |
+"k) c #FC6A43", | |
+"l) c #F7683E", | |
+"m) c #EC6E39", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" . + @ # $ % ", | |
+" & * = - ; > , ' ) ! ~ ", | |
+" { ] ^ / ( _ : < [ } | 1 2 ", | |
+" 3 4 5 6 7 8 9 0 a b c d e f g h i j ", | |
+" k l m n o p q r s t u v w x y z A B C D ", | |
+" E F G H I J K L M N O P Q R S T U V W X Y Z ` ", | |
+" ...+.@.#.$.%.&.*.=.-.;.>.,.S '.).!.~.{.].^./.(._. ", | |
+" :.<.[.}.|.1.2.3.4.5.6.7.8.9.0.a.b.c.d.e.!.S f.g.h.i.j.k. ", | |
+" l.m.n.o.p.q.r.s.t.u.J v.w.x.y.z.A.c.d.d.B.C.D.E.F.G.H.I. ", | |
+" J.K.L.M.N.O.P.Q.R.t S.T.U.V.W.X.Y.Z.`. +d.d..+B.'.++@+#+$+%+ ", | |
+" &+*+=+-+;+>+,+'+)+!+~+{+]+^+/+(+_+:+<+[+}+|+1+d.2+3+4+d.5+6+7+8+9+0+ ", | |
+" a+b+c+d+e+f+g+h+i+j+k+l+m+n+^+o+p+q+r+s+t+u+v+b.w+x+y+z+A+w+B+C+D+E+F+G+ ", | |
+" H+I+J+K+L+M+N+O+P+Q+R+S+T+U+V+W+Q ,.X+Y+Z+`+ @.@+@@@#@$@%@&@*@=@#@-@;@>@,@'@ ", | |
+" )@!@~@{@]@^@/@(@_@:@<@[@}@|@1@2@3@R ,.4@5@6@7@8@9@0@a@#@b@c@=@d@e@f@g@>@h@i@j@ ", | |
+" k@l@m@n@o@p@q@r@s@t@u@v@w@x@y@^+R S z@[email protected]+B@C@D@E@F@G@H@#@e@#@#@f@g@I@J@K@L@ ", | |
+" M@N@O@P@Q@R@S@T@U@V@W@X@Y@Z@`@ #.#+#+#S A@@###$#%#&#*#=#-#f@B+B+B+f@;#>#,#'#)# ", | |
+" !#~#{#]#^#/#(#(#_#:#<#[#}#|#1#^+.#S +#+#z@2#3#4#5#6#7#8#9#0#A.B+B+a#A.@@b#c#d# ", | |
+" e#f#g#h#i#j#k#l#m#n#o#p#q#r#s#t#u#v#.#w#S R ^+x#y#z#A#B#C#D#-#A.a#`.`.b.g@E#d#F# ", | |
+" G#0@H#I#J#K#L#M#N#O#P#Q#R#S#T#U#V#>.W#3@v#R R X+X#Y#s#Z#`# $.$+$@$g@f@5+5+#$6+$$%$&$ ", | |
+" *$=$-$;$>$,$'$)$!$~${$]$^$/$($_$*$u#:$Q 3@,.X+z.<$[$}$|$1$2$3$4$5$6$7$e@8$#$G@9$0$a$ ", | |
+" ,.4@E#b$c$d$e$f$g$h$i$j$k$l$m$n$`@>.:$o$3@,. #a.p$q$r$s$t$u$v$w$x$y$z$A$B$#@C$D$E$F$G$ ", | |
+" R S H$v+I$J$K$n+L$:$o$o$M$N$L$O$P$Q$R$N$o$3@S$T$U$V$W$X$Y$Z$`$ %.%+%@%#%$%%%&%*%=%-%;%>% ", | |
+" E.,%~.'%Z.4@v W#o$)%)%)%Q !%~%{%]%^%Q$u u#/%(%_%:%<%[%}%|%1%2%3%4%=%5%6%7%8%9%0%a%b%c%d% ", | |
+" e%f%g%a#,%,%z@R 3@3@3@)%Q h%i%j%k%l%m%{+n%o%p%q%r%s%t%u%v%w%x%y%z%A%*%B%C%D%E%F%G%H%I% ", | |
+" J%K%L%M%N%D.S v#)%)%O%P%Q%R%S%T%U%V%W%X%Y%Z%`% &.&+&@&#&$&%&&&*&f@a##@=&-&;&>&,&'&)& ", | |
+" !&~&{&]&^&.#w#^&/%/&(&_&:&<&[&}&|&1&2&3&4&5&6&7&8&9&0&a&b&c&d&e&e@1+5+e@f&g&h&i&j& ", | |
+" k&l&m&n&o&p&q&r&i%s&3.t&u&v&w&x&y&z&A&B&C&D&E&F&G&H&I&J&K&L&M&N&O&P&1+`.e@f&Q&R&S&T& ", | |
+" 0 U&V&W&X&<&Y&j%Z&`& *.*+*@*#*$*%*&***=*-*;*>*>*,*'*)*!*~*{*]*^*/*(*a#B+#@_*:*<*[*}* ", | |
+" |*1*2*3*4*5*6*7*8*9*0*a*b*c*d*e*f*g*h*i*j*k*l*m*n*o*p*q*r*s*t*u*v*E.w*d.e@x*y*z*A*B* ", | |
+" C*D*E*F*G*H*I*J*K*L*M*N*O*P*Q*R*S*T*U*V*W*l*X*o*o*Y*Z*`* =.=+=@=#='%$=%=e@&=*===-=;= ", | |
+" >=,='=)=!=~={=]=^=/=(=_=:=<=[=}=|=1=2=3=4=5=p*6=6=7=8=9=0=a=b=c=d=A@~.b.B+e=f=g=h=i= ", | |
+" j=k=l=m=n=o=p=q=r=s=t=u=v=w=x=y=z=A=5=Z*B=C=D=E=8=F=G=H=I=J=K=S$R z@'%L=M=N=O= ", | |
+" P=Q=R=S=T=U=V=W=X=Y=Z=`= -.-+-@-#-$-5=p*E=D=%-%-q*&-*-=---;->-,-/%3@^+'-)-!-~- ", | |
+" {-]-^-/-(-_-:-<-[-}-|-1-2- -3-4-5-6-7-8-n*m*9-0-9-o*a-b-c-d-e-f-g-(&h%w c h-i- ", | |
+" j-k-l-m-n-o-p-q-r-s-t-u-v-w-,-x-y-z-A-B-C-D-E-E-F-G-_@m*H-I-J-K-L-M-N-O-P-(+Q- ", | |
+" R-S-T-U-V-W-X-Y-Z-`- ;.;+;@;#;$;%;&;*;=;-;-;;;>;,;';);!;~;{;];^;/;(;_;:;<;[;}; ", | |
+" |;1;2;3;4;5;6;7;8;9;0;a;b;c;d;e;f;g;h;i;j;j;k;k;l;m;n;o;p;q;r;s;t;u;v;w;x;y; ", | |
+" z;A;B;C;D;E;F;G;H;I;J;K;L;M;N;O;P;Q;R;S;T;U;V;W;X;k;Y;Z;`; >r;.>+>@>#>$> ", | |
+" %>&>*>=>->;>>>,>'>,>)>F;8;!>~>{>]>^>/>(>_>:>i;<>[>X;}>i;|>1>q;2>3>4>5>6> ", | |
+" 7>8>=>9>0>a>b>c>d>,>e>e>f>g>h>i>j>k>l>m>n>:>i;o>p>q>W;r>s>t>p;u>v>w>x>y> ", | |
+" z>A>9>0>B>C>c>D>E>F>G>G>F>H>I>J>K>L>M>N>O>P>Q>R>o>R>T;s>S>S>S>t>1>T>U>V>W>X>Y> ", | |
+" Z>`> ,9>B>.,D>+,@,#,$,%,$,&,*,=,-,;,>,,,',),P>!,!,_>~,t>s>{,],{,],^,/,(,_,:,<,[,}, ", | |
+" |,`>1,2,3,G>+,4,o>o>4,@,@,5,6,7,8,9,0,a,b,c,d,e,f,g,h, >~,|>T;T;T;i,j,k,l,m,n,o,p,q,r, ", | |
+" s,t,u,v,G>%,@,o>w,R>x,p>@,5,6,y,z,A,B,C,D,E,F,G,H,I,J,K,L,L,i,i;i;i;Q>S;M,N,P>O,P,Q,R,S, ", | |
+" T,U,V,W,%,X,Y,Z,`,[>q>@, '.'+'@'#'$'%'&'*'='-';'>',''')'!'~'{'N,i,:>_>]'M,M,Q>_>^'/'('_':'<' ", | |
+" ['}'|'1'$,X,2'p>3'4'2'@,5'6'7'8'9'0'a'b'c'd'e'f'g'h'i'j'k'l'd,m'g, > >n'o'p'q'r's't'.>u'v'w'x' ", | |
+" y'z'A'B'C'X,X,2'D'E'E'F'G'H'I'J'K'L'M'N'O'P'Q'R'S'T'U'V'W'X'Y'Z'`' ).)+)r'@)#)$)%)&)l;1>*)=)-);) ", | |
+" >),)')))!)X,E'X,~){)d>!)])^)/)()_):)<)[)})|)1)f,2)3)4)5)6)7)8)9)0)*--*a)b)c)d)e)f)g)h)i)j)k)l)m) ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" ", | |
+" "}; | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure.rb | |
new file mode 100644 | |
index 0000000..b86d905 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure.rb | |
@@ -0,0 +1,75 @@ | |
+require 'json/common' | |
+require 'json/pure/parser' | |
+require 'json/pure/generator' | |
+ | |
+module JSON | |
+ begin | |
+ require 'iconv' | |
+ # An iconv instance to convert from UTF8 to UTF16 Big Endian. | |
+ UTF16toUTF8 = Iconv.new('utf-8', 'utf-16be') # :nodoc: | |
+ # An iconv instance to convert from UTF16 Big Endian to UTF8. | |
+ UTF8toUTF16 = Iconv.new('utf-16be', 'utf-8') # :nodoc: | |
+ UTF8toUTF16.iconv('no bom') | |
+ rescue Errno::EINVAL, Iconv::InvalidEncoding | |
+ # Iconv doesn't support big endian utf-16. Let's try to hack this manually | |
+ # into the converters. | |
+ begin | |
+      old_verbose, $VERBOSE = $VERBOSE, nil | |
+ # An iconv instance to convert from UTF8 to UTF16 Big Endian. | |
+ UTF16toUTF8 = Iconv.new('utf-8', 'utf-16') # :nodoc: | |
+ # An iconv instance to convert from UTF16 Big Endian to UTF8. | |
+ UTF8toUTF16 = Iconv.new('utf-16', 'utf-8') # :nodoc: | |
+ UTF8toUTF16.iconv('no bom') | |
+ if UTF8toUTF16.iconv("\xe2\x82\xac") == "\xac\x20" | |
+ swapper = Class.new do | |
+ def initialize(iconv) # :nodoc: | |
+ @iconv = iconv | |
+ end | |
+ | |
+ def iconv(string) # :nodoc: | |
+ result = @iconv.iconv(string) | |
+ JSON.swap!(result) | |
+ end | |
+ end | |
+ UTF8toUTF16 = swapper.new(UTF8toUTF16) # :nodoc: | |
+ end | |
+ if UTF16toUTF8.iconv("\xac\x20") == "\xe2\x82\xac" | |
+ swapper = Class.new do | |
+ def initialize(iconv) # :nodoc: | |
+ @iconv = iconv | |
+ end | |
+ | |
+ def iconv(string) # :nodoc: | |
+ string = JSON.swap!(string.dup) | |
+ @iconv.iconv(string) | |
+ end | |
+ end | |
+ UTF16toUTF8 = swapper.new(UTF16toUTF8) # :nodoc: | |
+ end | |
+ rescue Errno::EINVAL, Iconv::InvalidEncoding | |
+ raise MissingUnicodeSupport, "iconv doesn't seem to support UTF-8/UTF-16 conversions" | |
+ ensure | |
+ $VERBOSE = old_verbose | |
+ end | |
+ rescue LoadError | |
+ raise MissingUnicodeSupport, | |
+ "iconv couldn't be loaded, which is required for UTF-8/UTF-16 conversions" | |
+ end | |
+ | |
+ # Swap consecutive bytes of _string_ in place. | |
+ def self.swap!(string) # :nodoc: | |
+ 0.upto(string.size / 2) do |i| | |
+ break unless string[2 * i + 1] | |
+ string[2 * i], string[2 * i + 1] = string[2 * i + 1], string[2 * i] | |
+ end | |
+ string | |
+ end | |
+ | |
+ # This module holds all the modules/classes that implement JSON's | |
+ # functionality in pure ruby. | |
+ module Pure | |
+ $DEBUG and warn "Using pure library for JSON." | |
+ JSON.parser = Parser | |
+ JSON.generator = Generator | |
+ end | |
+end | |
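The byte-swapping helper above exists because some Iconv builds only handle native-endian UTF-16; JSON.swap! then fixes the byte order by hand. A minimal sketch of its effect, assuming json/pure has been loaded as in this fixture (the sample string is hypothetical):

    # Illustrative only: JSON.swap! exchanges each consecutive byte pair in place.
    s = "abcd".dup
    JSON.swap!(s)   # => "badc"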
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/generator.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/generator.rb | |
new file mode 100644 | |
index 0000000..c8bbfd0 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/generator.rb | |
@@ -0,0 +1,394 @@ | |
+module JSON | |
+ MAP = { | |
+ "\x0" => '\u0000', | |
+ "\x1" => '\u0001', | |
+ "\x2" => '\u0002', | |
+ "\x3" => '\u0003', | |
+ "\x4" => '\u0004', | |
+ "\x5" => '\u0005', | |
+ "\x6" => '\u0006', | |
+ "\x7" => '\u0007', | |
+ "\b" => '\b', | |
+ "\t" => '\t', | |
+ "\n" => '\n', | |
+ "\xb" => '\u000b', | |
+ "\f" => '\f', | |
+ "\r" => '\r', | |
+ "\xe" => '\u000e', | |
+ "\xf" => '\u000f', | |
+ "\x10" => '\u0010', | |
+ "\x11" => '\u0011', | |
+ "\x12" => '\u0012', | |
+ "\x13" => '\u0013', | |
+ "\x14" => '\u0014', | |
+ "\x15" => '\u0015', | |
+ "\x16" => '\u0016', | |
+ "\x17" => '\u0017', | |
+ "\x18" => '\u0018', | |
+ "\x19" => '\u0019', | |
+ "\x1a" => '\u001a', | |
+ "\x1b" => '\u001b', | |
+ "\x1c" => '\u001c', | |
+ "\x1d" => '\u001d', | |
+ "\x1e" => '\u001e', | |
+ "\x1f" => '\u001f', | |
+ '"' => '\"', | |
+ '\\' => '\\\\', | |
+ '/' => '\/', | |
+ } # :nodoc: | |
+ | |
+ # Convert a UTF8 encoded Ruby string _string_ to a JSON string, encoded with | |
+ # UTF16 big endian characters as \u????, and return it. | |
+ def utf8_to_json(string) # :nodoc: | |
+ string = string.gsub(/["\\\/\x0-\x1f]/) { MAP[$&] } | |
+ string.gsub!(/( | |
+ (?: | |
+ [\xc2-\xdf][\x80-\xbf] | | |
+ [\xe0-\xef][\x80-\xbf]{2} | | |
+ [\xf0-\xf4][\x80-\xbf]{3} | |
+ )+ | | |
+ [\x80-\xc1\xf5-\xff] # invalid | |
+ )/nx) { |c| | |
+ c.size == 1 and raise GeneratorError, "invalid utf8 byte: '#{c}'" | |
+ s = JSON::UTF8toUTF16.iconv(c).unpack('H*')[0] | |
+ s.gsub!(/.{4}/n, '\\\\u\&') | |
+ } | |
+ string | |
+ rescue Iconv::Failure => e | |
+ raise GeneratorError, "Caught #{e.class}: #{e}" | |
+ end | |
+ module_function :utf8_to_json | |
+ | |
+ module Pure | |
+ module Generator | |
+      # This class is used to create State instances, which are used to hold data | |
+      # while generating a JSON text from a Ruby data structure. | |
+ class State | |
+        # Creates a State object from _opts_: a Hash creates a new State | |
+        # instance configured by _opts_, and anything else creates an | |
+        # unconfigured instance. If _opts_ is already a State object, it is | |
+        # just returned. | |
+ def self.from_state(opts) | |
+ case opts | |
+ when self | |
+ opts | |
+ when Hash | |
+ new(opts) | |
+ else | |
+ new | |
+ end | |
+ end | |
+ | |
+ # Instantiates a new State object, configured by _opts_. | |
+ # | |
+ # _opts_ can have the following keys: | |
+ # | |
+ # * *indent*: a string used to indent levels (default: ''), | |
+        # * *space*: a string that is put after a : or , delimiter (default: ''), | |
+ # * *space_before*: a string that is put before a : pair delimiter (default: ''), | |
+ # * *object_nl*: a string that is put at the end of a JSON object (default: ''), | |
+ # * *array_nl*: a string that is put at the end of a JSON array (default: ''), | |
+        # * *check_circular*: true if checking for circular data structures | |
+        #   should be done (the default), false otherwise. | |
+        # * *allow_nan*: true if NaN, Infinity, and -Infinity should be | |
+        #   generated; otherwise an exception is thrown if these values are | |
+        #   encountered. This option defaults to false. | |
+ def initialize(opts = {}) | |
+ @seen = {} | |
+ @indent = '' | |
+ @space = '' | |
+ @space_before = '' | |
+ @object_nl = '' | |
+ @array_nl = '' | |
+ @check_circular = true | |
+ @allow_nan = false | |
+ configure opts | |
+ end | |
+ | |
+ # This string is used to indent levels in the JSON text. | |
+ attr_accessor :indent | |
+ | |
+ # This string is used to insert a space between the tokens in a JSON | |
+ # string. | |
+ attr_accessor :space | |
+ | |
+ # This string is used to insert a space before the ':' in JSON objects. | |
+ attr_accessor :space_before | |
+ | |
+ # This string is put at the end of a line that holds a JSON object (or | |
+ # Hash). | |
+ attr_accessor :object_nl | |
+ | |
+ # This string is put at the end of a line that holds a JSON array. | |
+ attr_accessor :array_nl | |
+ | |
+        # This integer is the maximum level of data structure nesting allowed | |
+        # in the generated JSON; max_nesting = 0 disables the check. | |
+ attr_accessor :max_nesting | |
+ | |
+ def check_max_nesting(depth) # :nodoc: | |
+ return if @max_nesting.zero? | |
+ current_nesting = depth + 1 | |
+ current_nesting > @max_nesting and | |
+ raise NestingError, "nesting of #{current_nesting} is too deep" | |
+ end | |
+ | |
+ # Returns true, if circular data structures should be checked, | |
+ # otherwise returns false. | |
+ def check_circular? | |
+ @check_circular | |
+ end | |
+ | |
+ # Returns true if NaN, Infinity, and -Infinity should be considered as | |
+ # valid JSON and output. | |
+ def allow_nan? | |
+ @allow_nan | |
+ end | |
+ | |
+ # Returns _true_, if _object_ was already seen during this generating | |
+ # run. | |
+ def seen?(object) | |
+ @seen.key?(object.__id__) | |
+ end | |
+ | |
+        # Remember _object_, to find out if it was already encountered (i.e. | |
+        # if a cyclic data structure is rendered). | |
+ def remember(object) | |
+ @seen[object.__id__] = true | |
+ end | |
+ | |
+ # Forget _object_ for this generating run. | |
+ def forget(object) | |
+ @seen.delete object.__id__ | |
+ end | |
+ | |
+ # Configure this State instance with the Hash _opts_, and return | |
+ # itself. | |
+ def configure(opts) | |
+ @indent = opts[:indent] if opts.key?(:indent) | |
+ @space = opts[:space] if opts.key?(:space) | |
+ @space_before = opts[:space_before] if opts.key?(:space_before) | |
+ @object_nl = opts[:object_nl] if opts.key?(:object_nl) | |
+ @array_nl = opts[:array_nl] if opts.key?(:array_nl) | |
+ @check_circular = !!opts[:check_circular] if opts.key?(:check_circular) | |
+ @allow_nan = !!opts[:allow_nan] if opts.key?(:allow_nan) | |
+ if !opts.key?(:max_nesting) # defaults to 19 | |
+ @max_nesting = 19 | |
+ elsif opts[:max_nesting] | |
+ @max_nesting = opts[:max_nesting] | |
+ else | |
+ @max_nesting = 0 | |
+ end | |
+ self | |
+ end | |
+ | |
+ # Returns the configuration instance variables as a hash, that can be | |
+ # passed to the configure method. | |
+ def to_h | |
+ result = {} | |
+ for iv in %w[indent space space_before object_nl array_nl check_circular allow_nan max_nesting] | |
+ result[iv.intern] = instance_variable_get("@#{iv}") | |
+ end | |
+ result | |
+ end | |
+ end | |
+ | |
+ module GeneratorMethods | |
+ module Object | |
+ # Converts this object to a string (calling #to_s), converts | |
+ # it to a JSON string, and returns the result. This is a fallback, if no | |
+ # special method #to_json was defined for some object. | |
+ def to_json(*) to_s.to_json end | |
+ end | |
+ | |
+ module Hash | |
+ # Returns a JSON string containing a JSON object, that is unparsed from | |
+ # this Hash instance. | |
+ # _state_ is a JSON::State object, that can also be used to configure the | |
+ # produced JSON string output further. | |
+ # _depth_ is used to find out nesting depth, to indent accordingly. | |
+ def to_json(state = nil, depth = 0, *) | |
+ if state | |
+ state = JSON.state.from_state(state) | |
+ state.check_max_nesting(depth) | |
+ json_check_circular(state) { json_transform(state, depth) } | |
+ else | |
+ json_transform(state, depth) | |
+ end | |
+ end | |
+ | |
+ private | |
+ | |
+ def json_check_circular(state) | |
+ if state and state.check_circular? | |
+ state.seen?(self) and raise JSON::CircularDatastructure, | |
+ "circular data structures not supported!" | |
+ state.remember self | |
+ end | |
+ yield | |
+ ensure | |
+ state and state.forget self | |
+ end | |
+ | |
+ def json_shift(state, depth) | |
+ state and not state.object_nl.empty? or return '' | |
+ state.indent * depth | |
+ end | |
+ | |
+ def json_transform(state, depth) | |
+ delim = ',' | |
+ delim << state.object_nl if state | |
+ result = '{' | |
+ result << state.object_nl if state | |
+ result << map { |key,value| | |
+ s = json_shift(state, depth + 1) | |
+ s << key.to_s.to_json(state, depth + 1) | |
+ s << state.space_before if state | |
+ s << ':' | |
+ s << state.space if state | |
+ s << value.to_json(state, depth + 1) | |
+ }.join(delim) | |
+ result << state.object_nl if state | |
+ result << json_shift(state, depth) | |
+ result << '}' | |
+ result | |
+ end | |
+ end | |
+ | |
+ module Array | |
+ # Returns a JSON string containing a JSON array, that is unparsed from | |
+ # this Array instance. | |
+ # _state_ is a JSON::State object, that can also be used to configure the | |
+ # produced JSON string output further. | |
+ # _depth_ is used to find out nesting depth, to indent accordingly. | |
+ def to_json(state = nil, depth = 0, *) | |
+ if state | |
+ state = JSON.state.from_state(state) | |
+ state.check_max_nesting(depth) | |
+ json_check_circular(state) { json_transform(state, depth) } | |
+ else | |
+ json_transform(state, depth) | |
+ end | |
+ end | |
+ | |
+ private | |
+ | |
+ def json_check_circular(state) | |
+ if state and state.check_circular? | |
+ state.seen?(self) and raise JSON::CircularDatastructure, | |
+ "circular data structures not supported!" | |
+ state.remember self | |
+ end | |
+ yield | |
+ ensure | |
+ state and state.forget self | |
+ end | |
+ | |
+ def json_shift(state, depth) | |
+ state and not state.array_nl.empty? or return '' | |
+ state.indent * depth | |
+ end | |
+ | |
+ def json_transform(state, depth) | |
+ delim = ',' | |
+ delim << state.array_nl if state | |
+ result = '[' | |
+ result << state.array_nl if state | |
+ result << map { |value| | |
+ json_shift(state, depth + 1) << value.to_json(state, depth + 1) | |
+ }.join(delim) | |
+ result << state.array_nl if state | |
+ result << json_shift(state, depth) | |
+ result << ']' | |
+ result | |
+ end | |
+ end | |
+ | |
+ module Integer | |
+ # Returns a JSON string representation for this Integer number. | |
+ def to_json(*) to_s end | |
+ end | |
+ | |
+ module Float | |
+ # Returns a JSON string representation for this Float number. | |
+ def to_json(state = nil, *) | |
+ case | |
+ when infinite? | |
+ if !state || state.allow_nan? | |
+ to_s | |
+ else | |
+ raise GeneratorError, "#{self} not allowed in JSON" | |
+ end | |
+ when nan? | |
+ if !state || state.allow_nan? | |
+ to_s | |
+ else | |
+ raise GeneratorError, "#{self} not allowed in JSON" | |
+ end | |
+ else | |
+ to_s | |
+ end | |
+ end | |
+ end | |
+ | |
+ module String | |
+          # This string should be encoded with UTF-8. A call to this method | |
+ # returns a JSON string encoded with UTF16 big endian characters as | |
+ # \u????. | |
+ def to_json(*) | |
+ '"' << JSON.utf8_to_json(self) << '"' | |
+ end | |
+ | |
+          # Module that holds the extending methods if the String module is | |
+ # included. | |
+ module Extend | |
+ # Raw Strings are JSON Objects (the raw bytes are stored in an array for the | |
+ # key "raw"). The Ruby String can be created by this module method. | |
+ def json_create(o) | |
+ o['raw'].pack('C*') | |
+ end | |
+ end | |
+ | |
+ # Extends _modul_ with the String::Extend module. | |
+ def self.included(modul) | |
+ modul.extend Extend | |
+ end | |
+ | |
+          # This method creates a raw object hash that can be nested into | |
+          # other data structures and will be unparsed as a raw string. Use | |
+          # this method if you want to convert raw strings (e.g. binary data) | |
+          # to JSON instead of UTF-8 strings. | |
+ def to_json_raw_object | |
+ { | |
+ JSON.create_id => self.class.name, | |
+ 'raw' => self.unpack('C*'), | |
+ } | |
+ end | |
+ | |
+ # This method creates a JSON text from the result of | |
+ # a call to to_json_raw_object of this String. | |
+ def to_json_raw(*args) | |
+ to_json_raw_object.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ module TrueClass | |
+ # Returns a JSON string for true: 'true'. | |
+ def to_json(*) 'true' end | |
+ end | |
+ | |
+ module FalseClass | |
+ # Returns a JSON string for false: 'false'. | |
+ def to_json(*) 'false' end | |
+ end | |
+ | |
+ module NilClass | |
+ # Returns a JSON string for nil: 'null'. | |
+ def to_json(*) 'null' end | |
+ end | |
+ end | |
+ end | |
+ end | |
+end | |
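The State options documented above (indent, space, object_nl, array_nl, allow_nan) feed directly into json_transform, which is what produces pretty-printed output. A minimal usage sketch, assuming json/pure has been required so these GeneratorMethods are mixed into the core classes (the sample hash is hypothetical):

    # Illustrative only: indent/object_nl/space control the generated layout.
    state = JSON::Pure::Generator::State.new(
      :indent    => '  ',   # two spaces per nesting level
      :object_nl => "\n",   # newline after '{' and between pairs
      :space     => ' '     # space after each ':' delimiter
    )
    { "a" => [1, 2] }.to_json(state)
    # => "{\n  \"a\": [1,2]\n}"   (array_nl keeps its '' default)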
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/parser.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/parser.rb | |
new file mode 100644 | |
index 0000000..9b30f15 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/pure/parser.rb | |
@@ -0,0 +1,259 @@ | |
+require 'strscan' | |
+ | |
+module JSON | |
+ module Pure | |
+ # This class implements the JSON parser that is used to parse a JSON string | |
+ # into a Ruby data structure. | |
+ class Parser < StringScanner | |
+ STRING = /" ((?:[^\x0-\x1f"\\] | | |
+ \\["\\\/bfnrt] | | |
+ \\u[0-9a-fA-F]{4} | | |
+ \\[\x20-\xff])*) | |
+ "/nx | |
+ INTEGER = /(-?0|-?[1-9]\d*)/ | |
+ FLOAT = /(-? | |
+ (?:0|[1-9]\d*) | |
+ (?: | |
+ \.\d+(?i:e[+-]?\d+) | | |
+ \.\d+ | | |
+ (?i:e[+-]?\d+) | |
+ ) | |
+ )/x | |
+ NAN = /NaN/ | |
+ INFINITY = /Infinity/ | |
+ MINUS_INFINITY = /-Infinity/ | |
+ OBJECT_OPEN = /\{/ | |
+ OBJECT_CLOSE = /\}/ | |
+ ARRAY_OPEN = /\[/ | |
+ ARRAY_CLOSE = /\]/ | |
+ PAIR_DELIMITER = /:/ | |
+ COLLECTION_DELIMITER = /,/ | |
+ TRUE = /true/ | |
+ FALSE = /false/ | |
+ NULL = /null/ | |
+ IGNORE = %r( | |
+ (?: | |
+ //[^\n\r]*[\n\r]| # line comments | |
+ /\* # c-style comments | |
+ (?: | |
+ [^*/]| # normal chars | |
+ /[^*]| # slashes that do not start a nested comment | |
+ \*[^/]| # asterisks that do not end this comment | |
+ /(?=\*/) # single slash before this comment's end | |
+ )* | |
+ \*/ # the End of this comment | |
+           |[ \t\r\n]+       # whitespaces: space, horizontal tab, lf, cr | |
+ )+ | |
+ )mx | |
+ | |
+ UNPARSED = Object.new | |
+ | |
+ # Creates a new JSON::Pure::Parser instance for the string _source_. | |
+ # | |
+ # It will be configured by the _opts_ hash. _opts_ can have the following | |
+ # keys: | |
+ # * *max_nesting*: The maximum depth of nesting allowed in the parsed data | |
+      #   structures. Disable depth checking with :max_nesting => false|nil|0; | |
+      #   it defaults to 19. | |
+ # * *allow_nan*: If set to true, allow NaN, Infinity and -Infinity in | |
+ # defiance of RFC 4627 to be parsed by the Parser. This option defaults | |
+ # to false. | |
+ # * *create_additions*: If set to false, the Parser doesn't create | |
+      #   additions even if a matching class and create_id were found. This option | |
+ # defaults to true. | |
+ def initialize(source, opts = {}) | |
+ super | |
+ if !opts.key?(:max_nesting) # defaults to 19 | |
+ @max_nesting = 19 | |
+ elsif opts[:max_nesting] | |
+ @max_nesting = opts[:max_nesting] | |
+ else | |
+ @max_nesting = 0 | |
+ end | |
+ @allow_nan = !!opts[:allow_nan] | |
+ ca = true | |
+ ca = opts[:create_additions] if opts.key?(:create_additions) | |
+ @create_id = ca ? JSON.create_id : nil | |
+ end | |
+ | |
+ alias source string | |
+ | |
+ # Parses the current JSON string _source_ and returns the complete data | |
+ # structure as a result. | |
+ def parse | |
+ reset | |
+ obj = nil | |
+ until eos? | |
+ case | |
+ when scan(OBJECT_OPEN) | |
+ obj and raise ParserError, "source '#{peek(20)}' not in JSON!" | |
+ @current_nesting = 1 | |
+ obj = parse_object | |
+ when scan(ARRAY_OPEN) | |
+ obj and raise ParserError, "source '#{peek(20)}' not in JSON!" | |
+ @current_nesting = 1 | |
+ obj = parse_array | |
+ when skip(IGNORE) | |
+ ; | |
+ else | |
+ raise ParserError, "source '#{peek(20)}' not in JSON!" | |
+ end | |
+ end | |
+ obj or raise ParserError, "source did not contain any JSON!" | |
+ obj | |
+ end | |
+ | |
+ private | |
+ | |
+ # Unescape characters in strings. | |
+ UNESCAPE_MAP = Hash.new { |h, k| h[k] = k.chr } | |
+ UNESCAPE_MAP.update({ | |
+ ?" => '"', | |
+ ?\\ => '\\', | |
+ ?/ => '/', | |
+ ?b => "\b", | |
+ ?f => "\f", | |
+ ?n => "\n", | |
+ ?r => "\r", | |
+ ?t => "\t", | |
+ ?u => nil, | |
+ }) | |
+ | |
+ def parse_string | |
+ if scan(STRING) | |
+ return '' if self[1].empty? | |
+ self[1].gsub(%r((?:\\[\\bfnrt"/]|(?:\\u(?:[A-Fa-f\d]{4}))+|\\[\x20-\xff]))n) do |c| | |
+ if u = UNESCAPE_MAP[$&[1]] | |
+ u | |
+ else # \uXXXX | |
+ bytes = '' | |
+ i = 0 | |
+ while c[6 * i] == ?\\ && c[6 * i + 1] == ?u | |
+ bytes << c[6 * i + 2, 2].to_i(16) << c[6 * i + 4, 2].to_i(16) | |
+ i += 1 | |
+ end | |
+ JSON::UTF16toUTF8.iconv(bytes) | |
+ end | |
+ end | |
+ else | |
+ UNPARSED | |
+ end | |
+ rescue Iconv::Failure => e | |
+ raise GeneratorError, "Caught #{e.class}: #{e}" | |
+ end | |
+ | |
+ def parse_value | |
+ case | |
+ when scan(FLOAT) | |
+ Float(self[1]) | |
+ when scan(INTEGER) | |
+ Integer(self[1]) | |
+ when scan(TRUE) | |
+ true | |
+ when scan(FALSE) | |
+ false | |
+ when scan(NULL) | |
+ nil | |
+ when (string = parse_string) != UNPARSED | |
+ string | |
+ when scan(ARRAY_OPEN) | |
+ @current_nesting += 1 | |
+ ary = parse_array | |
+ @current_nesting -= 1 | |
+ ary | |
+ when scan(OBJECT_OPEN) | |
+ @current_nesting += 1 | |
+ obj = parse_object | |
+ @current_nesting -= 1 | |
+ obj | |
+ when @allow_nan && scan(NAN) | |
+ NaN | |
+ when @allow_nan && scan(INFINITY) | |
+ Infinity | |
+ when @allow_nan && scan(MINUS_INFINITY) | |
+ MinusInfinity | |
+ else | |
+ UNPARSED | |
+ end | |
+ end | |
+ | |
+ def parse_array | |
+        raise NestingError, "nesting of #@current_nesting is too deep" if | |
+ @max_nesting.nonzero? && @current_nesting > @max_nesting | |
+ result = [] | |
+ delim = false | |
+ until eos? | |
+ case | |
+ when (value = parse_value) != UNPARSED | |
+ delim = false | |
+ result << value | |
+ skip(IGNORE) | |
+ if scan(COLLECTION_DELIMITER) | |
+ delim = true | |
+ elsif match?(ARRAY_CLOSE) | |
+ ; | |
+ else | |
+ raise ParserError, "expected ',' or ']' in array at '#{peek(20)}'!" | |
+ end | |
+ when scan(ARRAY_CLOSE) | |
+ if delim | |
+ raise ParserError, "expected next element in array at '#{peek(20)}'!" | |
+ end | |
+ break | |
+ when skip(IGNORE) | |
+ ; | |
+ else | |
+ raise ParserError, "unexpected token in array at '#{peek(20)}'!" | |
+ end | |
+ end | |
+ result | |
+ end | |
+ | |
+ def parse_object | |
+        raise NestingError, "nesting of #@current_nesting is too deep" if | |
+ @max_nesting.nonzero? && @current_nesting > @max_nesting | |
+ result = {} | |
+ delim = false | |
+ until eos? | |
+ case | |
+ when (string = parse_string) != UNPARSED | |
+ skip(IGNORE) | |
+ unless scan(PAIR_DELIMITER) | |
+ raise ParserError, "expected ':' in object at '#{peek(20)}'!" | |
+ end | |
+ skip(IGNORE) | |
+ unless (value = parse_value).equal? UNPARSED | |
+ result[string] = value | |
+ delim = false | |
+ skip(IGNORE) | |
+ if scan(COLLECTION_DELIMITER) | |
+ delim = true | |
+ elsif match?(OBJECT_CLOSE) | |
+ ; | |
+ else | |
+ raise ParserError, "expected ',' or '}' in object at '#{peek(20)}'!" | |
+ end | |
+ else | |
+ raise ParserError, "expected value in object at '#{peek(20)}'!" | |
+ end | |
+ when scan(OBJECT_CLOSE) | |
+ if delim | |
+ raise ParserError, "expected next name, value pair in object at '#{peek(20)}'!" | |
+ end | |
+ if @create_id and klassname = result[@create_id] | |
+ klass = JSON.deep_const_get klassname | |
+ break unless klass and klass.json_creatable? | |
+ result = klass.json_create(result) | |
+ end | |
+ break | |
+ when skip(IGNORE) | |
+ ; | |
+ else | |
+ raise ParserError, "unexpected token in object at '#{peek(20)}'!" | |
+ end | |
+ end | |
+ result | |
+ end | |
+ end | |
+ end | |
+end | |
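Since Parser subclasses StringScanner, parsing is a matter of instantiating it on a source string and calling #parse, which walks the source with the token regexps above. A minimal usage sketch, assuming json/pure has been required as in this fixture (the sample document is hypothetical):

    # Illustrative only: Parser#parse returns plain Ruby objects.
    parser = JSON::Pure::Parser.new('{"list": [1, 2.5, null], "ok": true}')
    parser.parse
    # => {"list"=>[1, 2.5, nil], "ok"=>true}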
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/version.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/version.rb | |
new file mode 100644 | |
index 0000000..acf8217 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/lib/json/version.rb | |
@@ -0,0 +1,9 @@ | |
+module JSON | |
+ # JSON version | |
+ VERSION = '1.1.3' | |
+ VERSION_ARRAY = VERSION.split(/\./).map { |x| x.to_i } # :nodoc: | |
+ VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc: | |
+ VERSION_MINOR = VERSION_ARRAY[1] # :nodoc: | |
+ VERSION_BUILD = VERSION_ARRAY[2] # :nodoc: | |
+ VARIANT_BINARY = false | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail1.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail1.json | |
new file mode 100644 | |
index 0000000..6216b86 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail1.json | |
@@ -0,0 +1 @@ | |
+"A JSON payload should be an object or array, not a string." | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail10.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail10.json | |
new file mode 100644 | |
index 0000000..5d8c004 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail10.json | |
@@ -0,0 +1 @@ | |
+{"Extra value after close": true} "misplaced quoted value" | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail11.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail11.json | |
new file mode 100644 | |
index 0000000..76eb95b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail11.json | |
@@ -0,0 +1 @@ | |
+{"Illegal expression": 1 + 2} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail12.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail12.json | |
new file mode 100644 | |
index 0000000..77580a4 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail12.json | |
@@ -0,0 +1 @@ | |
+{"Illegal invocation": alert()} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail13.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail13.json | |
new file mode 100644 | |
index 0000000..379406b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail13.json | |
@@ -0,0 +1 @@ | |
+{"Numbers cannot have leading zeroes": 013} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail14.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail14.json | |
new file mode 100644 | |
index 0000000..0ed366b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail14.json | |
@@ -0,0 +1 @@ | |
+{"Numbers cannot be hex": 0x14} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail18.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail18.json | |
new file mode 100644 | |
index 0000000..e2d130c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail18.json | |
@@ -0,0 +1 @@ | |
+[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail19.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail19.json | |
new file mode 100644 | |
index 0000000..3b9c46f | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail19.json | |
@@ -0,0 +1 @@ | |
+{"Missing colon" null} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail2.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail2.json | |
new file mode 100644 | |
index 0000000..6b7c11e | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail2.json | |
@@ -0,0 +1 @@ | |
+["Unclosed array" | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail20.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail20.json | |
new file mode 100644 | |
index 0000000..27c1af3 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail20.json | |
@@ -0,0 +1 @@ | |
+{"Double colon":: null} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail21.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail21.json | |
new file mode 100644 | |
index 0000000..6247457 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail21.json | |
@@ -0,0 +1 @@ | |
+{"Comma instead of colon", null} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail22.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail22.json | |
new file mode 100644 | |
index 0000000..a775258 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail22.json | |
@@ -0,0 +1 @@ | |
+["Colon instead of comma": false] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail23.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail23.json | |
new file mode 100644 | |
index 0000000..494add1 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail23.json | |
@@ -0,0 +1 @@ | |
+["Bad value", truth] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail24.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail24.json | |
new file mode 100644 | |
index 0000000..caff239 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail24.json | |
@@ -0,0 +1 @@ | |
+['single quote'] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail25.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail25.json | |
new file mode 100644 | |
index 0000000..2dfbd25 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail25.json | |
@@ -0,0 +1 @@ | |
+["tab character in string "] | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail27.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail27.json | |
new file mode 100644 | |
index 0000000..6b01a2c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail27.json | |
@@ -0,0 +1,2 @@ | |
+["line | |
+break"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail28.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail28.json | |
new file mode 100644 | |
index 0000000..621a010 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail28.json | |
@@ -0,0 +1,2 @@ | |
+["line\ | |
+break"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail3.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail3.json | |
new file mode 100644 | |
index 0000000..168c81e | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail3.json | |
@@ -0,0 +1 @@ | |
+{unquoted_key: "keys must be quoted"} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail4.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail4.json | |
new file mode 100644 | |
index 0000000..9de168b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail4.json | |
@@ -0,0 +1 @@ | |
+["extra comma",] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail5.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail5.json | |
new file mode 100644 | |
index 0000000..ddf3ce3 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail5.json | |
@@ -0,0 +1 @@ | |
+["double extra comma",,] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail6.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail6.json | |
new file mode 100644 | |
index 0000000..ed91580 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail6.json | |
@@ -0,0 +1 @@ | |
+[ , "<-- missing value"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail7.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail7.json | |
new file mode 100644 | |
index 0000000..8a96af3 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail7.json | |
@@ -0,0 +1 @@ | |
+["Comma after the close"], | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail8.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail8.json | |
new file mode 100644 | |
index 0000000..b28479c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail8.json | |
@@ -0,0 +1 @@ | |
+["Extra close"]] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail9.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail9.json | |
new file mode 100644 | |
index 0000000..5815574 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/fail9.json | |
@@ -0,0 +1 @@ | |
+{"Extra comma": true,} | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass1.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass1.json | |
new file mode 100644 | |
index 0000000..7828fcc | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass1.json | |
@@ -0,0 +1,56 @@ | |
+[ | |
+ "JSON Test Pattern pass1", | |
+ {"object with 1 member":["array with 1 element"]}, | |
+ {}, | |
+ [], | |
+ -42, | |
+ true, | |
+ false, | |
+ null, | |
+ { | |
+ "integer": 1234567890, | |
+ "real": -9876.543210, | |
+ "e": 0.123456789e-12, | |
+ "E": 1.234567890E+34, | |
+ "": 23456789012E666, | |
+ "zero": 0, | |
+ "one": 1, | |
+ "space": " ", | |
+ "quote": "\"", | |
+ "backslash": "\\", | |
+ "controls": "\b\f\n\r\t", | |
+ "slash": "/ & \/", | |
+ "alpha": "abcdefghijklmnopqrstuvwyz", | |
+ "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", | |
+ "digit": "0123456789", | |
+ "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?", | |
+ "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", | |
+ "true": true, | |
+ "false": false, | |
+ "null": null, | |
+ "array":[ ], | |
+ "object":{ }, | |
+ "address": "50 St. James Street", | |
+ "url": "http://www.JSON.org/", | |
+ "comment": "// /* <!-- --", | |
+ "# -- --> */": " ", | |
+ " s p a c e d " :[1,2 , 3 | |
+ | |
+, | |
+ | |
+4 , 5 , 6 ,7 ], | |
+ "compact": [1,2,3,4,5,6,7], | |
+ "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", | |
+        "quotes": "&#34; \u0022 %22 0x22 034 &#34;", | |
+ "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" | |
+: "A key can be any string" | |
+ }, | |
+ 0.5 ,98.6 | |
+, | |
+99.44 | |
+, | |
+ | |
+1066 | |
+ | |
+ | |
+,"rosebud"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass15.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass15.json | |
new file mode 100644 | |
index 0000000..fc8376b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass15.json | |
@@ -0,0 +1 @@ | |
+["Illegal backslash escape: \x15"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass16.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass16.json | |
new file mode 100644 | |
index 0000000..c43ae3c | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass16.json | |
@@ -0,0 +1 @@ | |
+["Illegal backslash escape: \'"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass17.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass17.json | |
new file mode 100644 | |
index 0000000..62b9214 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass17.json | |
@@ -0,0 +1 @@ | |
+["Illegal backslash escape: \017"] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass2.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass2.json | |
new file mode 100644 | |
index 0000000..d3c63c7 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass2.json | |
@@ -0,0 +1 @@ | |
+[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass26.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass26.json | |
new file mode 100644 | |
index 0000000..845d26a | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass26.json | |
@@ -0,0 +1 @@ | |
+["tab\ character\ in\ string\ "] | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass3.json b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass3.json | |
new file mode 100644 | |
index 0000000..4528d51 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/fixtures/pass3.json | |
@@ -0,0 +1,6 @@ | |
+{ | |
+ "JSON Test Pattern pass3": { | |
+ "The outermost value": "must be an object or array.", | |
+ "In this test": "It is an object." | |
+ } | |
+} | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/runner.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/runner.rb | |
new file mode 100755 | |
index 0000000..9ad0448 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/runner.rb | |
@@ -0,0 +1,25 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit/ui/console/testrunner' | |
+require 'test/unit/testsuite' | |
+$:.unshift File.expand_path(File.dirname($0)) | |
+$:.unshift 'tests' | |
+require 'test_json' | |
+require 'test_json_generate' | |
+require 'test_json_unicode' | |
+require 'test_json_addition' | |
+require 'test_json_rails' | |
+require 'test_json_fixtures' | |
+ | |
+class TS_AllTests | |
+ def self.suite | |
+ suite = Test::Unit::TestSuite.new name | |
+ suite << TC_JSONGenerate.suite | |
+ suite << TC_JSON.suite | |
+ suite << TC_JSONUnicode.suite | |
+ suite << TC_JSONAddition.suite | |
+ suite << TC_JSONRails.suite | |
+ suite << TC_JSONFixtures.suite | |
+ end | |
+end | |
+Test::Unit::UI::Console::TestRunner.run(TS_AllTests) | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json.rb | |
new file mode 100755 | |
index 0000000..d7e8288 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json.rb | |
@@ -0,0 +1,293 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit' | |
+require 'json' | |
+require 'stringio' | |
+ | |
+class TC_JSON < Test::Unit::TestCase | |
+ include JSON | |
+ | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ @ary = [1, "foo", 3.14, 4711.0, 2.718, nil, [1,-2,3], false, true].map do | |
+ |x| [x] | |
+ end | |
+ @ary_to_parse = ["1", '"foo"', "3.14", "4711.0", "2.718", "null", | |
+ "[1,-2,3]", "false", "true"].map do | |
+ |x| "[#{x}]" | |
+ end | |
+ @hash = { | |
+ 'a' => 2, | |
+ 'b' => 3.141, | |
+ 'c' => 'c', | |
+ 'd' => [ 1, "b", 3.14 ], | |
+ 'e' => { 'foo' => 'bar' }, | |
+ 'g' => "\"\0\037", | |
+ 'h' => 1000.0, | |
+ 'i' => 0.001 | |
+ } | |
+ @json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},' + | |
+ '"g":"\\"\\u0000\\u001f","h":1.0E3,"i":1.0E-3}' | |
+ end | |
+ | |
+ def test_construction | |
+ parser = JSON::Parser.new('test') | |
+ assert_equal 'test', parser.source | |
+ end | |
+ | |
+ def assert_equal_float(expected, is) | |
+ assert_in_delta(expected.first, is.first, 1e-2) | |
+ end | |
+ | |
+ def test_parse_simple_arrays | |
+ assert_equal([], parse('[]')) | |
+ assert_equal([], parse(' [ ] ')) | |
+ assert_equal([nil], parse('[null]')) | |
+ assert_equal([false], parse('[false]')) | |
+ assert_equal([true], parse('[true]')) | |
+ assert_equal([-23], parse('[-23]')) | |
+ assert_equal([23], parse('[23]')) | |
+ assert_equal([0.23], parse('[0.23]')) | |
+ assert_equal([0.0], parse('[0e0]')) | |
+ assert_raises(JSON::ParserError) { parse('[+23.2]') } | |
+ assert_raises(JSON::ParserError) { parse('[+23]') } | |
+ assert_raises(JSON::ParserError) { parse('[.23]') } | |
+ assert_raises(JSON::ParserError) { parse('[023]') } | |
+ assert_equal_float [3.141], parse('[3.141]') | |
+ assert_equal_float [-3.141], parse('[-3.141]') | |
+ assert_equal_float [3.141], parse('[3141e-3]') | |
+ assert_equal_float [3.141], parse('[3141.1e-3]') | |
+ assert_equal_float [3.141], parse('[3141E-3]') | |
+ assert_equal_float [3.141], parse('[3141.0E-3]') | |
+ assert_equal_float [-3.141], parse('[-3141.0e-3]') | |
+ assert_equal_float [-3.141], parse('[-3141e-3]') | |
+ assert_raises(ParserError) { parse('[NaN]') } | |
+ assert parse('[NaN]', :allow_nan => true).first.nan? | |
+ assert_raises(ParserError) { parse('[Infinity]') } | |
+ assert_equal [1.0/0], parse('[Infinity]', :allow_nan => true) | |
+ assert_raises(ParserError) { parse('[-Infinity]') } | |
+ assert_equal [-1.0/0], parse('[-Infinity]', :allow_nan => true) | |
+ assert_equal([""], parse('[""]')) | |
+ assert_equal(["foobar"], parse('["foobar"]')) | |
+ assert_equal([{}], parse('[{}]')) | |
+ end | |
+ | |
+ def test_parse_simple_objects | |
+ assert_equal({}, parse('{}')) | |
+ assert_equal({}, parse(' { } ')) | |
+ assert_equal({ "a" => nil }, parse('{ "a" : null}')) | |
+ assert_equal({ "a" => nil }, parse('{"a":null}')) | |
+ assert_equal({ "a" => false }, parse('{ "a" : false } ')) | |
+ assert_equal({ "a" => false }, parse('{"a":false}')) | |
+ assert_raises(JSON::ParserError) { parse('{false}') } | |
+ assert_equal({ "a" => true }, parse('{"a":true}')) | |
+ assert_equal({ "a" => true }, parse(' { "a" : true } ')) | |
+ assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) | |
+ assert_equal({ "a" => -23 }, parse(' { "a" : -23 } ')) | |
+ assert_equal({ "a" => 23 }, parse('{"a":23 } ')) | |
+ assert_equal({ "a" => 23 }, parse(' { "a" : 23 } ')) | |
+ assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) | |
+ assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } ')) | |
+ end | |
+ | |
+ begin | |
+ require 'permutation' | |
+ def test_parse_more_complex_arrays | |
+ a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }] | |
+ perms = Permutation.for a | |
+ perms.each do |perm| | |
+ orig_ary = perm.project | |
+ json = pretty_generate(orig_ary) | |
+ assert_equal orig_ary, parse(json) | |
+ end | |
+ end | |
+ | |
+ def test_parse_complex_objects | |
+ a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }] | |
+ perms = Permutation.for a | |
+ perms.each do |perm| | |
+ s = "a" | |
+ orig_obj = perm.project.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h } | |
+ json = pretty_generate(orig_obj) | |
+ assert_equal orig_obj, parse(json) | |
+ end | |
+ end | |
+ rescue LoadError | |
+ warn "Skipping permutation tests." | |
+ end | |
+ | |
+ def test_parse_arrays | |
+ assert_equal([1,2,3], parse('[1,2,3]')) | |
+ assert_equal([1.2,2,3], parse('[1.2,2,3]')) | |
+ assert_equal([[],[[],[]]], parse('[[],[[],[]]]')) | |
+ end | |
+ | |
+ def test_parse_values | |
+ assert_equal([""], parse('[""]')) | |
+ assert_equal(["\\"], parse('["\\\\"]')) | |
+ assert_equal(['"'], parse('["\""]')) | |
+ assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]')) | |
+ assert_equal(["\"\b\n\r\t\0\037"], | |
+ parse('["\"\b\n\r\t\u0000\u001f"]')) | |
+ for i in 0 ... @ary.size | |
+ assert_equal(@ary[i], parse(@ary_to_parse[i])) | |
+ end | |
+ end | |
+ | |
+ def test_parse_array | |
+ assert_equal([], parse('[]')) | |
+ assert_equal([], parse(' [ ] ')) | |
+ assert_equal([1], parse('[1]')) | |
+ assert_equal([1], parse(' [ 1 ] ')) | |
+ assert_equal(@ary, | |
+ parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]]'\ | |
+ ',[false],[true]]')) | |
+ assert_equal(@ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2] | |
+ , [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] })) | |
+ end | |
+ | |
+ def test_parse_object | |
+ assert_equal({}, parse('{}')) | |
+ assert_equal({}, parse(' { } ')) | |
+ assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}')) | |
+ assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } ')) | |
+ end | |
+ | |
+ def test_parser_reset | |
+ parser = Parser.new(@json) | |
+ assert_equal(@hash, parser.parse) | |
+ assert_equal(@hash, parser.parse) | |
+ end | |
+ | |
+ def test_comments | |
+ json = <<EOT | |
+{ | |
+ "key1":"value1", // eol comment | |
+ "key2":"value2" /* multi line | |
+ * comment */, | |
+ "key3":"value3" /* multi line | |
+ // nested eol comment | |
+ * comment */ | |
+} | |
+EOT | |
+ assert_equal( | |
+ { "key1" => "value1", "key2" => "value2", "key3" => "value3" }, | |
+ parse(json)) | |
+ json = <<EOT | |
+{ | |
+ "key1":"value1" /* multi line | |
+ // nested eol comment | |
+ /* illegal nested multi line comment */ | |
+ * comment */ | |
+} | |
+EOT | |
+ assert_raises(ParserError) { parse(json) } | |
+ json = <<EOT | |
+{ | |
+ "key1":"value1" /* multi line | |
+ // nested eol comment | |
+ closed multi comment */ | |
+ and again, throw an Error */ | |
+} | |
+EOT | |
+ assert_raises(ParserError) { parse(json) } | |
+ json = <<EOT | |
+{ | |
+ "key1":"value1" /*/*/ | |
+} | |
+EOT | |
+ assert_equal({ "key1" => "value1" }, parse(json)) | |
+ end | |
+ | |
+ def test_backslash | |
+ data = [ '\\.(?i:gif|jpe?g|png)$' ] | |
+ json = '["\\\\.(?i:gif|jpe?g|png)$"]' | |
+ assert_equal json, JSON.unparse(data) | |
+ assert_equal data, JSON.parse(json) | |
+ # | |
+ data = [ '\\"' ] | |
+ json = '["\\\\\""]' | |
+ assert_equal json, JSON.unparse(data) | |
+ assert_equal data, JSON.parse(json) | |
+ # | |
+ json = '["\/"]' | |
+ data = JSON.parse(json) | |
+ assert_equal ['/'], data | |
+ assert_equal json, JSON.unparse(data) | |
+ # | |
+ json = '["\""]' | |
+ data = JSON.parse(json) | |
+ assert_equal ['"'], data | |
+ assert_equal json, JSON.unparse(data) | |
+ json = '["\\\'"]' | |
+ data = JSON.parse(json) | |
+ assert_equal ["'"], data | |
+ assert_equal '["\'"]', JSON.unparse(data) | |
+ end | |
+ | |
+ def test_wrong_inputs | |
+ assert_raises(ParserError) { JSON.parse('"foo"') } | |
+ assert_raises(ParserError) { JSON.parse('123') } | |
+ assert_raises(ParserError) { JSON.parse('[] bla') } | |
+ assert_raises(ParserError) { JSON.parse('[] 1') } | |
+ assert_raises(ParserError) { JSON.parse('[] []') } | |
+ assert_raises(ParserError) { JSON.parse('[] {}') } | |
+ assert_raises(ParserError) { JSON.parse('{} []') } | |
+ assert_raises(ParserError) { JSON.parse('{} {}') } | |
+ assert_raises(ParserError) { JSON.parse('[NULL]') } | |
+ assert_raises(ParserError) { JSON.parse('[FALSE]') } | |
+ assert_raises(ParserError) { JSON.parse('[TRUE]') } | |
+ assert_raises(ParserError) { JSON.parse('[07] ') } | |
+ assert_raises(ParserError) { JSON.parse('[0a]') } | |
+ assert_raises(ParserError) { JSON.parse('[1.]') } | |
+ assert_raises(ParserError) { JSON.parse(' ') } | |
+ end | |
+ | |
+ def test_nesting | |
+ assert_raises(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 } | |
+ assert_raises(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse } | |
+ assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2) | |
+ too_deep = '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]' | |
+ too_deep_ary = eval too_deep | |
+ assert_raises(JSON::NestingError) { JSON.parse too_deep } | |
+ assert_raises(JSON::NestingError) { JSON.parser.new(too_deep).parse } | |
+ assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 19 } | |
+ ok = JSON.parse too_deep, :max_nesting => 20 | |
+ assert_equal too_deep_ary, ok | |
+ ok = JSON.parse too_deep, :max_nesting => nil | |
+ assert_equal too_deep_ary, ok | |
+ ok = JSON.parse too_deep, :max_nesting => false | |
+ assert_equal too_deep_ary, ok | |
+ ok = JSON.parse too_deep, :max_nesting => 0 | |
+ assert_equal too_deep_ary, ok | |
+ assert_raises(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 } | |
+ assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2) | |
+ assert_raises(JSON::NestingError) { JSON.generate too_deep_ary } | |
+ assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 19 } | |
+ ok = JSON.generate too_deep_ary, :max_nesting => 20 | |
+ assert_equal too_deep, ok | |
+ ok = JSON.generate too_deep_ary, :max_nesting => nil | |
+ assert_equal too_deep, ok | |
+ ok = JSON.generate too_deep_ary, :max_nesting => false | |
+ assert_equal too_deep, ok | |
+ ok = JSON.generate too_deep_ary, :max_nesting => 0 | |
+ assert_equal too_deep, ok | |
+ end | |
+ | |
+ def test_load_dump | |
+ too_deep = '[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]' | |
+ assert_equal too_deep, JSON.dump(eval(too_deep)) | |
+ assert_kind_of String, Marshal.dump(eval(too_deep)) | |
+ assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 19) } | |
+ assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 19) } | |
+ assert_equal too_deep, JSON.dump(eval(too_deep), 20) | |
+ assert_kind_of String, Marshal.dump(eval(too_deep), 20) | |
+ output = StringIO.new | |
+ JSON.dump(eval(too_deep), output) | |
+ assert_equal too_deep, output.string | |
+ output = StringIO.new | |
+ JSON.dump(eval(too_deep), output, 20) | |
+ assert_equal too_deep, output.string | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_addition.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_addition.rb | |
new file mode 100755 | |
index 0000000..248b7c9 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_addition.rb | |
@@ -0,0 +1,161 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit' | |
+require 'json/add/core' | |
+require 'date' | |
+ | |
+class TC_JSONAddition < Test::Unit::TestCase | |
+ include JSON | |
+ | |
+ class A | |
+ def initialize(a) | |
+ @a = a | |
+ end | |
+ | |
+ attr_reader :a | |
+ | |
+ def ==(other) | |
+ a == other.a | |
+ end | |
+ | |
+ def self.json_create(object) | |
+ new(*object['args']) | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'args' => [ @a ], | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ class B | |
+ def self.json_creatable? | |
+ false | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ class C | |
+ def self.json_creatable? | |
+ false | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => 'TC_JSONAddition::Nix', | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ end | |
+ | |
+ def test_extended_json | |
+ a = A.new(666) | |
+ assert A.json_creatable? | |
+ json = generate(a) | |
+ a_again = JSON.parse(json) | |
+ assert_kind_of a.class, a_again | |
+ assert_equal a, a_again | |
+ end | |
+ | |
+ def test_extended_json_disabled | |
+ a = A.new(666) | |
+ assert A.json_creatable? | |
+ json = generate(a) | |
+ a_again = JSON.parse(json, :create_additions => true) | |
+ assert_kind_of a.class, a_again | |
+ assert_equal a, a_again | |
+ a_hash = JSON.parse(json, :create_additions => false) | |
+ assert_kind_of Hash, a_hash | |
+ assert_equal( | |
+ {"args"=>[666], "json_class"=>"TC_JSONAddition::A"}.sort_by { |k,| k }, | |
+ a_hash.sort_by { |k,| k } | |
+ ) | |
+ end | |
+ | |
+ def test_extended_json_fail1 | |
+ b = B.new | |
+ assert !B.json_creatable? | |
+ json = generate(b) | |
+ assert_equal({ "json_class"=>"TC_JSONAddition::B" }, JSON.parse(json)) | |
+ end | |
+ | |
+ def test_extended_json_fail2 | |
+ c = C.new | |
+ assert !C.json_creatable? | |
+ json = generate(c) | |
+ assert_raises(ArgumentError) { JSON.parse(json) } | |
+ end | |
+ | |
+ def test_raw_strings | |
+ raw = '' | |
+ raw_array = [] | |
+ for i in 0..255 | |
+ raw << i | |
+ raw_array << i | |
+ end | |
+ json = raw.to_json_raw | |
+ json_raw_object = raw.to_json_raw_object | |
+ hash = { 'json_class' => 'String', 'raw'=> raw_array } | |
+ assert_equal hash, json_raw_object | |
+ json_raw = <<EOT.chomp | |
+{\"json_class\":\"String\",\"raw\":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255]} | |
+EOT | |
+# " | |
+ assert_equal json_raw, json | |
+ raw_again = JSON.parse(json) | |
+ assert_equal raw, raw_again | |
+ end | |
+ | |
+ MyJsonStruct = Struct.new 'MyJsonStruct', :foo, :bar | |
+ | |
+ def test_core | |
+ t = Time.now | |
+ assert_equal t, JSON(JSON(t)) | |
+ d = Date.today | |
+ assert_equal d, JSON(JSON(d)) | |
+ d = DateTime.civil(2007, 6, 14, 14, 57, 10, Rational(1, 12), 2299161) | |
+ assert_equal d, JSON(JSON(d)) | |
+ assert_equal 1..10, JSON(JSON(1..10)) | |
+ assert_equal 1...10, JSON(JSON(1...10)) | |
+ assert_equal "a".."c", JSON(JSON("a".."c")) | |
+ assert_equal "a"..."c", JSON(JSON("a"..."c")) | |
+ s = MyJsonStruct.new 4711, 'foot' | |
+ assert_equal s, JSON(JSON(s)) | |
+ struct = Struct.new :foo, :bar | |
+ s = struct.new 4711, 'foot' | |
+ assert_raises(JSONError) { JSON(s) } | |
+ begin | |
+ raise TypeError, "test me" | |
+ rescue TypeError => e | |
+ e_json = JSON.generate e | |
+ e_again = JSON e_json | |
+ assert_kind_of TypeError, e_again | |
+ assert_equal e.message, e_again.message | |
+ assert_equal e.backtrace, e_again.backtrace | |
+ end | |
+ assert_equal(/foo/, JSON(JSON(/foo/))) | |
+ assert_equal(/foo/i, JSON(JSON(/foo/i))) | |
+ end | |
+ | |
+ def test_utc_datetime | |
+ now = Time.now | |
+ d = DateTime.parse(now.to_s) # usual case | |
+ assert d, JSON.parse(d.to_json) | |
+ d = DateTime.parse(now.utc.to_s) # of = 0 | |
+ assert d, JSON.parse(d.to_json) | |
+ d = DateTime.civil(2008, 6, 17, 11, 48, 32, 1) # of = 1 / 12 => 1/12 | |
+ assert d, JSON.parse(d.to_json) | |
+ d = DateTime.civil(2008, 6, 17, 11, 48, 32, 12) # of = 12 / 12 => 12 | |
+ assert d, JSON.parse(d.to_json) | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_fixtures.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_fixtures.rb | |
new file mode 100755 | |
index 0000000..665dcbd | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_fixtures.rb | |
@@ -0,0 +1,30 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit' | |
+require 'json' | |
+ | |
+class TC_JSONFixtures < Test::Unit::TestCase | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ fixtures = File.join(File.dirname(__FILE__), 'fixtures/*.json') | |
+ passed, failed = Dir[fixtures].partition { |f| f['pass'] } | |
+ @passed = passed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort | |
+ @failed = failed.inject([]) { |a, f| a << [ f, File.read(f) ] }.sort | |
+ end | |
+ | |
+ def test_passing | |
+ for name, source in @passed | |
+ assert JSON.parse(source), | |
+ "Did not pass for fixture '#{name}'" | |
+ end | |
+ end | |
+ | |
+ def test_failing | |
+ for name, source in @failed | |
+ assert_raises(JSON::ParserError, JSON::NestingError, | |
+ "Did not fail for fixture '#{name}'") do | |
+ JSON.parse(source) | |
+ end | |
+ end | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_generate.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_generate.rb | |
new file mode 100755 | |
index 0000000..f09e9d5 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_generate.rb | |
@@ -0,0 +1,100 @@ | |
+require 'test/unit' | |
+require 'json' | |
+ | |
+class TC_JSONGenerate < Test::Unit::TestCase | |
+ include JSON | |
+ | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ @hash = { | |
+ 'a' => 2, | |
+ 'b' => 3.141, | |
+ 'c' => 'c', | |
+ 'd' => [ 1, "b", 3.14 ], | |
+ 'e' => { 'foo' => 'bar' }, | |
+ 'g' => "\"\0\037", | |
+ 'h' => 1000.0, | |
+ 'i' => 0.001 | |
+ } | |
+ @json2 = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},' + | |
+ '"g":"\\"\\u0000\\u001f","h":1000.0,"i":0.001}' | |
+ @json3 = <<'EOT'.chomp | |
+{ | |
+ "a": 2, | |
+ "b": 3.141, | |
+ "c": "c", | |
+ "d": [ | |
+ 1, | |
+ "b", | |
+ 3.14 | |
+ ], | |
+ "e": { | |
+ "foo": "bar" | |
+ }, | |
+ "g": "\"\u0000\u001f", | |
+ "h": 1000.0, | |
+ "i": 0.001 | |
+} | |
+EOT | |
+ end | |
+ | |
+ def test_unparse | |
+ json = unparse(@hash) | |
+ assert_equal(JSON.parse(@json2), JSON.parse(json)) | |
+ parsed_json = parse(json) | |
+ assert_equal(@hash, parsed_json) | |
+ json = generate({1=>2}) | |
+ assert_equal('{"1":2}', json) | |
+ parsed_json = parse(json) | |
+ assert_equal({"1"=>2}, parsed_json) | |
+ end | |
+ | |
+ def test_unparse_pretty | |
+ json = pretty_unparse(@hash) | |
+ assert_equal(JSON.parse(@json3), JSON.parse(json)) | |
+ parsed_json = parse(json) | |
+ assert_equal(@hash, parsed_json) | |
+ json = pretty_generate({1=>2}) | |
+ assert_equal(<<'EOT'.chomp, json) | |
+{ | |
+ "1": 2 | |
+} | |
+EOT | |
+ parsed_json = parse(json) | |
+ assert_equal({"1"=>2}, parsed_json) | |
+ end | |
+ | |
+ def test_states | |
+ json = generate({1=>2}, nil) | |
+ assert_equal('{"1":2}', json) | |
+ s = JSON.state.new(:check_circular => true) | |
+ #assert s.check_circular | |
+ h = { 1=>2 } | |
+ h[3] = h | |
+ assert_raises(JSON::CircularDatastructure) { generate(h) } | |
+ assert_raises(JSON::CircularDatastructure) { generate(h, s) } | |
+ s = JSON.state.new(:check_circular => true) | |
+ #assert s.check_circular | |
+ a = [ 1, 2 ] | |
+ a << a | |
+ assert_raises(JSON::CircularDatastructure) { generate(a, s) } | |
+ end | |
+ | |
+ def test_allow_nan | |
+ assert_raises(GeneratorError) { generate([JSON::NaN]) } | |
+ assert_equal '[NaN]', generate([JSON::NaN], :allow_nan => true) | |
+ assert_equal '[NaN]', fast_generate([JSON::NaN]) | |
+ assert_raises(GeneratorError) { pretty_generate([JSON::NaN]) } | |
+ assert_equal "[\n NaN\n]", pretty_generate([JSON::NaN], :allow_nan => true) | |
+ assert_raises(GeneratorError) { generate([JSON::Infinity]) } | |
+ assert_equal '[Infinity]', generate([JSON::Infinity], :allow_nan => true) | |
+ assert_equal '[Infinity]', fast_generate([JSON::Infinity]) | |
+ assert_raises(GeneratorError) { pretty_generate([JSON::Infinity]) } | |
+ assert_equal "[\n Infinity\n]", pretty_generate([JSON::Infinity], :allow_nan => true) | |
+ assert_raises(GeneratorError) { generate([JSON::MinusInfinity]) } | |
+ assert_equal '[-Infinity]', generate([JSON::MinusInfinity], :allow_nan => true) | |
+ assert_equal '[-Infinity]', fast_generate([JSON::MinusInfinity]) | |
+ assert_raises(GeneratorError) { pretty_generate([JSON::MinusInfinity]) } | |
+ assert_equal "[\n -Infinity\n]", pretty_generate([JSON::MinusInfinity], :allow_nan => true) | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_rails.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_rails.rb | |
new file mode 100755 | |
index 0000000..c004860 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_rails.rb | |
@@ -0,0 +1,118 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit' | |
+require 'json/add/rails' | |
+require 'date' | |
+ | |
+class TC_JSONRails < Test::Unit::TestCase | |
+ include JSON | |
+ | |
+ class A | |
+ def initialize(a) | |
+ @a = a | |
+ end | |
+ | |
+ attr_reader :a | |
+ | |
+ def ==(other) | |
+ a == other.a | |
+ end | |
+ | |
+ def self.json_create(object) | |
+ new(*object['args']) | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ 'args' => [ @a ], | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ class B | |
+ def self.json_creatable? | |
+ false | |
+ end | |
+ | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => self.class.name, | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ class C | |
+ def to_json(*args) | |
+ { | |
+ 'json_class' => 'TC_JSONRails::Nix', | |
+ }.to_json(*args) | |
+ end | |
+ end | |
+ | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ end | |
+ | |
+ def test_extended_json | |
+ a = A.new(666) | |
+ assert A.json_creatable? | |
+ json = generate(a) | |
+ a_again = JSON.parse(json) | |
+ assert_kind_of a.class, a_again | |
+ assert_equal a, a_again | |
+ end | |
+ | |
+ def test_extended_json_disabled | |
+ a = A.new(666) | |
+ assert A.json_creatable? | |
+ json = generate(a) | |
+ a_again = JSON.parse(json, :create_additions => true) | |
+ assert_kind_of a.class, a_again | |
+ assert_equal a, a_again | |
+ a_hash = JSON.parse(json, :create_additions => false) | |
+ assert_kind_of Hash, a_hash | |
+ assert_equal( | |
+ {"args"=>[666], "json_class"=>"TC_JSONRails::A"}.sort_by { |k,| k }, | |
+ a_hash.sort_by { |k,| k } | |
+ ) | |
+ end | |
+ | |
+ def test_extended_json_fail1 | |
+ b = B.new | |
+ assert !B.json_creatable? | |
+ json = generate(b) | |
+ assert_equal({ 'json_class' => B.name }, JSON.parse(json)) | |
+ end | |
+ | |
+ def test_extended_json_fail2 | |
+ c = C.new # with rails addition all objects are theoretically creatable | |
+ assert C.json_creatable? | |
+ json = generate(c) | |
+ assert_raises(ArgumentError) { JSON.parse(json) } | |
+ end | |
+ | |
+ def test_raw_strings | |
+ raw = '' | |
+ raw_array = [] | |
+ for i in 0..255 | |
+ raw << i | |
+ raw_array << i | |
+ end | |
+ json = raw.to_json_raw | |
+ json_raw_object = raw.to_json_raw_object | |
+ hash = { 'json_class' => 'String', 'raw'=> raw_array } | |
+ assert_equal hash, json_raw_object | |
+ json_raw = <<EOT.chomp | |
+{\"json_class\":\"String\",\"raw\":[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255]} | |
+EOT | |
+# " | |
+ assert_equal json_raw, json | |
+ raw_again = JSON.parse(json) | |
+ assert_equal raw, raw_again | |
+ end | |
+ | |
+ def test_symbol | |
+ assert_equal '"foo"', JSON(:foo) # we don't want an object here | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_unicode.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_unicode.rb | |
new file mode 100755 | |
index 0000000..a91f4b5 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tests/test_json_unicode.rb | |
@@ -0,0 +1,61 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+require 'test/unit' | |
+require 'json' | |
+ | |
+class TC_JSONUnicode < Test::Unit::TestCase | |
+ include JSON | |
+ | |
+ def setup | |
+ $KCODE = 'UTF8' | |
+ end | |
+ | |
+ def test_unicode | |
+ assert_equal '""', ''.to_json | |
+ assert_equal '"\\b"', "\b".to_json | |
+ assert_equal '"\u0001"', 0x1.chr.to_json | |
+ assert_equal '"\u001f"', 0x1f.chr.to_json | |
+ assert_equal '" "', ' '.to_json | |
+ assert_equal "\"#{0x7f.chr}\"", 0x7f.chr.to_json | |
+ utf8 = [ "© ≠ €! \01" ] | |
+ json = '["\u00a9 \u2260 \u20ac! \u0001"]' | |
+ assert_equal json, utf8.to_json | |
+ assert_equal utf8, parse(json) | |
+ utf8 = ["\343\201\202\343\201\204\343\201\206\343\201\210\343\201\212"] | |
+ json = "[\"\\u3042\\u3044\\u3046\\u3048\\u304a\"]" | |
+ assert_equal json, utf8.to_json | |
+ assert_equal utf8, parse(json) | |
+ utf8 = ['საქართველო'] | |
+ json = "[\"\\u10e1\\u10d0\\u10e5\\u10d0\\u10e0\\u10d7\\u10d5\\u10d4\\u10da\\u10dd\"]" | |
+ assert_equal json, utf8.to_json | |
+ assert_equal utf8, parse(json) | |
+ assert_equal '["\\u00c3"]', JSON.generate(["Ã"]) | |
+ assert_equal ["€"], JSON.parse('["\u20ac"]') | |
+ utf8 = ["\xf0\xa0\x80\x81"] | |
+ json = '["\ud840\udc01"]' | |
+ assert_equal json, JSON.generate(utf8) | |
+ assert_equal utf8, JSON.parse(json) | |
+ end | |
+ | |
+ def test_chars | |
+ (0..0x7f).each do |i| | |
+ json = '["\u%04x"]' % i | |
+ if RUBY_VERSION >= "1.9." | |
+ i = i.chr | |
+ end | |
+ assert_equal i, JSON.parse(json).first[0] | |
+ if i == ?\b | |
+ generated = JSON.generate(["" << i]) | |
+ assert '["\b"]' == generated || '["\10"]' == generated | |
+ elsif [?\n, ?\r, ?\t, ?\f].include?(i) | |
+ assert_equal '[' << ('' << i).dump << ']', JSON.generate(["" << i]) | |
+ elsif i.chr < 0x20.chr | |
+ assert_equal json, JSON.generate(["" << i]) | |
+ end | |
+ end | |
+ assert_raises(JSON::GeneratorError) do | |
+ JSON.generate(["" << 0x80]) | |
+ end | |
+ assert_equal "\302\200", JSON.parse('["\u0080"]').first | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/fuzz.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/fuzz.rb | |
new file mode 100755 | |
index 0000000..8735963 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/fuzz.rb | |
@@ -0,0 +1,140 @@ | |
+$KCODE='UTF8' | |
+require 'json' | |
+ | |
+require 'iconv' | |
+ISO_8859_1_TO_UTF8 = Iconv.new('utf-8', 'iso-8859-15') | |
+class ::String | |
+ def to_utf8 | |
+ ISO_8859_1_TO_UTF8.iconv self | |
+ end | |
+end | |
+ | |
+class Fuzzer | |
+ def initialize(n, freqs = {}) | |
+ sum = freqs.inject(0.0) { |s, x| s + x.last } | |
+ freqs.each_key { |x| freqs[x] /= sum } | |
+ s = 0.0 | |
+ freqs.each_key do |x| | |
+ freqs[x] = s .. (s + t = freqs[x]) | |
+ s += t | |
+ end | |
+ @freqs = freqs | |
+ @n = n | |
+ @alpha = (0..0xff).to_a | |
+ end | |
+ | |
+ def random_string | |
+ s = '' | |
+ 30.times { s << @alpha[rand(@alpha.size)] } | |
+ s.to_utf8 | |
+ end | |
+ | |
+ def pick | |
+ r = rand | |
+ found = @freqs.find { |k, f| f.include? rand } | |
+ found && found.first | |
+ end | |
+ | |
+ def make_pick | |
+ k = pick | |
+ case | |
+ when k == Hash, k == Array | |
+ k.new | |
+ when k == true, k == false, k == nil | |
+ k | |
+ when k == String | |
+ random_string | |
+ when k == Fixnum | |
+ rand(2 ** 30) - 2 ** 29 | |
+ when k == Bignum | |
+ rand(2 ** 70) - 2 ** 69 | |
+ end | |
+ end | |
+ | |
+ def fuzz(current = nil) | |
+ if @n > 0 | |
+ case current | |
+ when nil | |
+ @n -= 1 | |
+ current = fuzz [ Hash, Array ][rand(2)].new | |
+ when Array | |
+ while @n > 0 | |
+ @n -= 1 | |
+ current << case p = make_pick | |
+ when Array, Hash | |
+ fuzz(p) | |
+ else | |
+ p | |
+ end | |
+ end | |
+ when Hash | |
+ while @n > 0 | |
+ @n -= 1 | |
+ current[random_string] = case p = make_pick | |
+ when Array, Hash | |
+ fuzz(p) | |
+ else | |
+ p | |
+ end | |
+ end | |
+ end | |
+ end | |
+ current | |
+ end | |
+end | |
+ | |
+class MyState < JSON.state | |
+ WS = " \r\t\n" | |
+ | |
+ def initialize | |
+ super( | |
+ :indent => make_spaces, | |
+ :space => make_spaces, | |
+ :space_before => make_spaces, | |
+ :object_nl => make_spaces, | |
+ :array_nl => make_spaces, | |
+ :max_nesting => false | |
+ ) | |
+ end | |
+ | |
+ def make_spaces | |
+ s = '' | |
+ rand(1).times { s << WS[rand(WS.size)] } | |
+ s | |
+ end | |
+end | |
+ | |
+n = (ARGV.shift || 500).to_i | |
+loop do | |
+ fuzzer = Fuzzer.new(n, | |
+ Hash => 25, | |
+ Array => 25, | |
+ String => 10, | |
+ Fixnum => 10, | |
+ Bignum => 10, | |
+ nil => 5, | |
+ true => 5, | |
+ false => 5 | |
+ ) | |
+ o1 = fuzzer.fuzz | |
+ json = JSON.generate o1, MyState.new | |
+ if $DEBUG | |
+ puts "-" * 80 | |
+ puts json, json.size | |
+ else | |
+ puts json.size | |
+ end | |
+ begin | |
+ o2 = JSON.parse(json, :max_nesting => false) | |
+ rescue JSON::ParserError => e | |
+ puts "Caught #{e.class}: #{e.message}\n#{e.backtrace * "\n"}" | |
+ puts "o1 = #{o1.inspect}", "json = #{json}", "json_str = #{json.inspect}" | |
+ puts "locals = #{local_variables.inspect}" | |
+ exit | |
+ end | |
+ if o1 != o2 | |
+ puts "mismatch", "o1 = #{o1.inspect}", "o2 = #{o2.inspect}", | |
+ "json = #{json}", "json_str = #{json.inspect}" | |
+ puts "locals = #{local_variables.inspect}" | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/server.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/server.rb | |
new file mode 100755 | |
index 0000000..9508311 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/json_pure-1.1.3/tools/server.rb | |
@@ -0,0 +1,62 @@ | |
+#!/usr/bin/env ruby | |
+ | |
+$KCODE='UTF8' | |
+require 'webrick' | |
+include WEBrick | |
+$:.unshift 'ext' | |
+$:.unshift 'lib' | |
+require 'json' | |
+ | |
+class JSONServlet < HTTPServlet::AbstractServlet | |
+ @@count = 1 | |
+ | |
+ def do_GET(req, res) | |
+ obj = { | |
+ "TIME" => Time.now.strftime("%FT%T"), | |
+ "foo" => "Bär", | |
+ "bar" => "© ≠ €!", | |
+ 'a' => 2, | |
+ 'b' => 3.141, | |
+ 'COUNT' => @@count += 1, | |
+ 'c' => 'c', | |
+ 'd' => [ 1, "b", 3.14 ], | |
+ 'e' => { 'foo' => 'bar' }, | |
+ 'g' => "松本行弘", | |
+ 'h' => 1000.0, | |
+ 'i' => 0.001, | |
+ 'j' => "\xf0\xa0\x80\x81", | |
+ } | |
+ res.body = JSON.generate obj | |
+ res['Content-Type'] = "application/json" | |
+ end | |
+end | |
+ | |
+def create_server(err, dir, port) | |
+ dir = File.expand_path(dir) | |
+ err.puts "Surf to:", "http://#{Socket.gethostname}:#{port}" | |
+ | |
+ s = HTTPServer.new( | |
+ :Port => port, | |
+ :DocumentRoot => dir, | |
+ :Logger => WEBrick::Log.new(err), | |
+ :AccessLog => [ | |
+ [ err, WEBrick::AccessLog::COMMON_LOG_FORMAT ], | |
+ [ err, WEBrick::AccessLog::REFERER_LOG_FORMAT ], | |
+ [ err, WEBrick::AccessLog::AGENT_LOG_FORMAT ] | |
+ ] | |
+ ) | |
+ s.mount("/json", JSONServlet) | |
+ s | |
+end | |
+ | |
+default_dir = File.expand_path(File.join(File.dirname(__FILE__), '..', 'data')) | |
+dir = ARGV.shift || default_dir | |
+port = (ARGV.shift || 6666).to_i | |
+s = create_server(STDERR, dir, 6666) | |
+t = Thread.new { s.start } | |
+trap(:INT) do | |
+ s.shutdown | |
+ t.join | |
+ exit | |
+end | |
+sleep | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/Rakefile b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/Rakefile | |
new file mode 100644 | |
index 0000000..c69ddcc | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/Rakefile | |
@@ -0,0 +1,42 @@ | |
+require 'rubygems' | |
+require 'rake/gempackagetask' | |
+require 'rubygems/specification' | |
+require 'date' | |
+ | |
+GEM = "simple_gem" | |
+GEM_VERSION = "0.0.1" | |
+AUTHOR = "Your Name" | |
+EMAIL = "Your Email" | |
+HOMEPAGE = "http://example.com" | |
+SUMMARY = "A gem that provides..." | |
+ | |
+spec = Gem::Specification.new do |s| | |
+ s.name = GEM | |
+ s.version = GEM_VERSION | |
+ s.platform = Gem::Platform::RUBY | |
+ s.has_rdoc = true | |
+ s.summary = SUMMARY | |
+ s.description = s.summary | |
+ s.author = AUTHOR | |
+ s.email = EMAIL | |
+ s.homepage = HOMEPAGE | |
+ | |
+ s.require_path = 'lib' | |
+ s.files = %w(Rakefile) + Dir.glob("{lib}/**/*") | |
+end | |
+ | |
+Rake::GemPackageTask.new(spec) do |pkg| | |
+ pkg.gem_spec = spec | |
+end | |
+ | |
+desc "install the gem locally" | |
+task :install => [:package] do | |
+ sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}} | |
+end | |
+ | |
+desc "create a gemspec file" | |
+task :make_spec do | |
+ File.open("#{GEM}.gemspec", "w") do |file| | |
+ file.puts spec.to_ruby | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/lib/simple_gem.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/lib/simple_gem.rb | |
new file mode 100644 | |
index 0000000..f0c4e44 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.1/lib/simple_gem.rb | |
@@ -0,0 +1,6 @@ | |
+module Merb | |
+ module SpecFixture | |
+ class SimpleGem | |
+ end | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/Rakefile b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/Rakefile | |
new file mode 100644 | |
index 0000000..fa1e180 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/Rakefile | |
@@ -0,0 +1,42 @@ | |
+require 'rubygems' | |
+require 'rake/gempackagetask' | |
+require 'rubygems/specification' | |
+require 'date' | |
+ | |
+GEM = "simple_gem" | |
+GEM_VERSION = "0.0.2" | |
+AUTHOR = "Your Name" | |
+EMAIL = "Your Email" | |
+HOMEPAGE = "http://example.com" | |
+SUMMARY = "A gem that provides..." | |
+ | |
+spec = Gem::Specification.new do |s| | |
+ s.name = GEM | |
+ s.version = GEM_VERSION | |
+ s.platform = Gem::Platform::RUBY | |
+ s.has_rdoc = true | |
+ s.summary = SUMMARY | |
+ s.description = s.summary | |
+ s.author = AUTHOR | |
+ s.email = EMAIL | |
+ s.homepage = HOMEPAGE | |
+ | |
+ s.require_path = 'lib' | |
+ s.files = %w(Rakefile) + Dir.glob("{lib}/**/*") | |
+end | |
+ | |
+Rake::GemPackageTask.new(spec) do |pkg| | |
+ pkg.gem_spec = spec | |
+end | |
+ | |
+desc "install the gem locally" | |
+task :install => [:package] do | |
+ sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}} | |
+end | |
+ | |
+desc "create a gemspec file" | |
+task :make_spec do | |
+ File.open("#{GEM}.gemspec", "w") do |file| | |
+ file.puts spec.to_ruby | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/lib/simple_gem.rb b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/lib/simple_gem.rb | |
new file mode 100644 | |
index 0000000..fb720cf | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/gems/simple_gem-0.0.2/lib/simple_gem.rb | |
@@ -0,0 +1,6 @@ | |
+module Merb | |
+ module SpecFixture | |
+ class SimpleGem2 | |
+ end | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/specifications/bad_require_gem-0.0.1.gemspec b/merb-core/spec/public/core_ext/fixtures/gems/specifications/bad_require_gem-0.0.1.gemspec | |
new file mode 100644 | |
index 0000000..59dfa1b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/specifications/bad_require_gem-0.0.1.gemspec | |
@@ -0,0 +1,28 @@ | |
+# -*- encoding: utf-8 -*- | |
+ | |
+Gem::Specification.new do |s| | |
+ s.name = %q{bad_require_gem} | |
+ s.version = "0.0.1" | |
+ | |
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= | |
+ s.authors = ["Your Name"] | |
+ s.date = %q{2008-10-27} | |
+ s.description = %q{A gem that provides...} | |
+ s.email = %q{Your Email} | |
+ s.files = ["Rakefile", "lib/BadRequireGem.rb"] | |
+ s.has_rdoc = true | |
+ s.homepage = %q{http://example.com} | |
+ s.require_paths = ["lib"] | |
+ s.rubygems_version = %q{1.3.0} | |
+ s.summary = %q{A gem that provides...} | |
+ | |
+ if s.respond_to? :specification_version then | |
+ current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION | |
+ s.specification_version = 2 | |
+ | |
+ if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then | |
+ else | |
+ end | |
+ else | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/specifications/json_pure-1.1.3.gemspec b/merb-core/spec/public/core_ext/fixtures/gems/specifications/json_pure-1.1.3.gemspec | |
new file mode 100644 | |
index 0000000..9b39f4e | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/specifications/json_pure-1.1.3.gemspec | |
@@ -0,0 +1,33 @@ | |
+# -*- encoding: utf-8 -*- | |
+ | |
+Gem::Specification.new do |s| | |
+ s.name = %q{json_pure} | |
+ s.version = "1.1.3" | |
+ | |
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= | |
+ s.authors = ["Florian Frank"] | |
+ s.date = %q{2008-07-10} | |
+ s.default_executable = %q{edit_json.rb} | |
+ s.description = %q{} | |
+ s.email = %q{[email protected]} | |
+ s.executables = ["edit_json.rb"] | |
+ s.files = ["install.rb", "lib", "lib/json.rb", "lib/json", "lib/json/Array.xpm", "lib/json/FalseClass.xpm", "lib/json/json.xpm", "lib/json/editor.rb", "lib/json/Hash.xpm", "lib/json/Key.xpm", "lib/json/common.rb", "lib/json/String.xpm", "lib/json/pure", "lib/json/pure/generator.rb", "lib/json/pure/parser.rb", "lib/json/Numeric.xpm", "lib/json/ext.rb", "lib/json/pure.rb", "lib/json/NilClass.xpm", "lib/json/add", "lib/json/add/rails.rb", "lib/json/add/core.rb", "lib/json/TrueClass.xpm", "lib/json/version.rb", "ext", "ext/json", "ext/json/ext", "ext/json/ext/parser", "ext/json/ext/parser/unicode.h", "ext/json/ext/parser/parser.c", "ext/json/ext/parser/extconf.rb", "ext/json/ext/parser/unicode.c", "ext/json/ext/parser/parser.rl", "ext/json/ext/generator", "ext/json/ext/generator/unicode.h", "ext/json/ext/generator/extconf.rb", "ext/json/ext/generator/generator.c", "ext/json/ext/generator/unicode.c", "README", "diagrams", "CHANGES", "RUBY", "TODO", "VERSION", "tests", "tests/test_json.rb", "tests/test_json_addition.rb", "tests/fixtures", "tests/fixtures/fail11.json", "tests/fixtures/fail5.json", "tests/fixtures/fail10.json", "tests/fixtures/fail3.json", "tests/fixtures/pass15.json", "tests/fixtures/fail9.json", "tests/fixtures/fail22.json", "tests/fixtures/fail6.json", "tests/fixtures/pass2.json", "tests/fixtures/fail20.json", "tests/fixtures/fail19.json", "tests/fixtures/fail12.json", "tests/fixtures/fail7.json", "tests/fixtures/fail4.json", "tests/fixtures/fail1.json", "tests/fixtures/fail24.json", "tests/fixtures/fail21.json", "tests/fixtures/pass1.json", "tests/fixtures/fail2.json", "tests/fixtures/fail25.json", "tests/fixtures/pass16.json", "tests/fixtures/pass3.json", "tests/fixtures/fail18.json", "tests/fixtures/fail28.json", "tests/fixtures/fail13.json", "tests/fixtures/fail27.json", "tests/fixtures/pass17.json", "tests/fixtures/pass26.json", "tests/fixtures/fail23.json", "tests/fixtures/fail14.json", "tests/fixtures/fail8.json", "tests/runner.rb", "tests/test_json_generate.rb", "tests/test_json_rails.rb", "tests/test_json_unicode.rb", "tests/test_json_fixtures.rb", "benchmarks", "benchmarks/benchmark_parser.rb", "benchmarks/benchmark_generator.rb", "benchmarks/benchmark_rails.rb", "benchmarks/benchmark.txt", "Rakefile", "GPL", "data", "data/example.json", "data/index.html", "data/prototype.js", "bin", "bin/edit_json.rb", "bin/prettify_json.rb", "tools", "tools/fuzz.rb", "tools/server.rb"] | |
+ s.has_rdoc = true | |
+ s.homepage = %q{http://json.rubyforge.org} | |
+ s.rdoc_options = ["--title", "JSON -- A JSON implemention", "--main", "JSON", "--line-numbers"] | |
+ s.require_paths = ["lib"] | |
+ s.rubyforge_project = %q{json} | |
+ s.rubygems_version = %q{1.3.0} | |
+ s.summary = %q{A JSON implementation in Ruby} | |
+ s.test_files = ["tests/runner.rb"] | |
+ | |
+ if s.respond_to? :specification_version then | |
+ current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION | |
+ s.specification_version = 2 | |
+ | |
+ if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then | |
+ else | |
+ end | |
+ else | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.1.gemspec b/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.1.gemspec | |
new file mode 100644 | |
index 0000000..4e8afc3 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.1.gemspec | |
@@ -0,0 +1,28 @@ | |
+# -*- encoding: utf-8 -*- | |
+ | |
+Gem::Specification.new do |s| | |
+ s.name = %q{simple_gem} | |
+ s.version = "0.0.1" | |
+ | |
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= | |
+ s.authors = ["Your Name"] | |
+ s.date = %q{2008-10-27} | |
+ s.description = %q{A gem that provides...} | |
+ s.email = %q{Your Email} | |
+ s.files = ["Rakefile", "lib/simple_gem.rb"] | |
+ s.has_rdoc = true | |
+ s.homepage = %q{http://example.com} | |
+ s.require_paths = ["lib"] | |
+ s.rubygems_version = %q{1.3.0} | |
+ s.summary = %q{A gem that provides...} | |
+ | |
+ if s.respond_to? :specification_version then | |
+ current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION | |
+ s.specification_version = 2 | |
+ | |
+ if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then | |
+ else | |
+ end | |
+ else | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.2.gemspec b/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.2.gemspec | |
new file mode 100644 | |
index 0000000..a667aab | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/gems/specifications/simple_gem-0.0.2.gemspec | |
@@ -0,0 +1,28 @@ | |
+# -*- encoding: utf-8 -*- | |
+ | |
+Gem::Specification.new do |s| | |
+ s.name = %q{simple_gem} | |
+ s.version = "0.0.2" | |
+ | |
+ s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version= | |
+ s.authors = ["Your Name"] | |
+ s.date = %q{2008-10-27} | |
+ s.description = %q{A gem that provides...} | |
+ s.email = %q{Your Email} | |
+ s.files = ["Rakefile", "lib/simple_gem.rb"] | |
+ s.has_rdoc = true | |
+ s.homepage = %q{http://example.com} | |
+ s.require_paths = ["lib"] | |
+ s.rubygems_version = %q{1.3.0} | |
+ s.summary = %q{A gem that provides...} | |
+ | |
+ if s.respond_to? :specification_version then | |
+ current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION | |
+ s.specification_version = 2 | |
+ | |
+ if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then | |
+ else | |
+ end | |
+ else | |
+ end | |
+end | |
diff --git a/merb-core/spec/public/core_ext/fixtures/simple_gem/Rakefile b/merb-core/spec/public/core_ext/fixtures/simple_gem/Rakefile | |
new file mode 100644 | |
index 0000000..c69ddcc | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/simple_gem/Rakefile | |
@@ -0,0 +1,42 @@ | |
+require 'rubygems' | |
+require 'rake/gempackagetask' | |
+require 'rubygems/specification' | |
+require 'date' | |
+ | |
+GEM = "simple_gem" | |
+GEM_VERSION = "0.0.1" | |
+AUTHOR = "Your Name" | |
+EMAIL = "Your Email" | |
+HOMEPAGE = "http://example.com" | |
+SUMMARY = "A gem that provides..." | |
+ | |
+spec = Gem::Specification.new do |s| | |
+ s.name = GEM | |
+ s.version = GEM_VERSION | |
+ s.platform = Gem::Platform::RUBY | |
+ s.has_rdoc = true | |
+ s.summary = SUMMARY | |
+ s.description = s.summary | |
+ s.author = AUTHOR | |
+ s.email = EMAIL | |
+ s.homepage = HOMEPAGE | |
+ | |
+ s.require_path = 'lib' | |
+ s.files = %w(Rakefile) + Dir.glob("{lib}/**/*") | |
+end | |
+ | |
+Rake::GemPackageTask.new(spec) do |pkg| | |
+ pkg.gem_spec = spec | |
+end | |
+ | |
+desc "install the gem locally" | |
+task :install => [:package] do | |
+ sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}} | |
+end | |
+ | |
+desc "create a gemspec file" | |
+task :make_spec do | |
+ File.open("#{GEM}.gemspec", "w") do |file| | |
+ file.puts spec.to_ruby | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/simple_gem/lib/simple_gem.rb b/merb-core/spec/public/core_ext/fixtures/simple_gem/lib/simple_gem.rb | |
new file mode 100644 | |
index 0000000..f0c4e44 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/simple_gem/lib/simple_gem.rb | |
@@ -0,0 +1,6 @@ | |
+module Merb | |
+ module SpecFixture | |
+ class SimpleGem | |
+ end | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/simple_gem_2/Rakefile b/merb-core/spec/public/core_ext/fixtures/simple_gem_2/Rakefile | |
new file mode 100644 | |
index 0000000..fa1e180 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/simple_gem_2/Rakefile | |
@@ -0,0 +1,42 @@ | |
+require 'rubygems' | |
+require 'rake/gempackagetask' | |
+require 'rubygems/specification' | |
+require 'date' | |
+ | |
+GEM = "simple_gem" | |
+GEM_VERSION = "0.0.2" | |
+AUTHOR = "Your Name" | |
+EMAIL = "Your Email" | |
+HOMEPAGE = "http://example.com" | |
+SUMMARY = "A gem that provides..." | |
+ | |
+spec = Gem::Specification.new do |s| | |
+ s.name = GEM | |
+ s.version = GEM_VERSION | |
+ s.platform = Gem::Platform::RUBY | |
+ s.has_rdoc = true | |
+ s.summary = SUMMARY | |
+ s.description = s.summary | |
+ s.author = AUTHOR | |
+ s.email = EMAIL | |
+ s.homepage = HOMEPAGE | |
+ | |
+ s.require_path = 'lib' | |
+ s.files = %w(Rakefile) + Dir.glob("{lib}/**/*") | |
+end | |
+ | |
+Rake::GemPackageTask.new(spec) do |pkg| | |
+ pkg.gem_spec = spec | |
+end | |
+ | |
+desc "install the gem locally" | |
+task :install => [:package] do | |
+ sh %{sudo #{Gem.ruby} -S gem install pkg/#{GEM}-#{GEM_VERSION}} | |
+end | |
+ | |
+desc "create a gemspec file" | |
+task :make_spec do | |
+ File.open("#{GEM}.gemspec", "w") do |file| | |
+ file.puts spec.to_ruby | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/fixtures/simple_gem_2/lib/simple_gem.rb b/merb-core/spec/public/core_ext/fixtures/simple_gem_2/lib/simple_gem.rb | |
new file mode 100644 | |
index 0000000..fb720cf | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/fixtures/simple_gem_2/lib/simple_gem.rb | |
@@ -0,0 +1,6 @@ | |
+module Merb | |
+ module SpecFixture | |
+ class SimpleGem2 | |
+ end | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/immediate_spec.rb b/merb-core/spec/public/core_ext/immediate_spec.rb | |
new file mode 100644 | |
index 0000000..1c73c2b | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/immediate_spec.rb | |
@@ -0,0 +1,17 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using depdendency to require a simple gem immediately" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "simple_gem", :immediate => true | |
+ end | |
+ | |
+ it "loads it right away" do | |
+ defined?(Merb::SpecFixture::SimpleGem2).should_not be_nil | |
+ end | |
+ | |
+ it "is still loaded once Merb starts" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::SimpleGem2).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
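
The spec above asserts that an :immediate dependency is usable before Merb finishes booting. As a rough standalone sketch of what "activate and require right away" amounts to — plain RubyGems only, with the fixture repository path assumed rather than taken from the framework — it boils down to:

  # Illustrative sketch, not Merb's code path; the paths are assumptions.
  require 'rubygems'

  fixture_repo = File.expand_path(File.join(File.dirname(__FILE__), 'fixtures', 'gems'))
  Gem.use_paths(fixture_repo)     # resolve gems from the fixture repository only
  gem 'simple_gem'                # activate the newest fixture version (0.0.2) now
  require 'simple_gem'            # and load it immediately, before any boot steps
  puts defined?(Merb::SpecFixture::SimpleGem2)   # => "constant" once loaded

Without that final require, the constant stays undefined until something later loads the gem, which is the contrast the deferred-loading specs below rely on.
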
diff --git a/merb-core/spec/public/core_ext/kernel_spec.rb b/merb-core/spec/public/core_ext/kernel_spec.rb | |
index 7f81715..b560993 100644 | |
--- a/merb-core/spec/public/core_ext/kernel_spec.rb | |
+++ b/merb-core/spec/public/core_ext/kernel_spec.rb | |
@@ -3,71 +3,6 @@ startup_merb | |
$:.push File.join(File.dirname(__FILE__), "fixtures") | |
-describe Kernel, "#dependency" do | |
- | |
- before { reset_dependency('core_ext_dependency', :CoreExtDependency) } | |
- | |
- it "works even when the BootLoader has already finished" do | |
- dependency "core_ext_dependency" | |
- defined?(CoreExtDependency).should_not be_nil | |
- end | |
- | |
- it "takes :immediate => true to require a dependency immediately" do | |
- Merb::BootLoader::finished.delete("Merb::BootLoader::Dependencies") | |
- dependency "core_ext_dependency" | |
- defined?(CoreExtDependency).should be_nil | |
- dependency "core_ext_dependency", :immediate => true | |
- defined?(CoreExtDependency).should_not be_nil | |
- Merb::BootLoader::finished << "Merb::BootLoader::Dependencies" | |
- end | |
- | |
- it "returns a Gem::Dependency" do | |
- dep = dependency "core_ext_dependency", ">= 1.1.2" | |
- dep.name.should == "core_ext_dependency" | |
- dep.version_requirements.to_s.should == ">= 1.1.2" | |
- end | |
- | |
- it "adds a Gem::Dependency item to Merb::BootLoader::Dependencies.dependencies" do | |
- dep = dependency "core_ext_dependency", ">= 1.1.2" | |
- dep.name.should == "core_ext_dependency" | |
- dep.version_requirements.to_s.should == ">= 1.1.2" | |
- Merb::BootLoader::Dependencies.dependencies.should include(dep) | |
- end | |
- | |
- it "will replace any previously registered dependencies with the same name" do | |
- dep = dependency "core_ext_dependency", ">= 1.1.0" | |
- dep.version_requirements.to_s.should == ">= 1.1.0" | |
- dep = dependency "core_ext_dependency", ">= 1.1.2" | |
- dep.version_requirements.to_s.should == ">= 1.1.2" | |
- entries = Merb::BootLoader::Dependencies.dependencies.select { |d| d.name == dep.name } | |
- entries.first.version_requirements.to_s.should == ">= 1.1.2" | |
- entries.length.should == 1 | |
- end | |
- | |
-end | |
- | |
-describe Kernel, "#load_dependency" do | |
- | |
- before { reset_dependency('core_ext_dependency', :CoreExtDependency) } | |
- | |
- it "requires a dependency immediately" do | |
- load_dependency "core_ext_dependency" | |
- defined?(CoreExtDependency).should_not be_nil | |
- end | |
- | |
- it "returns a Gem::Dependency" do | |
- dep = load_dependency "core_ext_dependency" | |
- dep.name.should == "core_ext_dependency" | |
- ["", ">= 0"].include?(dep.version_requirements.to_s.should) | |
- end | |
- | |
- it "adds a Gem::Dependency item to Merb::BootLoader::Dependencies.dependencies" do | |
- dep = load_dependency "core_ext_dependency" | |
- Merb::BootLoader::Dependencies.dependencies.should include(dep) | |
- end | |
- | |
-end | |
- | |
describe Kernel, "#use_orm" do | |
before do | |
diff --git a/merb-core/spec/public/core_ext/multi_versions_spec.rb b/merb-core/spec/public/core_ext/multi_versions_spec.rb | |
new file mode 100644 | |
index 0000000..fc5f5c1 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/multi_versions_spec.rb | |
@@ -0,0 +1,20 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to update the version of a previously required dependency" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "simple_gem", "= 0.0.1" | |
+ dependency "simple_gem", "= 0.0.2" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::SimpleGem).should be_nil | |
+ defined?(Merb::SpecFixture::SimpleGem2).should be_nil | |
+ end | |
+ | |
+ it "loads the second one when merb starts" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::SimpleGem).should be_nil | |
+ defined?(Merb::SpecFixture::SimpleGem2).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
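A standalone sketch (not part of the patch) of the "last registration wins" rule this spec asserts, together with the single-entry expectation from the replaced kernel_spec test above: declaring the same gem twice leaves one entry carrying the most recent version requirement. Names are illustrative.

# Illustrative registry keyed by gem name: a later declaration overwrites
# the earlier one instead of accumulating.
registrations = {}

register = lambda do |name, version|
  registrations[name] = version
end

register.call("simple_gem", "= 0.0.1")
register.call("simple_gem", "= 0.0.2")

p registrations        # => {"simple_gem"=>"= 0.0.2"}
p registrations.size   # => 1 entry, as the replaced kernel_spec expected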
diff --git a/merb-core/spec/public/core_ext/require_as_fail_spec.rb b/merb-core/spec/public/core_ext/require_as_fail_spec.rb | |
new file mode 100644 | |
index 0000000..bdf33a3 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/require_as_fail_spec.rb | |
@@ -0,0 +1,19 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a bad gem" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "bad_require_gem" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::BadRequireGem).should be_nil | |
+ defined?(Merb::SpecFixture::BadRequireGem).should be_nil | |
+ end | |
+ | |
+ it "raises an error when Merb starts because it can't find the file to require" do | |
+ lambda do | |
+ startup_merb(:show_ugly_backtraces => true) | |
+ end.should raise_error(LoadError) | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/require_as_immediate_spec.rb b/merb-core/spec/public/core_ext/require_as_immediate_spec.rb | |
new file mode 100644 | |
index 0000000..ce7a381 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/require_as_immediate_spec.rb | |
@@ -0,0 +1,17 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a bad gem immediately" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "bad_require_gem", :require_as => "BadRequireGem", :immediate => true | |
+ end | |
+ | |
+ it "loads it right away" do | |
+ defined?(Merb::SpecFixture::BadRequireGem).should_not be_nil | |
+ end | |
+ | |
+ it "loads the file once Merb is started" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::BadRequireGem).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/require_as_version_spec.rb b/merb-core/spec/public/core_ext/require_as_version_spec.rb | |
new file mode 100644 | |
index 0000000..6c6eff6 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/require_as_version_spec.rb | |
@@ -0,0 +1,17 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a bad gem with a version" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "bad_require_gem", "0.0.1", :require_as => "BadRequireGem" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::BadRequireGem).should be_nil | |
+ end | |
+ | |
+ it "loads the file once Merb is started" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::BadRequireGem).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/require_as_working_spec.rb b/merb-core/spec/public/core_ext/require_as_working_spec.rb | |
new file mode 100644 | |
index 0000000..1acf319 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/require_as_working_spec.rb | |
@@ -0,0 +1,17 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a bad gem" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "bad_require_gem", :require_as => "BadRequireGem" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::BadRequireGem).should be_nil | |
+ end | |
+ | |
+ it "loads the file once Merb is started" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::BadRequireGem).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
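A standalone sketch (not part of the patch) of the mapping the three require_as specs above exercise: the dependency is registered under the gem's name, but the feature actually required can be a different string. The helper name is hypothetical, and "set"/"some_gem" merely stand in for a gem whose library file name does not match its gem name.

# Illustrative helper: fall back to the registered name unless a
# :require_as override is given.
def require_feature_for(name, options = {})
  require(options[:require_as] || name)
end

require_feature_for("set")                             # feature matches the name
require_feature_for("some_gem", :require_as => "set")  # mapped to a different file
puts defined?(Set)                                     # => "constant"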
diff --git a/merb-core/spec/public/core_ext/simple_dependency_spec.rb b/merb-core/spec/public/core_ext/simple_dependency_spec.rb | |
new file mode 100644 | |
index 0000000..574ba44 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/simple_dependency_spec.rb | |
@@ -0,0 +1,17 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a simple gem" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "simple_gem" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::SimpleGem2).should be_nil | |
+ end | |
+ | |
+ it "loads it when merb starts" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::SimpleGem2).should_not be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-core/spec/public/core_ext/version_dependency_spec.rb b/merb-core/spec/public/core_ext/version_dependency_spec.rb | |
new file mode 100644 | |
index 0000000..7dd6b02 | |
--- /dev/null | |
+++ b/merb-core/spec/public/core_ext/version_dependency_spec.rb | |
@@ -0,0 +1,19 @@ | |
+require File.join(File.dirname(__FILE__), "spec_helper") | |
+ | |
+describe "using dependency to require a simple gem with a version" do | |
+ before(:all) do | |
+ Gem.use_paths(File.dirname(__FILE__) / "fixtures" / "gems") | |
+ dependency "simple_gem", "= 0.0.1" | |
+ end | |
+ | |
+ it "doesn't load it right away" do | |
+ defined?(Merb::SpecFixture::SimpleGem).should be_nil | |
+ defined?(Merb::SpecFixture::SimpleGem2).should be_nil | |
+ end | |
+ | |
+ it "loads it when merb starts" do | |
+ startup_merb | |
+ defined?(Merb::SpecFixture::SimpleGem).should_not be_nil | |
+ defined?(Merb::SpecFixture::SimpleGem2).should be_nil | |
+ end | |
+end | |
\ No newline at end of file | |
diff --git a/merb-gen/lib/generators/templates/application/common/dothtaccess b/merb-gen/lib/generators/templates/application/common/dothtaccess | |
index 5ef5b8b..455e706 100644 | |
--- a/merb-gen/lib/generators/templates/application/common/dothtaccess | |
+++ b/merb-gen/lib/generators/templates/application/common/dothtaccess | |
@@ -14,4 +14,4 @@ RewriteCond %{REQUEST_FILENAME} !-f | |
RewriteRule ^(.*)$ merb.fcgi [QSA,L] | |
-ErrorDocument 500 "<h2>Application Error</h2>Merb could not be reached | |
+ErrorDocument 500 "<h2>Application Error</h2>Merb could not be reached" | |
diff --git a/merb-gen/lib/generators/templates/application/common/merb.thor b/merb-gen/lib/generators/templates/application/common/merb.thor | |
index 18957d8..83767e5 100644 | |
--- a/merb-gen/lib/generators/templates/application/common/merb.thor | |
+++ b/merb-gen/lib/generators/templates/application/common/merb.thor | |
@@ -5,7 +5,7 @@ require 'fileutils' | |
require 'yaml' | |
# Important - don't change this line or its position | |
-MERB_THOR_VERSION = '0.0.53' | |
+MERB_THOR_VERSION = '0.0.54' | |
############################################################################## | |
@@ -476,8 +476,9 @@ module MerbThorHelper | |
end | |
def install_dependency(dependency, opts = {}) | |
- opts[:version] ||= dependency.version_requirements.to_s | |
- Merb::Gem.install(dependency.name, default_install_options.merge(opts)) | |
+ version = dependency.version_requirements.to_s | |
+ install_opts = default_install_options.merge(:version => version) | |
+ Merb::Gem.install(dependency.name, install_opts.merge(opts)) | |
end | |
def install_dependency_from_source(dependency, opts = {}) | |
@@ -760,8 +761,10 @@ module Merb | |
if only_missing = comp == 'missing' | |
message "Preparing to install missing gems #{where} using #{strategy} strategy..." | |
comp = nil | |
+ clobber = false | |
else | |
message "Preparing to install #{where} using #{strategy} strategy..." | |
+ clobber = true | |
end | |
# If comp given, filter on known stack components | |
@@ -777,7 +780,7 @@ module Merb | |
warning "No dependencies to install..." | |
else | |
puts "#{deps.length} dependencies to install..." | |
- install_dependencies(strategy, deps) | |
+ install_dependencies(strategy, deps, clobber) | |
end | |
# Show current dependency info now that we're done | |
@@ -902,31 +905,33 @@ module Merb | |
end | |
end | |
- def install_dependencies(strategy, deps) | |
+ def install_dependencies(strategy, deps, clobber = true) | |
if method = strategy?(strategy) | |
# Clobber existing local dependencies | |
- clobber_dependencies! | |
+ clobber_dependencies! if clobber | |
# Run the chosen strategy - collect files installed from stable gems | |
installed_from_stable = send(method, deps).map { |d| d.name } | |
- # Sleep a bit otherwise the following steps won't see the new files | |
- sleep(deps.length) if deps.length > 0 | |
- | |
- # Leave a file to denote the strategy that has been used for this dependency | |
- self.local.each do |spec| | |
- next unless File.directory?(spec.full_gem_path) | |
- unless installed_from_stable.include?(spec.name) | |
- FileUtils.touch(File.join(spec.full_gem_path, "#{strategy}.strategy")) | |
- else | |
- FileUtils.touch(File.join(spec.full_gem_path, "stable.strategy")) | |
- end | |
- end | |
+ unless dry_run? | |
+ # Sleep a bit otherwise the following steps won't see the new files | |
+ sleep(deps.length) if deps.length > 0 && deps.length <= 10 | |
+ | |
+ # Leave a file to denote the strategy that has been used for this dependency | |
+ self.local.each do |spec| | |
+ next unless File.directory?(spec.full_gem_path) | |
+ unless installed_from_stable.include?(spec.name) | |
+ FileUtils.touch(File.join(spec.full_gem_path, "#{strategy}.strategy")) | |
+ else | |
+ FileUtils.touch(File.join(spec.full_gem_path, "stable.strategy")) | |
+ end | |
+ end | |
- # Add local binaries for the installed framework dependencies | |
- comps = Merb::Stack.all_components & deps.map { |d| d.name } | |
- comps << { :no_minigems => 'merb-gen' } | |
- ensure_bin_wrapper_for(*comps) | |
+ # Add local binaries for the installed framework dependencies | |
+ comps = Merb::Stack.all_components & deps.map { |d| d.name } | |
+ comps << { :no_minigems => 'merb-gen' } | |
+ ensure_bin_wrapper_for(*comps) | |
+ end | |
return true | |
end | |
false | |
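A standalone sketch (not part of the patch) of the control flow the hunks above thread through install_dependencies: a "missing"-only install leaves existing local gems alone, and the post-install bookkeeping is skipped on a dry run. Method names are illustrative.

# Illustrative flow: clobber only when asked, skip bookkeeping on dry runs.
def clobber_existing!
  puts "removing existing local gems first"
end

def run_install(deps, options = {})
  clobber_existing! if options[:clobber]
  installed = deps.dup                       # stand-in for the chosen strategy
  unless options[:dry_run]
    installed.each { |name| puts "touching #{name}.strategy marker" }
  end
  true
end

run_install(%w[merb-core extlib], :clobber => true)
run_install(%w[merb-core], :dry_run => true)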
@@ -967,7 +972,7 @@ module Merb | |
end | |
end | |
end | |
- | |
+ | |
deps | |
end | |
@@ -1037,15 +1042,12 @@ module Merb | |
# Selectively update repositories for the matching dependencies | |
update_dependency_repositories(deps) unless dry_run? | |
- # Skip gem dependencies to prevent them from being installed from stable; | |
- # however, core dependencies will be retrieved from source when available | |
- install_opts = { :ignore_dependencies => true } | |
if core = deps.find { |d| d.name == 'merb-core' } | |
if dry_run? | |
note "Installing #{core.name}..." | |
else | |
- if install_dependency_from_source(core, install_opts) | |
- elsif install_dependency(core, install_opts) | |
+ if install_dependency_from_source(core) | |
+ elsif install_dependency(core) | |
info "Installed #{core.name} from rubygems..." | |
installed_from_rubygems << core | |
end | |
@@ -1057,8 +1059,8 @@ module Merb | |
if dry_run? | |
note "Installing #{dependency.name}..." | |
else | |
- if install_dependency_from_source(dependency, install_opts) | |
- elsif install_dependency(dependency, install_opts) | |
+ if install_dependency_from_source(dependency) | |
+ elsif install_dependency(dependency) | |
info "Installed #{dependency.name} from rubygems..." | |
installed_from_rubygems << dependency | |
end | |
@@ -1091,12 +1093,18 @@ module Merb | |
@_merb_loaded = true | |
end | |
Merb::BootLoader::Dependencies.dependencies | |
- rescue => e | |
+ rescue StandardError => e | |
error "Couldn't extract dependencies from application!" | |
error e.message | |
puts "Make sure you're executing the task from your app (--merb-root), or" | |
puts "specify a config option (--config or --config-file=YAML_FILE)" | |
return [] | |
+ rescue SystemExit | |
+ error "Couldn't extract dependencies from application!" | |
+ error "application failed to run" | |
+ puts "Please check if your application runs using 'merb'; for example," | |
+ puts "look for any gem version mismatches in dependencies.rb" | |
+ return [] | |
end | |
# Parse the basic YAML config data, and process Gem::Dependency output. | |
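A standalone sketch (not part of the patch) of why the hunk above needs a separate rescue clause: SystemExit descends from Exception rather than StandardError, so rescuing StandardError alone would let an application's exit(1) terminate the thor task instead of producing the friendlier message.

# exit raises SystemExit, which skips the StandardError clause.
begin
  exit(1)
rescue StandardError => e
  puts "never reached: #{e.class}"
rescue SystemExit
  puts "caught the application's exit; the task can return [] instead"
end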
@@ -1124,6 +1132,7 @@ module Merb | |
DM_STACK = %w[ | |
extlib | |
+ data_objects | |
dm-core | |
dm-aggregates | |
dm-migrations | |
@@ -1143,6 +1152,9 @@ module Merb | |
merb-mailer | |
merb-slices | |
merb-auth | |
+ merb-auth-core | |
+ merb-auth-more | |
+ merb-auth-slice-password | |
merb-param-protection | |
merb-exceptions | |
] + DM_STACK | |
@@ -1221,6 +1233,15 @@ module Merb | |
dm-rest-adapter | |
] | |
+ DATA_OBJECTS = %w[ | |
+ data_objects | |
+ do_derby do_hsqldb | |
+ do_jdbc | |
+ do_mysql | |
+ do_postgres | |
+ do_sqlite3 | |
+ ] | |
+ | |
attr_accessor :system, :local, :missing | |
include MerbThorHelper | |
@@ -1391,6 +1412,7 @@ module Merb | |
comps["merb-more"] = MERB_MORE.sort | |
comps["merb-plugins"] = MERB_PLUGINS.sort | |
comps["dm-more"] = DM_MORE.sort | |
+ comps["do"] = DATA_OBJECTS.sort | |
comps | |
end | |
@@ -1438,7 +1460,7 @@ module Merb | |
end | |
def self.base_components | |
- %w[thor rake] | |
+ %w[thor rake extlib] | |
end | |
def self.all_components | |
@@ -1450,6 +1472,7 @@ module Merb | |
def self.core_dependencies(gem_dir = nil, ignore_deps = false) | |
@_core_dependencies ||= begin | |
if gem_dir # add local gems to index | |
+ orig_gem_path = ::Gem.path | |
::Gem.clear_paths; ::Gem.path.unshift(gem_dir) | |
end | |
deps = [] | |
@@ -1464,7 +1487,7 @@ module Merb | |
deps += gemspec.dependencies | |
end | |
end | |
- ::Gem.clear_paths if gem_dir # reset | |
+ ::Gem.path.replace(orig_gem_path) if gem_dir # reset | |
deps | |
end | |
end | |
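A standalone sketch (not part of the patch) of the save-and-restore pattern the core_dependencies hunks switch to: remember the original Gem.path before prepending a local gem directory, then restore its contents afterwards rather than just calling Gem.clear_paths. This assumes a RubyGems of the era where Gem.path returns a mutable array, as the patched code relies on; the helper name is illustrative.

require "rubygems"

# Illustrative wrapper around the same sequence of RubyGems calls.
def with_local_gem_dir(gem_dir)
  orig_gem_path = ::Gem.path.dup
  ::Gem.clear_paths
  ::Gem.path.unshift(gem_dir)
  yield
ensure
  ::Gem.path.replace(orig_gem_path)
end

with_local_gem_dir("/tmp/local_gems") do
  puts ::Gem.path.first   # => "/tmp/local_gems"
end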
diff --git a/merb-gen/lib/generators/templates/application/merb_core/config/rack.rb b/merb-gen/lib/generators/templates/application/merb_core/config/rack.rb | |
index a8ec99d..494c687 100644 | |
--- a/merb-gen/lib/generators/templates/application/merb_core/config/rack.rb | |
+++ b/merb-gen/lib/generators/templates/application/merb_core/config/rack.rb | |
@@ -1,4 +1,3 @@ | |
- | |
# use PathPrefix Middleware if :path_prefix is set in Merb::Config | |
if prefix = ::Merb::Config[:path_prefix] | |
use Merb::Rack::PathPrefix, prefix | |
diff --git a/merb-gen/lib/generators/templates/application/merb_stack/config/rack.rb b/merb-gen/lib/generators/templates/application/merb_stack/config/rack.rb | |
index a8ec99d..494c687 100644 | |
--- a/merb-gen/lib/generators/templates/application/merb_stack/config/rack.rb | |
+++ b/merb-gen/lib/generators/templates/application/merb_stack/config/rack.rb | |
@@ -1,4 +1,3 @@ | |
- | |
# use PathPrefix Middleware if :path_prefix is set in Merb::Config | |
if prefix = ::Merb::Config[:path_prefix] | |
use Merb::Rack::PathPrefix, prefix | |