@rklemme
Created April 22, 2012 11:08
Benchmark for determining duplicate positions
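Each variant below builds the same result: a hash mapping every value that occurs more than once in dat to the list of indices at which it appears. As a minimal standalone sketch (reference only, not part of the original benchmark script), the expected result for the sample data is:

# Reference only: the hash every benchmarked variant is expected to produce.
dat = [1, 2, 3, 2, 4, 5, 4, 4]
dups = {}
dat.each_with_index { |val, idx| (dups[val] ||= []) << idx }
dups.delete_if { |_k, v| v.size == 1 }
p dups   # => {2=>[1, 3], 4=>[4, 6, 7]}

The script below times a number of ways to build that hash.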
require 'benchmark'

REP = 100_000                    # iterations per variant
dat = [1, 2, 3, 2, 4, 5, 4, 4]   # sample data: 2 and 4 occur more than once

Benchmark.bmbm 25 do |x|
  x.report "Jan low level" do
    REP.times do
      dups = {}
      dat.each_with_index do |value, i|
        (i + 1).upto dat.length - 1 do |j|
          if dat[j] == value
            dups[value] = [i] if dups[value].nil?
            dups[value] << j
            break
          end
        end
      end
      #p dups; exit
    end
  end

  x.report "Jan multi Hash" do
    REP.times do
      dups = dat.each_with_index.group_by(&:first).inject({}) do |result, (val, group)|
        next result if group.length == 1
        result.merge val => group.map {|pair| pair[1]}
      end
      #p dups; exit
    end
  end

  x.report "Sam" do
    REP.times do
      dups = Hash.new {|h, k| h[k] = []}
      dat.each_index {|i| dups[dat[i]] << i unless 1 == dat.count(dat[i])}
      #p dups; exit
    end
  end

  x.report "Lars" do
    REP.times do
      dups = dat.each_with_index.reduce({}) { |hash, (item, index)|
        hash[item] = (hash[item] || []) << index
        hash
      }.select { |key, value|
        value.size > 1
      }
      #p dups; exit
    end
  end

  x.report "Lars simpler" do
    REP.times do
      dups = dat.each_with_index.reduce({}) { |hash, (item, index)|
        (hash[item] ||= []) << index
        hash
      }.select { |key, value|
        value.size > 1
      }
      #p dups; exit
    end
  end

  x.report "Lars faster?" do
    REP.times do
      dups = dat.each_with_index.reduce({}) { |hash, (item, index)|
        (hash[item] ||= []) << index
        hash
      }.delete_if { |key, value|
        value.size == 1
      }
      #p dups; exit
    end
  end

  x.report "Greg" do
    REP.times do
      dups = {}
      dat.each_with_index do |key, indx|
        if !dups.has_key?(key)
          dups.store(key, [])
        end
        dups[key].push(indx)
      end
      dups.delete_if {|k, v| v.size == 1}
      #p dups; exit
    end
  end

  x.report "RK #[]=" do
    REP.times do
      dups = {}
      dat.each_with_index do |val, idx|
        (dups[val] ||= []) << idx
      end
      dups.delete_if {|k, v| v.size == 1}
      #p dups; exit
    end
  end

  x.report "RK #fetch" do
    REP.times do
      dups = {}
      dat.each_with_index do |val, idx|
        dups.fetch(val) { dups[val] = [] } << idx
      end
      dups.delete_if {|k, v| v.size == 1}
      #p dups; exit
    end
  end

  x.report "RK / Ryan #default_proc" do
    REP.times do
      dups = Hash.new {|h, k| h[k] = []}
      dat.each_with_index do |val, idx|
        dups[val] << idx
      end
      dups.delete_if {|k, v| v.size == 1}
      #p dups; exit
    end
  end

  x.report "RK 1 liner" do
    REP.times do
      dups = dat.each_with_index.group_by(&:first).delete_if {|k, v| v.size == 1}.each {|k, v| v.map!(&:last)}
      #p dups; exit
    end
  end
end
$ ruby19 -v
ruby 1.9.3p125 (2012-02-16 revision 34643) [i686-linux]
$ ruby19 b.rb
Rehearsal -------------------------------------------------------------
Jan low level               1.320000   0.000000   1.320000 (  1.373991)
Jan multi Hash              2.250000   0.000000   2.250000 (  2.395521)
Sam                         1.630000   0.000000   1.630000 (  2.000988)
Lars                        1.590000   0.000000   1.590000 (  1.675986)
Lars simpler                1.550000   0.000000   1.550000 (  1.616905)
Lars faster?                1.400000   0.000000   1.400000 (  1.467628)
Greg                        1.180000   0.000000   1.180000 (  1.210565)
RK #[]=                     0.950000   0.000000   0.950000 (  1.027279)
RK #fetch                   1.170000   0.000000   1.170000 (  1.219644)
RK / Ryan #default_proc     1.460000   0.000000   1.460000 (  1.505699)
RK 1 liner                  1.560000   0.010000   1.570000 (  1.692262)
--------------------------------------------------- total: 16.070000sec

                                  user     system      total        real
Jan low level               1.360000   0.000000   1.360000 (  1.425580)
Jan multi Hash              2.340000   0.000000   2.340000 (  2.417529)
Sam                         1.600000   0.000000   1.600000 (  1.733025)
Lars                        1.560000   0.000000   1.560000 (  1.610749)
Lars simpler                1.530000   0.000000   1.530000 (  1.675452)
Lars faster?                1.390000   0.000000   1.390000 (  1.445232)
Greg                        1.170000   0.000000   1.170000 (  1.205904)
RK #[]=                     0.940000   0.000000   0.940000 (  0.984368)
RK #fetch                   1.170000   0.010000   1.180000 (  1.243567)
RK / Ryan #default_proc     1.450000   0.000000   1.450000 (  1.507328)
RK 1 liner                  1.560000   0.000000   1.560000 (  1.603388)