Benchmark for the performance of FixedPointDecimals relative to other types
# Decimal Representation Comparisons
#
# This benchmark compares the performance of several numeric representations over various
# numeric operations (+, -, *, /, ÷, ...) applied in long loops of scalar values, in order
# to guide decision-making about how to represent fixed-decimal numbers.
#
# It compares fixed-decimal types against the builtin Int and Float types of various sizes.
# The output is written to a .csv file in the same directory as this file.
module DecimalRepresentationComparisons

using FixedPointDecimals
using Random
using BenchmarkTools, Statistics
using DataFrames
using CSV

decimal_precision = 2
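
# Illustrative aside (a minimal sketch, not part of the measurements): a
# FixedDecimal stores its value as a scaled integer, so decimal arithmetic that
# drifts in binary floating point stays exact here.
@assert FixedPointDecimals.FixedDecimal{Int64, decimal_precision}(0.10) +
        FixedPointDecimals.FixedDecimal{Int64, decimal_precision}(0.20) ==
        FixedPointDecimals.FixedDecimal{Int64, decimal_precision}(0.30)  # whereas 0.1 + 0.2 != 0.3 in Float64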
# The numeric representations to compare (decimal values are rounded when
# expressed as integers).
fd_FixedPointDecimal_types = [
    FixedPointDecimals.FixedDecimal{Int32, decimal_precision},
    FixedPointDecimals.FixedDecimal{Int64, decimal_precision},
    FixedPointDecimals.FixedDecimal{Int128, decimal_precision},
]
inttypes = [Int32, Int64, Int128]
floattypes = [Float32, Float64]
bigtypes = [BigInt, BigFloat]
alltypes = (inttypes..., bigtypes..., floattypes..., fd_FixedPointDecimal_types...,)

# Two-argument no-op, so the loop shape matches the real binary operators.
identity1(a, _) = a
allops = (*, /, +, ÷, identity1)
# Category label for the results output CSV.
category(::Type{<:Union{inttypes...}}) = "Int"
category(::Type{<:Union{floattypes...}}) = "Float"
category(::Type{<:Union{bigtypes...}}) = "Big"
category(::Type{<:FixedPointDecimals.FixedDecimal}) = "FixedDecimal"

# Human-readable type names, space-padded so the printed columns line up.
type(T::Type) = "$T"
type(T::Type{<:Union{Int32, Int64}}) = "  $T"
type(T::Type{Int128}) = " $T"
type(::Type{FixedPointDecimals.FixedDecimal{T,f}}) where {T,f} = "FD{$T,$f}"
type(::Type{FixedPointDecimals.FixedDecimal{T,f}}) where {T<:Union{Int32,Int64},f} = "FD{ $T,$f}"

opname(f) = Symbol(f)
opname(f::typeof(identity1)) = :identity
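
# For example, with these helpers a division benchmark on the 64-bit fixed
# decimal would be recorded roughly as (exact space-padding is cosmetic):
#   Operation = :/, Category = "FixedDecimal", Type = "FD{ Int64,2}"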
# --------- Define benchmark functions -------------
# Some care is taken here to prevent the compiler from optimizing away the operations:
#  - Marked @noinline so the constants we pass in aren't available to the optimizer.
#  - We take `a` and `out` as parameters so that their values aren't available when
#    compiling this function.
#  - `out` is a Ref{T} so that this function has side effects. We use an output
#    parameter instead of returning the value directly so that it plays nicely with
#    the `@benchmark` macro, which returns the benchmark results as an object.
#  - `T` and `op` _should_ be available as compile-time constants, since we don't want
#    to be measuring the time it takes to read from global variables.
@noinline function benchmark(::Type{T}, op, a::T, n, out::Ref{T}) where {T}
    for _ in 1:n
        tmp = op(a, a)
        out[] += tmp
        a += one(T)
    end
end
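
# A minimal direct call of `benchmark` (illustrative only; the timed runs below
# go through @eval-generated closures and BenchmarkTools):
#
#   out = Ref(zero(Float64))
#   benchmark(Float64, +, 1.0, 10, out)
#   out[]  # running sum of a + a for a = 1.0, 2.0, ..., 10.0, i.e. 110.0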
@noinline function baseline(::Type{T}, a::T, n, out::Ref{T}) where {T}
    for _ in 1:n
        tmp = a
        out[] += tmp
        a += one(T)
    end
end
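
# The reported per-operation cost below is
#   (median benchmark time - median baseline time) / N
# The baseline loop is identical except that it skips `op`, so the subtraction
# isolates the cost of `op` itself from the loop and accumulation overhead.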
# ------------ Run the Benchmarks -------------------------
function perform_benchmark()
    # Collect the results
    results = DataFrame(Operation=Symbol[], Category=String[], Type=String[],
                        DurationNs=Float64[], Allocations=Int[], MinGcTime=Number[],
                        Value=Number[])
    # Run the benchmarks
    for op in allops
        println("$op")
        for T in alltypes
            print("$T ")
            N = 1_000_000
            initial_value = zero(T)
            a = one(T)
            # Wrap the calls in @eval-generated, concretely typed closures; this
            # eliminates a stray "1 allocation" that otherwise shows up (apparently
            # from calling through non-constant globals).
            fbase = @eval (out::Ref{$T}) -> baseline($T, $a, $N, out)
            fbench = @eval (out::Ref{$T}) -> benchmark($T, $op, $a, $N, out)
            # Run the benchmark
            outbase = Ref(initial_value)
            bbase = median(@benchmark $fbase($outbase) evals=1 setup=($outbase[]=$initial_value))
            outbench = Ref(initial_value)
            bbench = median(@benchmark $fbench($outbench) evals=1 setup=($outbench[]=$initial_value))
            # Compute results
            difftime = bbench.time - bbase.time
            println("$(round(difftime, digits=2)) ns ($(bbench.allocs) allocations)")
            println(outbench[])
            println(outbase[])
            value = outbench
            push!(results, Dict(:Operation=>opname(op), :Category=>category(T), :Type=>type(T),
                                :DurationNs=>difftime/N,  # average per operation (b.time reports total ns)
                                :Allocations=>bbench.allocs, :MinGcTime=>bbench.gctime,
                                :Value=>value[]))
        end
    end
    println(results)
    CSV.write("$(@__DIR__)/comparisons-benchmark-results.csv", results)
    return results
end

results = perform_benchmark()

end  # module DecimalRepresentationComparisons