Skip to content

Commit 3e7da85

Browse files
committed
Add Decimal Representations Comparison benchmark.
Adds a benchmark file that produces performance comparisons across various types and operations.
1 parent 1768c58 commit 3e7da85

File tree

1 file changed

+122
-0
lines changed

1 file changed

+122
-0
lines changed
Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
1+
# Decimal Representation Comparisons
2+
#
3+
# This benchmark compares the performance of several numeric representations, over various
4+
# numeric operations (+,-,*,/,÷...) on large arrays of numbers, in order to guide
5+
# decision-making about how to represent fixed-decimal numbers.
6+
#
7+
# It compares fixed-decimal types against the builtin Int and Float types of various sizes.
8+
# The output is written to a .csv file in the same directory as this file.
9+
10+
module DecimalRepresentationComparisons
11+
12+
using FixedPointDecimals
13+
using Random
14+
using BenchmarkTools, Statistics
15+
using DataFrames
16+
using CSV
17+
18+
# Number of digits after the decimal point used for every FixedDecimal type below.
# Marked `const` (as are all module-level bindings here) so the benchmark closures
# that reference these globals are type-stable.
const decimal_precision = 2

# Express that data through the various types. Round it for integers.
# The FixedDecimal types under test, one per underlying integer width.
const fd_FixedPointDecimal_types = [
    FixedPointDecimals.FixedDecimal{Int32, decimal_precision},
    FixedPointDecimals.FixedDecimal{Int64, decimal_precision},
    FixedPointDecimals.FixedDecimal{Int128, decimal_precision},
]

# Builtin types to compare against, grouped by kind. These groups are also
# splatted into `Union`s below to drive the `category` dispatch.
const inttypes = [Int32, Int64, Int128]
const floattypes = [Float32, Float64]
const bigtypes = [BigInt, BigFloat]

# Every type benchmarked, flattened into a single tuple.
const alltypes = (inttypes..., bigtypes..., floattypes..., fd_FixedPointDecimal_types...,)
32+
# Two-argument identity: returns its first argument and ignores the second.
# Used as a no-op "operation" so the benchmark loop has the same shape as the
# real binary operations while measuring only loop overhead.
identity1(a, _) = a

# All operations benchmarked for every type. Marked `const` so reads of this
# global inside functions are type-stable.
const allops = (*, /, +, ÷, identity1)
35+
"""
    category(T::Type) -> String

Coarse grouping label for the `Category` column of the results CSV.
Dispatch picks the label from the type-group `Union`s defined above.
"""
category(::Type{<:FixedPointDecimals.FixedDecimal}) = "FixedDecimal"
category(::Type{<:Union{bigtypes...}}) = "Big"
category(::Type{<:Union{floattypes...}}) = "Float"
category(::Type{<:Union{inttypes...}}) = "Int"

"""
    type(T::Type) -> String

Human-readable type name for the `Type` column. Narrower integer types get
leading-space padding so names line up when the CSV is read as plain text.
"""
type(::Type{FixedPointDecimals.FixedDecimal{T,f}}) where {T<:Union{Int32,Int64},f} = "FD{ $T,$f}"
type(::Type{FixedPointDecimals.FixedDecimal{T,f}}) where {T,f} = "FD{$T,$f}"
type(T::Type{Int128}) = " $T"
type(T::Type{<:Union{Int32, Int64}}) = " $T"
type(T::Type) = "$T"

# Symbol for the `Operation` column; the identity baseline gets a friendlier
# name than `Symbol(identity1)` would produce.
opname(f) = Symbol(f)
opname(f::typeof(identity1)) = :identity
47+
48+
# --------- Define benchmark functions -------------
# Some care is taken here to prevent the compiler from optimizing away the operations:
# - Marked @noinline so the constants we pass in aren't available to the optimizer.
# - `a` and `out` arrive as parameters, so their values are unknown when this
#   function is compiled.
# - `out` is a Ref{T}, giving the loop an observable side effect. An output
#   parameter (rather than a return value) also plays nicely with the
#   `@benchmark` macro, which returns the benchmark results as an object.
# - `T` and `op` _should_ be compile-time constants, since we don't want to be
#   measuring the time it takes to read from global variables.
@noinline function benchmark(::Type{T}, op, a::T, n, out::Ref{T}) where {T}
    x = a
    for i in 1:n
        out[] += op(x, x)
        x += one(T)
    end
end
65+
66+
# Same loop shape as `benchmark`, but with no operation applied: measures the
# bare loop/accumulate overhead so it can be subtracted from the op timings.
@noinline function baseline(::Type{T}, a::T, n, out::Ref{T}) where {T}
    x = a
    for i in 1:n
        out[] += x
        x += one(T)
    end
end
73+
74+
# ------------ Run the Benchmarks -------------------------

# Run every op in `allops` over every type in `alltypes`, benchmark each
# (op, type) pair, subtract the measured loop overhead (`baseline`) from the
# measured operation loop (`benchmark`), and collect one row per pair.
# Writes the table to comparisons-benchmark-results.csv next to this file and
# returns it as a DataFrame.
function perform_benchmark()
    # Collect the results
    # NOTE(review): MinGcTime/Value columns are abstractly typed (Number); fine
    # here since they are written once per row, never computed over in bulk.
    results = DataFrame(Operation=Symbol[], Category=String[], Type=String[],
        DurationNs=Float64[], Allocations=Int[], MinGcTime=Number[],
        Value=Number[])

    # Run the benchmarks
    for op in allops
        println("$op")
        for T in alltypes
            print("$T ")

            N = 1_000_000
            initial_value = zero(T)
            a = one(T)

            # For some reason this is necessary to eliminate mysterious "1 allocation"
            # NOTE(review): presumably building the closures via @eval bakes the
            # current values in as constants instead of boxed captures — confirm.
            fbase = @eval (out::Ref{$T})->baseline($T, $a, $N, out)
            fbench = @eval (out::Ref{$T})->benchmark($T, $op, $a, $N, out)

            # Run the benchmark
            # setup resets the accumulator before each evaluation so repeated
            # runs start from the same state; evals=1 keeps one call per sample.
            outbase = Ref(initial_value)
            bbase = median(@benchmark $fbase($outbase) evals=1 setup=($outbase[]=$initial_value))
            outbench = Ref(initial_value)
            bbench = median(@benchmark $fbench($outbench) evals=1 setup=($outbench[]=$initial_value))

            # Compute results
            # difftime is the op loop minus the bare loop: the cost of N ops.
            difftime = (bbench.time - bbase.time)
            println("$(round(difftime, digits=2)) ns ($(bbench.allocs) allocations)")
            println(outbench[])
            println(outbase[])
            value = outbench

            push!(results, Dict(:Operation=>opname(op), :Category=>category(T), :Type=>type(T),
                :DurationNs=>difftime/N, # average (b.times reports ns)
                :Allocations=>bbench.allocs, :MinGcTime=>bbench.gctime,
                :Value=>value[]))
        end
    end

    println(results)
    CSV.write("$(@__DIR__)/comparisons-benchmark-results.csv", results)
    return results
end
119+
120+
# Run the full benchmark suite at module load time; the results table remains
# available afterwards as `DecimalRepresentationComparisons.results`.
results = perform_benchmark()

end  # module DecimalRepresentationComparisons

0 commit comments

Comments
 (0)