class Benchmark::Inputs::Job
Attributes
dup_inputs[R]
@return [Boolean]
reports[R]
Array of benchmark reports. Each call to {report} adds an element to this array.
@return [Array<Benchmark::Inputs::Report>]
sample_dt[RW]
@return [Integer]
sample_n[RW]
@return [Integer]
Public Class Methods
new(inputs, dup_inputs: false, sample_n: 10, sample_dt: NS_PER_MS * 200)
click to toggle source
@!visibility private
# File lib/benchmark/inputs.rb, line 55
#
# Creates a benchmark job over +inputs+.
#
# @param inputs [Array] values yielded to each benchmarked block
# @param dup_inputs [Boolean] whether to +dup+ each input before yielding it
# @param sample_n [Integer] number of timing samples taken per report
# @param sample_dt [Integer] target duration of one sample, in nanoseconds
# @raise [ArgumentError] if +inputs+ is empty
def initialize(inputs, dup_inputs: false, sample_n: 10, sample_dt: NS_PER_MS * 200)
  raise ArgumentError, "No inputs specified" if inputs.empty?

  @inputs = inputs
  @dup_inputs = dup_inputs
  @sample_n = sample_n
  @sample_dt = sample_dt
  @reports = []

  # Generate the #bench method specialized to these inputs.
  def_bench!
end
Public Instance Methods
compare!()
click to toggle source
Prints the relative speeds (from fastest to slowest) of all {reports} to +$stdout+.
@return [void]
# File lib/benchmark/inputs.rb, line 128
#
# Prints the relative speeds (from fastest to slowest) of all {reports}
# to +$stdout+.  Sorts {reports} in place by descending i/s, and tags
# each report with its ratio to the fastest via +slower_than!+.
#
# @return [void]
def compare!
  return $stdout.puts("Nothing to compare!") if @reports.empty?

  @reports.sort_by! { |report| -report.ips }
  fastest = @reports.first
  @reports.each { |report| report.slower_than!(fastest) }

  widest = @reports.map { |report| report.label.length }.max
  row_format = " %#{widest}s: %10.1f i/s"

  $stdout.puts("\nComparison:")
  @reports.each_with_index do |report, index|
    $stdout.printf(row_format, report.label, report.ips)
    if report.ratio
      $stdout.printf(" - %.2fx slower", report.ratio)
    elsif index > 0
      # No ratio but not the fastest: difference is within measurement error.
      $stdout.printf(" - same-ish: difference falls within error")
    end
    $stdout.puts
  end
  $stdout.puts
end
dup_inputs=(flag)
click to toggle source
@param flag [Boolean] @return [Boolean]
# File lib/benchmark/inputs.rb, line 71
#
# Sets whether each input is +dup+'d before being yielded to a
# benchmarked block, and regenerates #bench to match.
#
# @param flag [Boolean]
# @return [Boolean]
def dup_inputs=(flag)
  @dup_inputs = flag
  # The generated #bench bakes the dup decision in, so rebuild it.
  def_bench!
  @dup_inputs
end
report(label) { |x| ... }
click to toggle source
Benchmarks the given block using each of the Job's input values. If {dup_inputs} is true, each input value is dup'd before being yielded to the block. Prints the block's estimated speed (in invocations per second) to +$stdout+, and adds a {Report} to {reports}.
@param label [String]
Label for the report
@yieldparam input [Object]
One of the Job's input values
@return [void]
# File lib/benchmark/inputs.rb, line 100
#
# Benchmarks the given block using each of the Job's input values.  If
# {dup_inputs} is true, each input value is dup'd before being yielded
# to the block.  Prints the block's estimated speed (in invocations per
# second) to +$stdout+, and adds a {Report} to {reports}.
#
# @param label [String] label for the report
# @yieldparam input [Object] one of the Job's input values
# @return [void]
def report(label)
  # Estimate repetitions: double reps until one timed run of the block
  # takes at least @sample_dt ns, then scale the last power of two down
  # to the smallest rep count expected to meet the target duration.
  reps = 1
  reps_time = 0
  while reps_time < @sample_dt
    reps_time = bench(reps){|x| yield(x) }
    reps *= 2
  end
  reps = ((reps / 2) * (reps_time.to_f / @sample_dt)).ceil

  # Take @sample_n timing samples, subtracting the loop + yield overhead
  # measured by an empty block run of the same length.
  r = Report.new(label, reps * @inputs.length)
  i = @sample_n
  GC.start
  while i > 0
    r.add_sample(bench(reps){|x| yield(x) } - bench(reps){|x| x })
    i -= 1
  end

  $stdout.puts(r.label)
  $stdout.printf(" %.1f i/s (\u00B1%.2f%%)\n", r.ips, r.stddev / r.ips * 100)
  @reports << r
end
Private Instance Methods
def_bench!()
click to toggle source
# File lib/benchmark/inputs.rb, line 152
#
# Defines (or redefines) a singleton #bench method specialized to the
# current inputs and the dup_inputs setting.  The generated method loads
# every input into a local, then times +reps+ passes of yielding each
# input to the caller's block, returning elapsed nanoseconds on the
# monotonic clock.
def def_bench!
  indexes = @inputs.each_index
  assigns = indexes.map { |i| "x#{i} = @inputs[#{i}]" }.join(";")
  template = dup_inputs ? "yield(x%d.dup)" : "yield(x%d)"
  yields = indexes.map { |i| template % i }.join(";")

  code = <<-CODE
    def bench(reps)
      #{assigns}
      i = reps
      before_time = Process.clock_gettime(Process::CLOCK_MONOTONIC, :nanosecond)
      while i > 0
        #{yields}
        i -= 1
      end
      after_time = Process.clock_gettime(Process::CLOCK_MONOTONIC, :nanosecond)
      after_time - before_time
    end
  CODE

  # Drop any previously generated #bench before installing the new one.
  instance_eval { undef :bench } if self.respond_to?(:bench)
  instance_eval(code)
end