module ActiveSupport::Testing::Performance
Constants
- DEFAULTS
Each implementation should define its metrics and freeze the defaults.
Public Instance Methods
_performance_run()
# File lib/rails/perftest/active_support/testing/performance.rb, line 66
def _performance_run
  run_warmup

  if full_profile_options && metrics = full_profile_options[:metrics]
    metrics.each do |metric_name|
      if klass = Metrics[metric_name.to_sym]
        run_profile(klass.new)
      end
    end
  end
end
full_profile_options()
# File lib/rails/perftest/active_support/testing/performance.rb, line 30
def full_profile_options
  DEFAULTS.merge(profile_options)
end
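Defaults can be overridden per test by assigning profile_options; full_profile_options merges those overrides on top of DEFAULTS. An illustrative sketch of a performance test doing so (the option keys :runs, :metrics, :output and :formats follow the gem's documented usage; treat the exact values as assumptions):

  require 'test_helper'
  require 'rails/performance_test_help'

  class BrowsingTest < ActionDispatch::PerformanceTest
    # These overrides are merged over DEFAULTS by full_profile_options.
    self.profile_options = { runs: 5, metrics: [:wall_time, :memory],
                             output: 'tmp/performance', formats: [:flat] }

    def test_homepage
      get '/'
    end
  end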
full_test_name()
# File lib/rails/perftest/active_support/testing/performance.rb, line 34
def full_test_name
  "#{self.class.name}##{method_name}"
end
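For a test_homepage method defined in BrowsingTest, this returns "BrowsingTest#test_homepage".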
run_test(metric, mode)
# File lib/rails/perftest/active_support/testing/performance.rb, line 77
def run_test(metric, mode)
  result = '.'
  begin
    before_setup
    setup
    after_setup

    metric.send(mode) { __send__ method_name }
  rescue Exception => e
    result = performance_failure(e)
  ensure
    begin
      before_teardown
      teardown
      after_teardown
    rescue Exception => e
      result = performance_failure(e)
    end
  end

  result
end
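The metric object only needs to respond to the mode being sent (:benchmark or :profile) with a method that yields to the test body. A minimal sketch of such an object, assuming nothing beyond what run_test and run_warmup actually call (benchmark taking a block, plus total and format); the bundled Metrics classes do considerably more:

  class MinimalWallTimeMetric
    attr_reader :total

    def initialize
      @total = 0.0
    end

    # Called by run_test via metric.send(:benchmark) { ... }.
    def benchmark
      before = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      yield
      @total += Process.clock_gettime(Process::CLOCK_MONOTONIC) - before
    end

    # Used by run_warmup when printing the warmup line.
    def format(measurement)
      '%.2f sec' % measurement
    end
  end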
Protected Instance Methods
run_gc()
Overridden by each implementation.
# File lib/rails/perftest/active_support/testing/performance.rb, line 100
def run_gc; end
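On CRuby an implementation would typically just force a collection; a minimal sketch, not taken from this gem's source:

  def run_gc
    GC.start
  end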
run_profile(metric)
# File lib/rails/perftest/active_support/testing/performance.rb, line 112
def run_profile(metric)
  klass = full_profile_options[:benchmark] ? Benchmarker : Profiler

  performer = klass.new(self, metric)

  performer.run
  puts performer.report
  performer.record
end
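The choice between benchmarking and profiling hinges entirely on the :benchmark key of full_profile_options. Both performer classes are constructed with the test instance and the metric, and must respond to run, report and record. A bare-bones sketch of an object honouring only that contract (purely illustrative; the gem's Benchmarker and Profiler do the real work):

  class NoopPerformer
    def initialize(test, metric)
      @test   = test
      @metric = metric
    end

    def run
      # A real performer would invoke @test.run_test(@metric, :benchmark) or :profile here.
    end

    def report
      "#{@test.full_test_name} (#{@metric.class.name})"
    end

    def record
      # A real performer would persist results, e.g. under the configured output path.
    end
  end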
run_warmup()
# File lib/rails/perftest/active_support/testing/performance.rb, line 102
def run_warmup
  run_gc

  time = Metrics::Time.new
  run_test(time, :benchmark)
  puts "%s (%s warmup)" % [full_test_name, time.format(time.total)]

  run_gc
end
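The warmup line printed here looks something like BrowsingTest#test_homepage (0.42 sec warmup); the exact figure and unit come from Metrics::Time#format.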