class ActiveRecordProfiler::Collector

Constants

AGGREGATE_QUIET_PERIOD
AVG_DURATION
COUNT
CSV_AVG
CSV_COUNT
CSV_DURATION
CSV_LOCATION
CSV_LONGEST
CSV_LONGEST_SQL
DATETIME_FORMAT
DATE_FORMAT
DURATION
HOUR_FORMAT
LOCATION
LONGEST
LONGEST_SQL
NON_APP_CODE_DESCRIPTION

Attributes

last_stats_flush[RW]
profile_data_directory[RW]
query_sites[RW]

Public Class Methods

clear_data()
# File lib/active-record-profiler/collector.rb, line 70
def self.clear_data
  dir = Dir.new(profile_dir)
  prof_files = dir.entries.select{ |filename| /\.prof$/.match(filename) }.map{ |filename| File.join(dir.path, filename) }
  FileUtils.rm(prof_files) if prof_files.size > 0
end
instance()
# File lib/active-record-profiler/collector.rb, line 62
def self.instance
  Thread.current[:active_record_profiler_collector] ||= Collector.new
end
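
Because the collector is memoized in a thread-local variable, repeated calls on the same thread return the same object. A minimal usage sketch:

  collector = ActiveRecordProfiler::Collector.instance
  collector.equal?(ActiveRecordProfiler::Collector.instance)  # => true on the same thread
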
new()
# File lib/active-record-profiler/collector.rb, line 76
def initialize
  @query_sites = {}
  @last_stats_flush = nil
  @profile_data_directory = self.class.profile_dir
end
profile_self?()
# File lib/active-record-profiler/collector.rb, line 66
def self.profile_self?
  self.profile_self
end

Public Instance Methods

aggregate(options = {})
# File lib/active-record-profiler/collector.rb, line 121
def aggregate(options = {})
  prefix = options[:prefix]
  compact = options[:compact]
  raise "Cannot compact without a prefix!" if compact && prefix.nil?
  return self.query_sites unless File.exist?(self.profile_data_directory)
  
  dir = Dir.new(self.profile_data_directory)
  now = Time.now
  raw_files_processed = []
  date_regexp = Regexp.new(prefix) if prefix

  dir.each do |filename|
    next unless /\.prof$/.match(filename)
    next if date_regexp && ! date_regexp.match(filename)
    # Parse the datetime out of the filename and convert it to localtime
    begin
      file_time = DateTime.strptime(filename, DATETIME_FORMAT)
      file_time = Time.local(file_time.year, file_time.month, file_time.day, file_time.hour, file_time.min)
    rescue Exception => e
      if e.to_s != 'invalid date'
        raise e
      end
    end

    if (file_time.nil? || ((file_time + AGGREGATE_QUIET_PERIOD) < now))
      begin
        update_from_file(File.join(dir.path, filename))
      
        raw_files_processed << filename if file_time    # any files that are already aggregated don't count
      rescue Exception => e
        Rails.logger.warn "Unable to read file #{filename}: #{e.message}"
      end
    else
      Rails.logger.info "Skipping file #{filename} because it is too new and may still be open for writing."
    end
  end

  if compact && raw_files_processed.size > 0
    write_file(File.join(dir.path, "#{prefix}.prof"))

    raw_files_processed.each do |filename|
      FileUtils.rm(File.join(dir.path, filename))
    end
  end

  return self.query_sites
end
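
A usage sketch; the prefix is an illustrative value and is matched as a regexp against the profile filenames, which begin with a strftime(DATETIME_FORMAT) timestamp (here assumed to start with an ISO-style date):

  collector = ActiveRecordProfiler::Collector.instance

  # Merge every eligible .prof file in the profile data directory.
  sites = collector.aggregate

  # Merge only files matching the prefix, then compact the raw files that
  # were read into a single "<prefix>.prof" file.
  collector.aggregate(:prefix => '2024-06-01', :compact => true)
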
call_location_name(caller_array = nil)
# File lib/active-record-profiler/collector.rb, line 82
def call_location_name(caller_array = nil)
  find_app_call_location(caller_array) || NON_APP_CODE_DESCRIPTION
end
flush_query_sites_statistics()
# File lib/active-record-profiler/collector.rb, line 101
def flush_query_sites_statistics
  pid = $$
  thread_id = Thread.current.object_id
  flush_time = Time.now
  site_count = self.query_sites.keys.size
  Rails.logger.info("Flushing ActiveRecordProfiler statistics for PID #{pid} at #{flush_time} (#{site_count} sites).")
  
  if (site_count > 0)
    FileUtils.makedirs(self.profile_data_directory)
  
    filename = File.join(self.profile_data_directory, "#{flush_time.strftime(DATETIME_FORMAT)}.#{pid}-#{thread_id}.prof")
    write_file(filename)
    
    # Nuke each value to make sure it can be reclaimed by Ruby
    self.query_sites.keys.each{ |k| self.query_sites[k] = nil }
  end
  self.query_sites = {}
  self.last_stats_flush = flush_time
end
record_caller_info(location, seconds, sql)
# File lib/active-record-profiler/collector.rb, line 86
def record_caller_info(location, seconds, sql)
  return if sql_ignore_pattern.match(sql)
  
  update_counts(location, seconds, 1, sql)
end
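
This is the low-level recording hook. A hand-driven sketch, assuming the caller has timed a query itself (the SQL and timing are illustrative):

  collector = ActiveRecordProfiler::Collector.instance
  sql = 'SELECT COUNT(*) FROM users'
  started = Time.now
  ActiveRecord::Base.connection.select_all(sql)
  collector.record_caller_info(collector.call_location_name, Time.now - started, sql)
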
record_self_info(seconds, name)
# File lib/active-record-profiler/collector.rb, line 92
def record_self_info(seconds, name)
  record_caller_info(trim_location(caller.first), seconds, name)
end
save_aggregated(date = nil)
# File lib/active-record-profiler/collector.rb, line 169
def save_aggregated(date = nil)
  aggregate(:prefix => date, :compact => true)
end
should_flush_stats?()
# File lib/active-record-profiler/collector.rb, line 96
def should_flush_stats?
  self.last_stats_flush ||= Time.now
  return(Time.now > self.last_stats_flush + stats_flush_period)
end
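
Together with flush_query_sites_statistics, this supports a periodic flush pattern, for example at the end of each request:

  collector = ActiveRecordProfiler::Collector.instance
  if collector.should_flush_stats?
    # Writes a timestamped .prof file (when there is data) and resets
    # query_sites and last_stats_flush.
    collector.flush_query_sites_statistics
  end
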
sorted_locations(sort_field = nil, max_locations = nil)
# File lib/active-record-profiler/collector.rb, line 173
def sorted_locations(sort_field = nil, max_locations = nil)
  sort_field ||= DURATION
  case sort_field
    when LOCATION
      sorted = self.query_sites.keys.sort
    when AVG_DURATION
      sorted = self.query_sites.keys.sort_by{ |k| (self.query_sites[k][DURATION] / self.query_sites[k][COUNT]) }.reverse
    when DURATION, COUNT, LONGEST
      sorted = self.query_sites.keys.sort{ |a,b| self.query_sites[b][sort_field] <=> self.query_sites[a][sort_field] }
    else
      raise "Invalid sort field: #{sort_field}"
  end
  if max_locations && max_locations > 0
    sorted.first(max_locations)
  else
    sorted
  end
end
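
A reporting sketch that prints the five call sites with the greatest total duration, indexing each entry with the DURATION and COUNT constants listed above:

  collector = ActiveRecordProfiler::Collector.instance
  collector.aggregate

  collector.sorted_locations(ActiveRecordProfiler::Collector::DURATION, 5).each do |location|
    info = collector.query_sites[location]
    printf("%9.3fs %7d calls  %s\n",
           info[ActiveRecordProfiler::Collector::DURATION],
           info[ActiveRecordProfiler::Collector::COUNT],
           location)
  end
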

Protected Instance Methods

detect_file_type(filename)
# File lib/active-record-profiler/collector.rb, line 219
def detect_file_type(filename)
  type = nil
  File.open(filename, "r") do |io|
    first_line = io.readline
    if first_line.match(/^\/\* JSON \*\//)
      type = :json
    end
  end
  return type
end
find_app_call_location(call_stack)
# File lib/active-record-profiler/collector.rb, line 194
def find_app_call_location(call_stack)
  call_stack ||= caller(2)
  call_stack.each do |frame|
    if app_path_pattern.match(frame)
      return trim_location(frame)
    end
  end
  return nil
end
read_file(filename) { |row| ... }
# File lib/active-record-profiler/collector.rb, line 267
def read_file(filename)
  file_type = detect_file_type filename
  case file_type
  when :json
    read_json_file(filename) { |row| yield row }
  else
    raise "Unknown profiler data file type for file '#{filename}': #{file_type}"
  end
end
read_json_file(filename) { |row| ... }
# File lib/active-record-profiler/collector.rb, line 277
def read_json_file(filename)
  JSON.load(File.open(filename, "r")).each do |row|
    yield row
  end
end
trim_location(loc)
# File lib/active-record-profiler/collector.rb, line 204
def trim_location(loc)
  loc.sub(trim_root_path, '').sub(trim_cache_id_pattern, '')
end
update_counts(location, seconds, count, sql, longest = nil)
# File lib/active-record-profiler/collector.rb, line 208
def update_counts(location, seconds, count, sql, longest = nil)
  longest ||= seconds
  self.query_sites[location] ||= [0.0,0,0,'']
  self.query_sites[location][DURATION] += seconds
  self.query_sites[location][COUNT] += count
  if (longest > self.query_sites[location][LONGEST])
    self.query_sites[location][LONGEST] = longest
    self.query_sites[location][LONGEST_SQL] = sql.to_s
  end
end
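
As the initializer above shows, each query_sites value is a four-element array indexed by the DURATION, COUNT, LONGEST, and LONGEST_SQL constants. Reading a site's totals back out looks like this (the location key is an illustrative value):

  info = ActiveRecordProfiler::Collector.instance.query_sites['app/models/user.rb:42']
  total_seconds = info[ActiveRecordProfiler::Collector::DURATION]
  call_count    = info[ActiveRecordProfiler::Collector::COUNT]
  slowest_sql   = info[ActiveRecordProfiler::Collector::LONGEST_SQL]
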
update_from_file(filename)
# File lib/active-record-profiler/collector.rb, line 259
def update_from_file(filename)
  read_file(filename) do |row|
    update_counts(
      row[CSV_LOCATION], row[CSV_DURATION].to_f, row[CSV_COUNT].to_i, row[CSV_LONGEST_SQL], row[CSV_LONGEST].to_f
    )
  end
end
write_file(filename)
# File lib/active-record-profiler/collector.rb, line 230
def write_file(filename)
  case storage_backend
  when :json
    write_json_file(filename)
  else
    raise "Invalid storage_backend: #{storage_backend}"
  end
end
write_json_file(filename)
# File lib/active-record-profiler/collector.rb, line 239
def write_json_file(filename)
  File.open(filename, "w") do |file|
    file.puts "/* JSON */"
    file.puts "/* Fields: Duration, Count, Avg. Duration, Max. Duration, Location, Max. Duration SQL */"
    file.puts "["

    first = true
    self.query_sites.each_pair do |location, info|
      if first
        first = false
      else
        file.puts "\n, "
      end
      row = [info[DURATION], info[COUNT], (info[DURATION]/info[COUNT]), info[LONGEST], location, info[LONGEST_SQL]]
      file.print JSON.generate(row)
    end
    file.puts "\n]"
  end
end
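
Given the header comments and row layout above, a flushed .prof file looks roughly like this (the numbers, locations, and SQL are illustrative):

  /* JSON */
  /* Fields: Duration, Count, Avg. Duration, Max. Duration, Location, Max. Duration SQL */
  [
  [3.742,184,0.020337,0.412,"app/models/user.rb:42:in `find_recent'","SELECT * FROM users WHERE ..."]
  ,
  [0.518,12,0.043167,0.097,"app/models/post.rb:17:in `popular'","SELECT * FROM posts WHERE ..."]
  ]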