This commit is contained in:
Mahad Kalam 2026-02-10 01:26:14 +00:00
parent 30ec28cbb4
commit e1de26193a
2 changed files with 8 additions and 97 deletions

View file

@@ -233,26 +233,11 @@ class StaticPagesController < InertiaController
end
hb = hb.filter_by_time_range(interval, params[:from], params[:to])
result[:total_time] = hb.group(:project).duration_seconds.values.sum
result[:total_heartbeats] = hb.count
# Build weekly time ranges for bulk computation
week_ranges = (0..11).map do |w|
ws = w.weeks.ago.beginning_of_week
[ ws.to_date.iso8601, ws.to_f, w.weeks.ago.end_of_week.to_f ]
end
# Single SQL query computes all per-heartbeat diffs, then aggregates
# by every dimension + weekly project buckets in one pass.
bulk = hb.bulk_duration_stats(week_ranges: week_ranges)
result[:total_time] = bulk[:total_time]
# Top stats per dimension
dimension_map = { project: :by_project, language: :by_language,
operating_system: :by_operating_system, editor: :by_editor,
category: :by_category }
filters.each do |f|
stats = bulk[dimension_map[f]]
stats = hb.group(f).duration_seconds
stats = stats.reject { |n, _| archived.include?(n) } if f == :project
result["top_#{f}"] = stats.max_by { |_, v| v }&.first
end
@@ -261,16 +246,14 @@ class StaticPagesController < InertiaController
result["top_operating_system"] &&= h.display_os_name(result["top_operating_system"])
result["top_language"] &&= h.display_language_name(result["top_language"])
# Project durations chart
unless result["singular_project"]
result[:project_durations] = bulk[:by_project]
result[:project_durations] = hb.group(:project).duration_seconds
.reject { |p, _| archived.include?(p) }.sort_by { |_, d| -d }.first(10).to_h
end
# Per-dimension stats charts (language, editor, OS, category)
%i[language editor operating_system category].each do |f|
next if result["singular_#{f}"]
stats = bulk[dimension_map[f]].each_with_object({}) do |(raw, dur), agg|
stats = hb.group(f).duration_seconds.each_with_object({}) do |(raw, dur), agg|
k = raw.to_s.presence || "Unknown"
k = f == :language ? (k == "Unknown" ? k : k.categorize_language) : (%i[editor operating_system].include?(f) ? k.downcase : k)
agg[k] = (agg[k] || 0) + dur
@@ -286,9 +269,10 @@ class StaticPagesController < InertiaController
}.to_h
end
# Weekly project stats (already computed in bulk)
result[:weekly_project_stats] = bulk[:weekly_projects].transform_values do |proj_stats|
proj_stats.reject { |p, _| archived.include?(p) }
result[:weekly_project_stats] = (0..11).to_h do |w|
ws = w.weeks.ago.beginning_of_week
[ ws.to_date.iso8601, hb.where(time: ws.to_f..w.weeks.ago.end_of_week.to_f)
.group(:project).duration_seconds.reject { |p, _| archived.include?(p) } ]
end
end
result[:selected_interval] = interval.to_s

View file

@@ -249,79 +249,6 @@ module Heartbeatable
end
end
# Computes duration aggregates for all five dimensions (project, language,
# editor, operating_system, category) plus optional weekly per-project
# buckets, using a single SQL query instead of one grouped query per
# dimension.
#
# week_ranges: array of [label, week_start_epoch, week_end_epoch] triples
#   (see the destructuring below); when non-empty, per-project durations
#   are additionally bucketed into the FIRST range containing each
#   heartbeat's timestamp.
#
# Returns a hash with keys :total_time, :by_project, :by_language,
# :by_editor, :by_operating_system, :by_category, :weekly_projects.
def bulk_duration_stats(week_ranges: [])
# Cap each inter-heartbeat gap at the heartbeat timeout (seconds) —
# enforced by LEAST(..., timeout) in the SQL below.
timeout = heartbeat_timeout_duration.to_i
scope = with_valid_timestamps.where.not(time: nil)
# We calculate independent diffs for each dimension (partitioned) to replicate
# the "group(...).duration_seconds" behavior which counts parallel/overlapping time.
# NOTE: heredoc body is raw SQL squished onto one line — do not add SQL
# comments inside it. `timeout` is an integer and `scope.to_sql` is
# ActiveRecord-generated, so the interpolation is not user-controlled here.
sql = <<~SQL.squish
SELECT project, language, editor, operating_system, category, time,
LEAST(COALESCE(time - LAG(time) OVER (PARTITION BY project ORDER BY time), 0), #{timeout}) as diff_project,
LEAST(COALESCE(time - LAG(time) OVER (PARTITION BY language ORDER BY time), 0), #{timeout}) as diff_language,
LEAST(COALESCE(time - LAG(time) OVER (PARTITION BY editor ORDER BY time), 0), #{timeout}) as diff_editor,
LEAST(COALESCE(time - LAG(time) OVER (PARTITION BY operating_system ORDER BY time), 0), #{timeout}) as diff_os,
LEAST(COALESCE(time - LAG(time) OVER (PARTITION BY category ORDER BY time), 0), #{timeout}) as diff_category
FROM (#{scope.to_sql}) AS hb
SQL
# select_all returns string-keyed rows (no model instantiation).
rows = connection.select_all(sql)
# Hash.new(0) gives a zero default so `+=` needs no key-existence checks.
by_project = Hash.new(0)
by_language = Hash.new(0)
by_editor = Hash.new(0)
by_operating_system = Hash.new(0)
by_category = Hash.new(0)
total_time = 0
# Pre-build weekly buckets if requested
weekly_projects = {}
week_lookup = []
if week_ranges.any?
week_ranges.each do |label, ws, we|
weekly_projects[label] = Hash.new(0)
week_lookup << [ label, ws, we ]
end
end
# Single pass over the result set: each row carries one capped diff per
# dimension. NOTE(review): diffs are truncated to whole seconds via to_i
# while `time` is read as a float — confirm sub-second gaps are meant to
# be dropped.
rows.each do |row|
d_project = row["diff_project"].to_i
d_language = row["diff_language"].to_i
d_editor = row["diff_editor"].to_i
d_os = row["diff_os"].to_i
d_category = row["diff_category"].to_i
if d_project > 0
by_project[row["project"]] += d_project
# Only the project-partitioned diffs contribute to total_time.
total_time += d_project
end
by_language[row["language"]] += d_language if d_language > 0
by_editor[row["editor"]] += d_editor if d_editor > 0
by_operating_system[row["operating_system"]] += d_os if d_os > 0
by_category[row["category"]] += d_category if d_category > 0
# Attribute project time to the first week range containing the
# heartbeat's timestamp (bounds are inclusive on both ends).
if week_lookup.any? && d_project > 0
t = row["time"].to_f
week_lookup.each do |label, ws, we|
if t >= ws && t <= we
weekly_projects[label][row["project"]] += d_project
break
end
end
end
end
{
total_time: total_time,
by_project: by_project,
by_language: by_language,
by_editor: by_editor,
by_operating_system: by_operating_system,
by_category: by_category,
weekly_projects: weekly_projects
}
end
def duration_seconds_boundary_aware(scope, start_time, end_time)
scope = scope.with_valid_timestamps