WakaTime/Hackatime v1 imports + Settings v2 (#1062)

* Imports are back!!

* Settings UI v3

* Use Inertia forms for heartbeat imports

* Update app/javascript/pages/Users/Settings/Data.svelte

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* Update Bundle

* Fix broken Form/Button markup in Data.svelte settings page

* Update JS deps

* Greptile fixes

* Remove dead code

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
This commit is contained in:
Mahad Kalam 2026-03-12 21:27:10 +00:00 committed by GitHub
parent ea9596cb7d
commit 667d3a7c93
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
78 changed files with 2748 additions and 3996 deletions

View file

@ -11,9 +11,6 @@ SLACK_SAILORS_LOG_SIGNING_SECRET=your_signing_secret_here
SLACK_SAILORS_LOG_BOT_OAUTH_TOKEN=your_bot_oauth_token_here
TELETYPE_API_KEY=your_teletype_api_key_here
# Wakatime database url used for migrating data from waka.hackclub.com
WAKATIME_DATABASE_URL=your_wakatime_database_url_here
# You can leave this alone if you're using the provided docker setup!
DATABASE_URL=your_database_url_here
POOL_DATABASE_URL=pg_bouncer_url_here

View file

@ -21,7 +21,6 @@ Edit your `.env` file to include the following:
```env
# Database configurations - these work with the Docker setup
DATABASE_URL=postgres://postgres:secureorpheus123@db:5432/app_development
WAKATIME_DATABASE_URL=postgres://postgres:secureorpheus123@db:5432/app_development
SAILORS_LOG_DATABASE_URL=postgres://postgres:secureorpheus123@db:5432/app_development
# Generate these with `rails secret` or use these for development

View file

@ -1,7 +1,7 @@
GEM
remote: https://rubygems.org/
specs:
action_text-trix (2.1.16)
action_text-trix (2.1.17)
railties
actioncable (8.1.2)
actionpack (= 8.1.2)
@ -82,8 +82,8 @@ GEM
ast (2.4.3)
autotuner (1.1.0)
aws-eventstream (1.4.0)
aws-partitions (1.1220.0)
aws-sdk-core (3.242.0)
aws-partitions (1.1225.0)
aws-sdk-core (3.243.0)
aws-eventstream (~> 1, >= 1.3.0)
aws-partitions (~> 1, >= 1.992.0)
aws-sigv4 (~> 1.9)
@ -94,8 +94,8 @@ GEM
aws-sdk-kms (1.122.0)
aws-sdk-core (~> 3, >= 3.241.4)
aws-sigv4 (~> 1.5)
aws-sdk-s3 (1.213.0)
aws-sdk-core (~> 3, >= 3.241.4)
aws-sdk-s3 (1.216.0)
aws-sdk-core (~> 3, >= 3.243.0)
aws-sdk-kms (~> 1)
aws-sigv4 (~> 1.5)
aws-sigv4 (1.12.1)
@ -153,7 +153,7 @@ GEM
reline (>= 0.3.8)
diff-lcs (1.6.2)
domain_name (0.6.20240107)
doorkeeper (5.8.2)
doorkeeper (5.9.0)
railties (>= 5)
dotenv (3.2.0)
dotenv-rails (3.2.0)
@ -173,7 +173,7 @@ GEM
erubi (1.13.1)
et-orbi (1.4.0)
tzinfo
faker (3.6.0)
faker (3.6.1)
i18n (>= 1.8.11, < 2)
faraday (2.14.1)
faraday-net_http (>= 2.0, < 3.5)
@ -251,7 +251,7 @@ GEM
actionpack (>= 6.0.0)
activesupport (>= 6.0.0)
railties (>= 6.0.0)
inertia_rails (3.17.0)
inertia_rails (3.18.0)
railties (>= 6)
io-console (0.8.2)
irb (1.17.0)
@ -263,8 +263,8 @@ GEM
actionview (>= 7.0.0)
activesupport (>= 7.0.0)
jmespath (1.6.2)
json (2.18.1)
json-schema (6.1.0)
json (2.19.1)
json-schema (6.2.0)
addressable (~> 2.8)
bigdecimal (>= 3.1, < 5)
kamal (2.10.1)
@ -308,7 +308,7 @@ GEM
activesupport (>= 7.1)
marcel (1.1.0)
matrix (0.4.3)
mcp (0.7.1)
mcp (0.8.0)
json-schema (>= 4.1)
memory_profiler (1.1.0)
mini_magick (5.3.1)
@ -374,11 +374,11 @@ GEM
pg (1.6.3-arm64-darwin)
pg (1.6.3-x86_64-linux)
pg (1.6.3-x86_64-linux-musl)
posthog-ruby (3.5.4)
posthog-ruby (3.5.5)
concurrent-ruby (~> 1)
pp (0.6.3)
prettyprint
premailer (1.28.0)
premailer (1.29.0)
addressable
css_parser (>= 1.19.0)
htmlentities (>= 4.0.0)
@ -395,7 +395,7 @@ GEM
psych (5.3.1)
date
stringio
public_suffix (7.0.2)
public_suffix (7.0.5)
puma (7.2.0)
nio4r (~> 2.0)
query_count (1.1.1)
@ -475,14 +475,14 @@ GEM
rspec-mocks (3.13.8)
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.13.0)
rspec-rails (8.0.3)
rspec-rails (8.0.4)
actionpack (>= 7.2)
activesupport (>= 7.2)
railties (>= 7.2)
rspec-core (~> 3.13)
rspec-expectations (~> 3.13)
rspec-mocks (~> 3.13)
rspec-support (~> 3.13)
rspec-core (>= 3.13.0, < 5.0.0)
rspec-expectations (>= 3.13.0, < 5.0.0)
rspec-mocks (>= 3.13.0, < 5.0.0)
rspec-support (>= 3.13.0, < 5.0.0)
rspec-support (3.13.7)
rswag-api (2.17.0)
activesupport (>= 5.2, < 8.2)
@ -495,7 +495,7 @@ GEM
rswag-ui (2.17.0)
actionpack (>= 5.2, < 8.2)
railties (>= 5.2, < 8.2)
rubocop (1.85.0)
rubocop (1.85.1)
json (~> 2.3)
language_server-protocol (~> 3.17.0.2)
lint_roller (~> 1.1.0)
@ -507,7 +507,7 @@ GEM
rubocop-ast (>= 1.49.0, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 4.0)
rubocop-ast (1.49.0)
rubocop-ast (1.49.1)
parser (>= 3.3.7.2)
prism (~> 1.7)
rubocop-performance (1.26.1)
@ -541,10 +541,10 @@ GEM
rexml (~> 3.2, >= 3.2.5)
rubyzip (>= 1.2.2, < 4.0)
websocket (~> 1.0)
sentry-rails (6.4.0)
sentry-rails (6.4.1)
railties (>= 5.2.0)
sentry-ruby (~> 6.4.0)
sentry-ruby (6.4.0)
sentry-ruby (~> 6.4.1)
sentry-ruby (6.4.1)
bigdecimal
concurrent-ruby (~> 1.0, >= 1.0.2)
logger
@ -595,11 +595,11 @@ GEM
tailwindcss-ruby (4.2.0-x86_64-linux-gnu)
tailwindcss-ruby (4.2.0-x86_64-linux-musl)
thor (1.5.0)
thruster (0.1.18)
thruster (0.1.18-aarch64-linux)
thruster (0.1.18-arm64-darwin)
thruster (0.1.18-x86_64-linux)
timeout (0.6.0)
thruster (0.1.19)
thruster (0.1.19-aarch64-linux)
thruster (0.1.19-arm64-darwin)
thruster (0.1.19-x86_64-linux)
timeout (0.6.1)
tsort (0.2.0)
turbo-rails (2.0.23)
actionpack (>= 7.1.0)

View file

@ -219,7 +219,6 @@ class Api::Hackatime::V1::HackatimeController < ApplicationController
def handle_heartbeat(heartbeat_array)
results = []
should_enqueue_mirror_sync = false
heartbeat_array.each do |heartbeat|
source_type = :direct_entry
@ -252,7 +251,6 @@ class Api::Hackatime::V1::HackatimeController < ApplicationController
end
queue_project_mapping(heartbeat[:project])
results << [ new_heartbeat.attributes, 201 ]
should_enqueue_mirror_sync ||= source_type == :direct_entry
rescue => e
Sentry.capture_exception(e)
Rails.logger.error("Error creating heartbeat: #{e.class.name} #{e.message}")
@ -260,7 +258,6 @@ class Api::Hackatime::V1::HackatimeController < ApplicationController
end
PosthogService.capture_once_per_day(@user, "heartbeat_sent", { heartbeat_count: heartbeat_array.size })
enqueue_mirror_sync if should_enqueue_mirror_sync
results
end
@ -274,14 +271,6 @@ class Api::Hackatime::V1::HackatimeController < ApplicationController
Rails.logger.error("Error queuing project mapping: #{e.class.name} #{e.message}")
end
def enqueue_mirror_sync
return unless Flipper.enabled?(:wakatime_imports_mirrors)
MirrorFanoutEnqueueJob.perform_later(@user.id)
rescue => e
Rails.logger.error("Error enqueuing mirror sync fanout: #{e.class.name} #{e.message}")
end
def check_lockout
return unless @user&.pending_deletion?
render json: { error: "Account pending deletion" }, status: :forbidden

View file

@ -68,16 +68,7 @@ class LeaderboardsController < ApplicationController
entries_scope = leaderboard_entries_scope
ids = entries_scope.distinct.pluck(:user_id)
@user_on_leaderboard = current_user && ids.include?(current_user.id)
@untracked_entries = calculate_untracked_entries(ids) unless @user_on_leaderboard || country_scope?
@untracked_entries = 0
@total_entries = entries_scope.count
end
def calculate_untracked_entries(ids)
return 0 unless Flipper.enabled?(:hackatime_v1_import)
range = @period_type == :last_7_days ? ((Date.current - 6.days).beginning_of_day...Date.current.end_of_day) : Date.current.all_day
ids_set = ids.to_set
Hackatime::Heartbeat.where(time: range).distinct.pluck(:user_id).count { |uid| !ids_set.include?(uid) }
end
end

View file

@ -1,131 +0,0 @@
# Manages the current user's single WakaTime-compatible heartbeat import
# source (configure, update, inspect, remove, and on-demand sync).
# Every action requires a logged-in user and the :wakatime_imports_mirrors
# feature flag.
class My::HeartbeatImportSourcesController < ApplicationController
  before_action :ensure_current_user
  before_action :ensure_imports_and_mirrors_enabled

  # POST — configures a new import source. A user may only have one;
  # an existing source must be updated instead.
  def create
    if current_user.heartbeat_import_source.present?
      redirect_to my_settings_data_path, alert: "Import source already configured. Update it instead."
      return
    end
    source = current_user.build_heartbeat_import_source(create_params)
    # Provider and status are server-assigned, never taken from params.
    source.provider = :wakatime_compatible
    source.status = :idle
    if source.save
      # Kick off an initial sync right away when syncing is turned on.
      HeartbeatImportSourceSyncJob.perform_later(source.id) if source.sync_enabled?
      redirect_to my_settings_data_path, notice: "Import source configured successfully."
    else
      redirect_to my_settings_data_path, alert: source.errors.full_messages.to_sentence
    end
  end

  # PATCH — updates the existing source. Optionally restarts the backfill
  # (rerun_backfill param) and queues a sync when syncing is enabled.
  def update
    source = current_user.heartbeat_import_source
    unless source
      redirect_to my_settings_data_path, alert: "No import source is configured."
      return
    end
    rerun_backfill = ActiveModel::Type::Boolean.new.cast(params.dig(:heartbeat_import_source, :rerun_backfill))
    attrs = update_params
    # A blank API key means "keep the current one" — drop it from the update.
    attrs = attrs.except(:encrypted_api_key) if attrs[:encrypted_api_key].blank?
    if source.update(attrs)
      source.reset_backfill! if rerun_backfill
      HeartbeatImportSourceSyncJob.perform_later(source.id) if source.sync_enabled?
      redirect_to my_settings_data_path, notice: "Import source updated successfully."
    else
      redirect_to my_settings_data_path, alert: source.errors.full_messages.to_sentence
    end
  end

  # GET — JSON payload describing the current source (import_source is null
  # when none is configured).
  def show
    source = current_user.heartbeat_import_source
    render json: { import_source: source_payload(source) }
  end

  # DELETE — removes the configured source.
  def destroy
    source = current_user.heartbeat_import_source
    unless source
      redirect_to my_settings_data_path, alert: "No import source is configured."
      return
    end
    source.destroy
    redirect_to my_settings_data_path, notice: "Import source removed."
  end

  # POST — queues an immediate sync job; only valid when sync is enabled.
  def sync_now
    source = current_user.heartbeat_import_source
    unless source
      redirect_to my_settings_data_path, alert: "No import source is configured."
      return
    end
    unless source.sync_enabled?
      redirect_to my_settings_data_path, alert: "Enable sync before running sync now."
      return
    end
    HeartbeatImportSourceSyncJob.perform_later(source.id)
    redirect_to my_settings_data_path, notice: "Sync queued."
  end

  private

  # Bounces anonymous visitors to the root page.
  def ensure_current_user
    redirect_to root_path, alert: "You must be logged in to view this page." unless current_user
  end

  # Feature-flag gate: JSON callers get a 404 payload, browser requests
  # are redirected back to data settings with an alert.
  def ensure_imports_and_mirrors_enabled
    return if Flipper.enabled?(:wakatime_imports_mirrors)
    if request.format.json?
      render json: { error: "Imports and mirrors are currently disabled." }, status: :not_found
    else
      redirect_to my_settings_data_path, alert: "Imports and mirrors are currently disabled."
    end
  end

  # Strong params for create (provider is forced server-side in create too).
  def create_params
    base_params.merge(provider: :wakatime_compatible)
  end

  # Strong params for update.
  def update_params
    base_params
  end

  # Whitelisted attributes shared by create and update.
  def base_params
    params.require(:heartbeat_import_source).permit(
      :endpoint_url,
      :encrypted_api_key,
      :sync_enabled,
      :initial_backfill_start_date,
      :initial_backfill_end_date
    )
  end

  # Serializes a source for the JSON API; returns nil when no source exists.
  # imported_count is cached per-user for 5 minutes because it counts
  # heartbeat rows.
  def source_payload(source)
    return nil unless source
    {
      id: source.id,
      provider: source.provider,
      endpoint_url: source.endpoint_url,
      sync_enabled: source.sync_enabled,
      status: source.status,
      initial_backfill_start_date: source.initial_backfill_start_date&.iso8601,
      initial_backfill_end_date: source.initial_backfill_end_date&.iso8601,
      backfill_cursor_date: source.backfill_cursor_date&.iso8601,
      last_synced_at: source.last_synced_at&.iso8601,
      last_synced_ago: source.last_synced_at ? view_context.time_ago_in_words(source.last_synced_at) : nil,
      last_error_message: source.last_error_message,
      last_error_at: source.last_error_at&.iso8601,
      consecutive_failures: source.consecutive_failures,
      imported_count: Rails.cache.fetch("user:#{current_user.id}:wakapi_import_count", expires_in: 5.minutes) do
        current_user.heartbeats.where(source_type: :wakapi_import).count
      end
    }
  end
end

View file

@ -1,36 +1,41 @@
class My::HeartbeatImportsController < ApplicationController
before_action :ensure_current_user
before_action :ensure_development
def create
unless params[:heartbeat_file].present?
render json: { error: "pls select a file to import" }, status: :unprocessable_entity
if params[:heartbeat_file].blank? && params[:heartbeat_import].blank?
redirect_with_import_error("No import data provided.")
return
end
file = params[:heartbeat_file]
unless valid_json_file?(file)
render json: { error: "pls upload only json (download from the button above it)" }, status: :unprocessable_entity
return
if params[:heartbeat_file].present?
start_dev_upload!
else
start_remote_import!
end
import_id = HeartbeatImportRunner.start(user: current_user, uploaded_file: file)
status = HeartbeatImportRunner.status(user: current_user, import_id: import_id)
render json: {
import_id: import_id,
status: status
}, status: :accepted
redirect_to my_settings_data_path
rescue DevelopmentOnlyError => e
redirect_with_import_error(e.message)
rescue HeartbeatImportRunner::FeatureDisabledError => e
redirect_with_import_error(e.message)
rescue HeartbeatImportRunner::CooldownError => e
flash[:cooldown_until] = e.retry_at.iso8601
redirect_with_import_error(e.message)
rescue HeartbeatImportRunner::ActiveImportError => e
redirect_with_import_error(e.message)
rescue HeartbeatImportRunner::InvalidProviderError, ActionController::ParameterMissing => e
redirect_with_import_error(e.message)
rescue => e
Sentry.capture_exception(e)
Rails.logger.error("Error starting heartbeat import for user #{current_user&.id}: #{e.message}")
render json: { error: "error reading file: #{e.message}" }, status: :internal_server_error
redirect_with_import_error("error reading file: #{e.message}")
end
def show
status = HeartbeatImportRunner.status(user: current_user, import_id: params[:id])
if status.present?
render json: status
run = HeartbeatImportRunner.find_run(user: current_user, import_id: params[:id])
if run.present?
run = HeartbeatImportRunner.refresh_remote_run!(run)
render json: HeartbeatImportRunner.serialize(run)
else
render json: { error: "Import not found" }, status: :not_found
end
@ -38,10 +43,36 @@ class My::HeartbeatImportsController < ApplicationController
private
class DevelopmentOnlyError < StandardError; end
def valid_json_file?(file)
file.content_type == "application/json" || file.original_filename.to_s.ends_with?(".json")
end
def start_dev_upload!
ensure_development
file = params[:heartbeat_file]
unless valid_json_file?(file)
raise HeartbeatImportRunner::InvalidProviderError, "pls upload only json (download from the button above it)"
end
HeartbeatImportRunner.start_dev_upload(user: current_user, uploaded_file: file)
end
def start_remote_import!
heartbeat_import = remote_import_params
if heartbeat_import[:api_key].blank?
raise HeartbeatImportRunner::InvalidProviderError, "API key is required."
end
HeartbeatImportRunner.start_remote_import(
user: current_user,
provider: heartbeat_import[:provider],
api_key: heartbeat_import[:api_key]
)
end
def ensure_current_user
return if current_user
@ -51,6 +82,14 @@ class My::HeartbeatImportsController < ApplicationController
def ensure_development
return if Rails.env.development?
render json: { error: "Heartbeat import is only available in development." }, status: :forbidden
raise DevelopmentOnlyError, "Heartbeat import is only available in development."
end
def redirect_with_import_error(message)
redirect_to my_settings_data_path, inertia: { errors: { import: message } }
end
def remote_import_params
params.require(:heartbeat_import).permit(:provider, :api_key)
end
end

View file

@ -28,56 +28,6 @@ module My
redirect_to my_settings_data_path, notice: "Your export is being prepared and will be emailed to you."
end
def import
unless Rails.env.development?
redirect_to my_settings_path, alert: "Hey you! This is noit a dev env, STOP DOING THIS!!!!!) Also, idk why this is happning, you should not be able to see this button hmm...."
return
end
unless params[:heartbeat_file].present?
redirect_to my_settings_path, alert: "pls select a file to import"
return
end
file = params[:heartbeat_file]
unless file.content_type == "application/json" || file.original_filename.ends_with?(".json")
redirect_to my_settings_path, alert: "pls upload only json (download from the button above it)"
return
end
begin
file_content = file.read.force_encoding("UTF-8")
rescue => e
redirect_to my_settings_path, alert: "error reading file: #{e.message}"
return
end
result = HeartbeatImportService.import_from_file(file_content, current_user)
if result[:success]
message = "Imported #{result[:imported_count]} out of #{result[:total_count]} heartbeats in #{result[:time_taken]}s"
if result[:skipped_count] > 0
message += " (#{result[:skipped_count]} skipped cause they were duplicates)"
end
if result[:errors].any?
error_count = result[:errors].length
if error_count <= 3
message += ". Errors occurred: #{result[:errors].join("; ")}"
else
message += ". #{error_count} errors occurred. First few: #{result[:errors].first(2).join("; ")}..."
end
end
redirect_to root_path, notice: message
else
error_message = "Import failed: #{result[:error]}"
if result[:errors].any? && result[:errors].length > 1
error_message += "Errors: #{result[:errors][1..2].join("; ")}"
end
redirect_to my_settings_path, alert: error_message
end
end
private
def ensure_current_user

View file

@ -116,7 +116,6 @@ class My::ProjectRepoMappingsController < InertiaController
.group(:repository_id)
.maximum(:created_at)
archived_names = current_user.project_repo_mappings.archived.pluck(:project_name).index_with(true)
labels_by_project_key = Flipper.enabled?(:hackatime_v1_import) ? current_user.project_labels.pluck(:project_key, :label).to_h : {}
cached = Rails.cache.fetch(project_durations_cache_key, expires_in: 1.minute) do
hb = current_user.heartbeats.filter_by_time_range(selected_interval, params[:from], params[:to])
@ -131,7 +130,7 @@ class My::ProjectRepoMappingsController < InertiaController
next if archived_names.key?(project_key) != archived
mapping = mappings_by_name[project_key]
display_name = labels_by_project_key[project_key].presence || project_key.presence || "Unknown"
display_name = project_key.presence || "Unknown"
{
id: project_card_id(project_key),

View file

@ -29,10 +29,6 @@ class SessionsController < ApplicationController
if @user&.persisted?
session[:user_id] = @user.id
if Flipper.enabled?(:hackatime_v1_import) && @user.data_migration_jobs.empty?
MigrateUserFromHackatimeJob.perform_later(@user.id)
end
PosthogService.identify(@user)
PosthogService.capture(@user, "user_signed_in", { method: "hca" })
@ -91,11 +87,6 @@ class SessionsController < ApplicationController
if @user&.persisted?
session[:user_id] = @user.id
if Flipper.enabled?(:hackatime_v1_import) && @user.data_migration_jobs.empty?
# if they don't have a data migration job, add one to the queue
MigrateUserFromHackatimeJob.perform_later(@user.id)
end
PosthogService.identify(@user)
PosthogService.capture(@user, "user_signed_in", { method: "slack" })

View file

@ -1,18 +0,0 @@
# Admin section of the settings UI. Access is restricted to admin and
# superadmin users; everyone else is sent back to profile settings.
class Settings::AdminController < Settings::BaseController
  before_action :require_admin_section_access

  # Renders the shared settings page with the admin section active.
  def show
    render_settings_page(
      active_section: "admin",
      settings_update_path: my_settings_profile_path
    )
  end

  private

  # Guard: allow only admin-level accounts through; redirect the rest.
  def require_admin_section_access
    return if current_user.admin_level.in?(%w[admin superadmin])

    redirect_to my_settings_profile_path, alert: "You are not authorized to access this page"
  end
end

View file

@ -25,23 +25,19 @@ class Settings::BaseController < InertiaController
"access" => "Users/Settings/Access",
"goals" => "Users/Settings/Goals",
"badges" => "Users/Settings/Badges",
"data" => "Users/Settings/Data",
"admin" => "Users/Settings/Admin"
"data" => "Users/Settings/Data"
}.fetch(active_section.to_s, "Users/Settings/Profile")
end
def prepare_settings_page
@is_own_settings = is_own_settings?
@can_enable_slack_status = @user.slack_access_token.present? && @user.slack_scopes.include?("users.profile:write")
@imports_and_mirrors_enabled = Flipper.enabled?(:wakatime_imports_mirrors)
@imports_enabled = Flipper.enabled?(:imports, @user)
@enabled_sailors_logs = SailorsLogNotificationPreference.where(
slack_uid: @user.slack_uid,
enabled: true,
).where.not(slack_channel_id: SailorsLog::DEFAULT_CHANNELS)
@heartbeats_migration_jobs = @user.data_migration_jobs
@projects = @user.project_repo_mappings.distinct.pluck(:project_name)
heartbeat_language_and_projects = @user.heartbeats.distinct.pluck(:language, :project)
goal_languages = []
@ -62,20 +58,12 @@ class Settings::BaseController < InertiaController
@general_badge_url = GithubReadmeStats.new(@user.id, "darcula").generate_badge_url
@latest_api_key_token = @user.api_keys.last&.token
@mirrors = @imports_and_mirrors_enabled ? current_user.wakatime_mirrors.order(created_at: :desc) : []
@import_source = @imports_and_mirrors_enabled ? current_user.heartbeat_import_source : nil
@latest_heartbeat_import = @user.heartbeat_import_runs.latest_first.first
end
def settings_page_props(active_section:, settings_update_path:)
heartbeats_last_7_days = @user.heartbeats.where("time >= ?", 7.days.ago.to_f).count
channel_ids = @enabled_sailors_logs.pluck(:slack_channel_id)
heartbeat_import_id = nil
heartbeat_import_status = nil
if active_section.to_s == "data" && Rails.env.development?
heartbeat_import_id = params[:heartbeat_import_id].presence
heartbeat_import_status = HeartbeatImportRunner.status(user: @user, import_id: heartbeat_import_id) if heartbeat_import_id
end
{
active_section: active_section,
@ -86,8 +74,7 @@ class Settings::BaseController < InertiaController
access: my_settings_access_path,
goals: my_settings_goals_path,
badges: my_settings_badges_path,
data: my_settings_data_path,
admin: my_settings_admin_path
data: my_settings_data_path
},
page_title: (@is_own_settings ? "My Settings" : "Settings | #{@user.display_name}"),
heading: (@is_own_settings ? "Settings" : "Settings for #{@user.display_name}"),
@ -127,14 +114,10 @@ class Settings::BaseController < InertiaController
add_email_path: add_email_auth_path,
unlink_email_path: unlink_email_auth_path,
rotate_api_key_path: my_settings_rotate_api_key_path,
migrate_heartbeats_path: my_settings_migrate_heartbeats_path,
export_all_heartbeats_path: export_my_heartbeats_path(all_data: "true"),
export_range_heartbeats_path: export_my_heartbeats_path,
create_heartbeat_import_path: my_heartbeat_imports_path,
create_deletion_path: create_deletion_path,
user_wakatime_mirrors_path: user_wakatime_mirrors_path(current_user),
heartbeat_import_source_path: my_heartbeat_import_source_path,
heartbeat_import_source_sync_path: sync_my_heartbeat_import_source_path
create_deletion_path: create_deletion_path
},
options: {
countries: ISO3166::Country.all.map { |country|
@ -205,38 +188,22 @@ class Settings::BaseController < InertiaController
config_file: {
content: generated_wakatime_config(@latest_api_key_token),
has_api_key: @latest_api_key_token.present?,
empty_message: "No API key is available yet. Migrate heartbeats or rotate your API key to generate one.",
empty_message: "No API key is available yet. Rotate your API key to generate one.",
api_key: @latest_api_key_token,
api_url: "https://#{request.host_with_port}/api/hackatime/v1"
},
migration: {
enabled: Flipper.enabled?(:hackatime_v1_import),
jobs: @heartbeats_migration_jobs.map { |job|
{
id: job.id,
status: job.status
}
}
},
data_export: {
total_heartbeats: number_with_delimiter(@user.heartbeats.count),
total_coding_time: @user.heartbeats.duration_simple,
heartbeats_last_7_days: number_with_delimiter(heartbeats_last_7_days),
is_restricted: (@user.trust_level == "red")
},
import_source: serialized_import_source(@import_source),
mirrors: serialized_mirrors(@mirrors),
admin_tools: {
visible: current_user.admin_level.in?(%w[admin superadmin]),
mirrors: serialized_mirrors(@mirrors)
},
imports_enabled: @imports_enabled,
remote_import_cooldown_until: HeartbeatImportRunner.remote_import_cooldown_until(user: @user)&.iso8601,
latest_heartbeat_import: HeartbeatImportRunner.serialize(@latest_heartbeat_import),
ui: {
show_dev_import: Rails.env.development?,
show_imports_and_mirrors: @imports_and_mirrors_enabled
},
heartbeat_import: {
import_id: heartbeat_import_id,
status: heartbeat_import_status
show_imports: @imports_enabled
},
errors: {
full_messages: @user.errors.full_messages,
@ -279,43 +246,4 @@ class Settings::BaseController < InertiaController
def is_own_settings?
params["id"] == "my" || params["id"]&.blank?
end
def serialized_import_source(source)
return nil unless source
{
id: source.id,
provider: source.provider,
endpoint_url: source.endpoint_url,
sync_enabled: source.sync_enabled,
status: source.status,
initial_backfill_start_date: source.initial_backfill_start_date&.iso8601,
initial_backfill_end_date: source.initial_backfill_end_date&.iso8601,
backfill_cursor_date: source.backfill_cursor_date&.iso8601,
last_synced_at: source.last_synced_at&.iso8601,
last_synced_ago: (source.last_synced_at ? "#{time_ago_in_words(source.last_synced_at)} ago" : "Never"),
last_error_message: source.last_error_message,
last_error_at: source.last_error_at&.iso8601,
consecutive_failures: source.consecutive_failures,
imported_count: Rails.cache.fetch("user:#{source.user_id}:wakapi_import_count", expires_in: 5.minutes) do
source.user.heartbeats.where(source_type: :wakapi_import).count
end
}
end
def serialized_mirrors(mirrors)
mirrors.map { |mirror|
{
id: mirror.id,
endpoint_url: mirror.endpoint_url,
enabled: mirror.enabled,
last_synced_at: mirror.last_synced_at&.iso8601,
last_synced_ago: (mirror.last_synced_at ? "#{time_ago_in_words(mirror.last_synced_at)} ago" : "Never"),
consecutive_failures: mirror.consecutive_failures,
last_error_message: mirror.last_error_message,
last_error_at: mirror.last_error_at&.iso8601,
destroy_path: user_wakatime_mirror_path(current_user, mirror)
}
}
end
end

View file

@ -3,16 +3,6 @@ class Settings::DataController < Settings::BaseController
render_data
end
def migrate_heartbeats
unless Flipper.enabled?(:hackatime_v1_import)
redirect_to my_settings_data_path, alert: "Hackatime v1 import is currently disabled"
return
end
MigrateUserFromHackatimeJob.perform_later(@user.id)
redirect_to my_settings_data_path, notice: "Heartbeats & api keys migration started"
end
private
def render_data(status: :ok)

View file

@ -63,11 +63,10 @@ class StaticPagesController < InertiaController
cached = Rails.cache.fetch(key, expires_in: 1.minute) do
hb = current_user.heartbeats.filter_by_time_range(params[:interval], params[:from], params[:to])
labels = Flipper.enabled?(:hackatime_v1_import) ? current_user.project_labels : []
projects = hb.group(:project).duration_seconds.filter_map do |proj, dur|
next if dur <= 0
m = @project_repo_mappings.find { |p| p.project_name == proj }
{ project: labels.find { |p| p.project_key == proj }&.label || proj || "Unknown",
{ project: proj || "Unknown",
project_key: proj, repo_url: m&.repo_url, repository: m&.repository,
has_mapping: m.present?, duration: dur }
end.sort_by { |p| -p[:duration] }

View file

@ -1,47 +0,0 @@
# Create/destroy endpoints for a user's WakaTime mirrors. Gated behind the
# :wakatime_imports_mirrors feature flag and restricted to the owning user.
class WakatimeMirrorsController < ApplicationController
  before_action :set_user
  before_action :require_current_user
  before_action :ensure_imports_and_mirrors_enabled
  before_action :set_mirror, only: [ :destroy ]

  # POST — adds a mirror for the user and redirects back to data settings.
  def create
    @mirror = @user.wakatime_mirrors.build(mirror_params)
    # Stamp the mirror with the host this request came in on.
    @mirror.request_host = request.host
    if @mirror.save
      redirect_to my_settings_data_path, notice: "WakaTime mirror added successfully"
    else
      redirect_to my_settings_data_path, alert: "Failed to add WakaTime mirror: #{@mirror.errors.full_messages.join(', ')}"
    end
  end

  # DELETE — removes the mirror loaded by set_mirror.
  def destroy
    @mirror.destroy
    redirect_to my_settings_data_path, notice: "WakaTime mirror removed successfully"
  end

  private

  # Loads the routed user (:user_id from the nested route).
  def set_user
    @user = User.find(params[:user_id])
  end

  # Looks the mirror up through the routed user's association, so a user
  # can never touch another user's mirrors by id.
  def set_mirror
    @mirror = @user.wakatime_mirrors.find(params[:id])
  end

  # Strong params for mirror creation.
  def mirror_params
    params.require(:wakatime_mirror).permit(:endpoint_url, :encrypted_api_key)
  end

  # Only the owner may manage their mirrors; anyone else goes to root.
  def require_current_user
    unless @user == current_user
      redirect_to root_path, alert: "You are not authorized to access this page"
    end
  end

  # Feature-flag gate for the whole controller.
  def ensure_imports_and_mirrors_enabled
    return if Flipper.enabled?(:wakatime_imports_mirrors)
    redirect_to my_settings_data_path, alert: "Imports and mirrors are currently disabled."
  end
end

View file

@ -3,6 +3,7 @@
import Button from "../../../components/Button.svelte";
import Modal from "../../../components/Modal.svelte";
import Select from "../../../components/Select.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { AccessPageProps } from "./types";
@ -18,7 +19,6 @@
paths,
config_file,
errors,
admin_tools,
}: AccessPageProps = $props();
let csrfToken = $state("");
@ -93,29 +93,33 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div class="space-y-8">
<section>
<h2 class="text-xl font-semibold text-surface-content">
Time Tracking Setup
</h2>
<p class="mt-1 text-sm text-muted">
Use the setup guide if you are configuring a new editor or device.
<SectionCard
id="user_tracking_setup"
title="Time Tracking Setup"
description="Use the setup guide if you are configuring a new editor or device."
>
<p class="text-sm text-muted">
Hackatime uses the WakaTime plugin ecosystem, so the setup guide covers
editor installation, API keys, and API URL configuration.
</p>
<Button href={paths.wakatime_setup_path} class="mt-4">
Open setup guide
</Button>
</section>
<section id="user_hackatime_extension">
<h2 class="text-xl font-semibold text-surface-content">
Extension Display
</h2>
<p class="mt-1 text-sm text-muted">
Choose how coding time appears in the extension status text.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-4">
{#snippet footer()}
<Button href={paths.wakatime_setup_path}>Open setup guide</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_hackatime_extension"
title="Extension Display"
description="Choose how coding time appears in the extension status text."
>
<form
id="access-extension-form"
method="post"
action={settings_update_path}
class="space-y-4"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -133,35 +137,31 @@
items={options.extension_text_types}
/>
</div>
<Button type="submit" variant="primary">Save extension settings</Button>
</form>
</section>
<section id="user_api_key">
<h2 class="text-xl font-semibold text-surface-content">API Key</h2>
<p class="mt-1 text-sm text-muted">
Rotate your API key if you think it has been exposed.
</p>
<Button
type="button"
class="mt-4"
onclick={openRotateApiKeyModal}
disabled={rotatingApiKey}
>
{rotatingApiKey ? "Rotating..." : "Rotate API key"}
{#snippet footer()}
<Button type="submit" variant="primary" form="access-extension-form">
Save extension settings
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_api_key"
title="API Key"
description="Rotate your API key if you think it has been exposed."
hasBody={Boolean(rotatedApiKeyError || rotatedApiKey)}
>
{#if rotatedApiKeyError}
<p
class="mt-3 rounded-md border border-danger/40 bg-danger/10 px-3 py-2 text-sm text-red"
class="rounded-md border border-danger/40 bg-danger/10 px-3 py-2 text-sm text-red"
>
{rotatedApiKeyError}
</p>
{/if}
{#if rotatedApiKey}
<div class="mt-4 rounded-md border border-surface-200 bg-darker p-3">
<div class="rounded-md border border-surface-200 bg-darker p-3">
<p class="text-xs font-semibold uppercase tracking-wide text-muted">
New API key
</p>
@ -179,30 +179,35 @@
</Button>
</div>
{/if}
</section>
<section id="user_config_file">
<h2 class="text-xl font-semibold text-surface-content">
WakaTime Config File
</h2>
<p class="mt-1 text-sm text-muted">
Copy this into your <code class="rounded bg-darker px-1 py-0.5 text-xs"
>~/.wakatime.cfg</code
> file.
</p>
{#snippet footer()}
<Button
type="button"
onclick={openRotateApiKeyModal}
disabled={rotatingApiKey}
>
{rotatingApiKey ? "Rotating..." : "Rotate API key"}
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_config_file"
title="WakaTime Config File"
description="Copy this into your ~/.wakatime.cfg file."
wide
>
{#if config_file.has_api_key && config_file.content}
<pre
class="mt-4 overflow-x-auto rounded-md border border-surface-200 bg-darker p-4 text-xs text-surface-content">{config_file.content}</pre>
class="overflow-x-auto rounded-md border border-surface-200 bg-darker p-4 text-xs text-surface-content">{config_file.content}</pre>
{:else}
<p
class="mt-4 rounded-md border border-surface-200 bg-darker px-3 py-2 text-sm text-muted"
class="rounded-md border border-surface-200 bg-darker px-3 py-2 text-sm text-muted"
>
{config_file.empty_message}
</p>
{/if}
</section>
</div>
</SectionCard>
</SettingsShell>
<Modal

View file

@ -1,40 +0,0 @@
<script lang="ts">
import SettingsShell from "./Shell.svelte";
import type { AdminPageProps } from "./types";
let {
active_section,
section_paths,
page_title,
heading,
subheading,
admin_tools,
errors,
}: AdminPageProps = $props();
</script>
<SettingsShell
{active_section}
{section_paths}
{page_title}
{heading}
{subheading}
{errors}
{admin_tools}
>
{#if admin_tools.visible}
<div class="space-y-3">
<h2 class="text-xl font-semibold text-surface-content">Admin</h2>
<p class="text-sm text-muted">
Mirror and import controls are available under Data settings for all
users.
</p>
</div>
{:else}
<p
class="rounded-md border border-surface-200 bg-darker px-3 py-2 text-sm text-muted"
>
You are not authorized to access this section.
</p>
{/if}
</SettingsShell>

View file

@ -1,5 +1,6 @@
<script lang="ts">
import Select from "../../../components/Select.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { BadgesPageProps } from "./types";
@ -12,7 +13,6 @@
options,
badges,
errors,
admin_tools,
}: BadgesPageProps = $props();
const defaultTheme = (themes: string[]) =>
@ -60,16 +60,14 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div class="space-y-8">
<section id="user_stats_badges">
<h2 class="text-xl font-semibold text-surface-content">Stats Badges</h2>
<p class="mt-1 text-sm text-muted">
Generate links for profile badges that display your coding stats.
</p>
<div class="mt-4 space-y-4">
<SectionCard
id="user_stats_badges"
title="Stats Badges"
description="Generate links for profile badges that display your coding stats."
wide
>
<div class="space-y-4">
<div>
<label
for="badge_theme"
@ -125,17 +123,15 @@
</div>
</div>
{/if}
</section>
</SectionCard>
<section id="user_markscribe">
<h2 class="text-xl font-semibold text-surface-content">
Markscribe Template
</h2>
<p class="mt-1 text-sm text-muted">
Use this snippet with markscribe to include your coding stats in a
README.
</p>
<div class="mt-4 rounded-md border border-surface-200 bg-darker p-4">
<SectionCard
id="user_markscribe"
title="Markscribe Template"
description="Use this snippet with markscribe to include your coding stats in a README."
wide
>
<div class="rounded-md border border-surface-200 bg-darker p-4">
<pre
class="overflow-x-auto text-sm text-surface-content">{badges.markscribe_template}</pre>
</div>
@ -154,20 +150,23 @@
alt="Example markscribe output"
class="mt-4 w-full max-w-3xl rounded-md border border-surface-200"
/>
</section>
</SectionCard>
<section id="user_heatmap">
<h2 class="text-xl font-semibold text-surface-content">
Activity Heatmap
</h2>
<p class="mt-1 text-sm text-muted">
A <a
<SectionCard
id="user_heatmap"
title="Activity Heatmap"
description="A customizable heatmap for your coding activity."
wide
>
<p class="text-sm text-muted">
Configuration:
<a
class="text-primary underline"
href={badges.heatmap_config_url}
target="_blank">customizable</a
> heatmap for your coding activity.
target="_blank">open heatmap builder</a
>
</p>
<div class="mt-4 pb-3 rounded-md border border-surface-200 bg-darker p-4">
<div class="mt-4 rounded-md border border-surface-200 bg-darker p-4 pb-3">
<a href={badges.heatmap_config_url} target="_blank" class="block">
<img
src={badges.heatmap_badge_url}
@ -178,6 +177,5 @@
<pre
class="mt-2 overflow-x-auto text-xs text-surface-content">{badges.heatmap_badge_url}</pre>
</div>
</section>
</div>
</SectionCard>
</SettingsShell>

File diff suppressed because it is too large Load diff

View file

@ -4,6 +4,7 @@
import Modal from "../../../components/Modal.svelte";
import MultiSelectCombobox from "../../../components/MultiSelectCombobox.svelte";
import Select from "../../../components/Select.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { GoalsPageProps, ProgrammingGoal } from "./types";
@ -26,7 +27,6 @@
user,
options,
errors,
admin_tools,
goal_form,
}: GoalsPageProps = $props();
@ -203,34 +203,19 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div>
<section id="user_programming_goals">
<div class="flex flex-wrap items-center justify-between gap-3">
<div>
<h2 class="text-xl font-semibold text-surface-content">
Programming Goals
</h2>
<p class="mt-1 text-sm text-muted">Set up to {MAX_GOALS} goals.</p>
</div>
<div class="flex items-center gap-3">
<SectionCard
id="user_programming_goals"
title="Programming Goals"
description={`Set up to ${MAX_GOALS} goals for your daily, weekly, or monthly coding targets.`}
footerClass="flex flex-col gap-3 sm:flex-row sm:items-center sm:justify-between"
>
<div class="flex items-center justify-between gap-3">
<p
class="text-xs font-semibold uppercase tracking-wider text-secondary/80 sm:text-sm"
>
{activeGoalSummary}
</p>
<Button
type="button"
variant="primary"
class="rounded-md px-3 py-2"
onclick={openCreateModal}
disabled={hasReachedGoalLimit || submitting}
>
New goal
</Button>
</div>
</div>
{#if goals.length === 0}
@ -240,14 +225,6 @@
<p class="text-sm text-muted">
Set a goal to track your coding consistency.
</p>
<Button
type="button"
class="mt-4 rounded-md"
onclick={openCreateModal}
disabled={submitting}
>
Add new goal...
</Button>
</div>
{:else}
<div
@ -294,8 +271,26 @@
{/each}
</div>
{/if}
</section>
</div>
{#snippet footer()}
<p class="text-sm text-muted">
{#if hasReachedGoalLimit}
Goal limit reached. Delete an existing goal before adding another.
{:else}
Add a goal to stay accountable across languages and projects.
{/if}
</p>
<Button
type="button"
variant="primary"
class="rounded-md px-3 py-2"
onclick={openCreateModal}
disabled={hasReachedGoalLimit || submitting}
>
New goal
</Button>
{/snippet}
</SectionCard>
</SettingsShell>
<Modal

View file

@ -3,6 +3,7 @@
import { onMount } from "svelte";
import Button from "../../../components/Button.svelte";
import Modal from "../../../components/Modal.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { IntegrationsPageProps } from "./types";
@ -19,7 +20,6 @@
emails,
paths,
errors,
admin_tools,
}: IntegrationsPageProps = $props();
let csrfToken = $state("");
@ -45,27 +45,28 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div class="space-y-8">
<section id="user_slack_status">
<h2 class="text-xl font-semibold text-surface-content">
Slack Status Sync
</h2>
<p class="mt-1 text-sm text-muted">
Keep your Slack status updated while you are actively coding.
</p>
<SectionCard
id="user_slack_status"
title="Slack Status Sync"
description="Keep your Slack status updated while you are actively coding."
>
<div class="space-y-4">
{#if !slack.can_enable_status}
<a
href={paths.slack_auth_path}
class="mt-4 inline-flex rounded-md border border-surface-200 bg-surface-100 px-3 py-2 text-sm text-surface-content transition-colors hover:bg-surface-200"
class="inline-flex rounded-md border border-surface-200 bg-surface-100 px-3 py-2 text-sm text-surface-content transition-colors hover:bg-surface-200"
>
Re-authorize with Slack
</a>
{/if}
<form method="post" action={settings_update_path} class="mt-4 space-y-3">
<form
id="integrations-slack-form"
method="post"
action={settings_update_path}
class="space-y-3"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -83,23 +84,26 @@
</Checkbox.Root>
Update my Slack status automatically
</label>
<Button type="submit">Save Slack settings</Button>
</form>
</section>
</div>
<section id="user_slack_notifications">
<h2 class="text-xl font-semibold text-surface-content">
Slack Channel Notifications
</h2>
<p class="mt-1 text-sm text-muted">
Enable notifications in any channel by running
<code
class="rounded bg-darker px-1 py-0.5 text-xs text-surface-content"
{#snippet footer()}
<Button type="submit" form="integrations-slack-form">
Save Slack settings
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_slack_notifications"
title="Slack Channel Notifications"
description="Enable notifications in any channel by running /sailorslog on in that channel."
>
<p class="text-sm text-muted">
Command:
<code class="rounded bg-darker px-1 py-0.5 text-xs text-surface-content">
/sailorslog on
</code>
in that channel.
</p>
{#if slack.notification_channels.length > 0}
@ -121,26 +125,27 @@
No channel notifications are enabled.
</p>
{/if}
</section>
<section id="user_github_account">
<h2 class="text-xl font-semibold text-surface-content">
Connected GitHub Account
</h2>
<p class="mt-1 text-sm text-muted">
Connect GitHub to show project links in dashboards and leaderboards.
</p>
</SectionCard>
<SectionCard
id="user_github_account"
title="Connected GitHub Account"
description="Connect GitHub to show project links in dashboards and leaderboards."
hasBody={Boolean(github.connected && github.username)}
>
{#if github.connected && github.username}
<div
class="mt-4 rounded-md border border-surface-200 bg-darker px-3 py-3 text-sm text-surface-content"
class="rounded-md border border-surface-200 bg-darker px-3 py-3 text-sm text-surface-content"
>
Connected as
<a href={github.profile_url || "#"} target="_blank" class="underline">
@{github.username}
</a>
</div>
<div class="mt-3 flex flex-wrap gap-3">
{/if}
{#snippet footer()}
{#if github.connected && github.username}
<Button href={paths.github_auth_path} native class="rounded-md">
Reconnect GitHub
</Button>
@ -152,23 +157,20 @@
>
Unlink GitHub
</Button>
</div>
{:else}
<Button href={paths.github_auth_path} native class="mt-4 rounded-md">
<Button href={paths.github_auth_path} native class="rounded-md">
Connect GitHub
</Button>
{/if}
</section>
{/snippet}
</SectionCard>
<section id="user_email_addresses">
<h2 class="text-xl font-semibold text-surface-content">
Email Addresses
</h2>
<p class="mt-1 text-sm text-muted">
Add or remove email addresses used for sign-in and verification.
</p>
<div class="mt-4 space-y-2">
<SectionCard
id="user_email_addresses"
title="Email Addresses"
description="Add or remove email addresses used for sign-in and verification."
>
<div class="space-y-2">
{#if emails.length > 0}
{#each emails as email}
<div
@ -209,6 +211,7 @@
</div>
<form
id="integrations-email-form"
method="post"
action={paths.add_email_path}
class="mt-4 flex flex-col gap-3 sm:flex-row"
@ -221,10 +224,14 @@
placeholder="name@example.com"
class="grow rounded-md border border-surface-200 bg-darker px-3 py-2 text-sm text-surface-content focus:border-primary focus:outline-none"
/>
<Button type="submit" class="rounded-md">Add email</Button>
</form>
</section>
</div>
{#snippet footer()}
<Button type="submit" class="rounded-md" form="integrations-email-form">
Add email
</Button>
{/snippet}
</SectionCard>
</SettingsShell>
<Modal

View file

@ -2,6 +2,7 @@
import { Checkbox } from "bits-ui";
import { onMount } from "svelte";
import Button from "../../../components/Button.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { NotificationsPageProps } from "./types";
@ -14,11 +15,14 @@
settings_update_path,
user,
errors,
admin_tools,
}: NotificationsPageProps = $props();
let csrfToken = $state("");
let weeklySummaryEmailEnabled = $state(user.weekly_summary_email_enabled);
let weeklySummaryEmailEnabled = $state(false);
$effect(() => {
weeklySummaryEmailEnabled = user.weekly_summary_email_enabled;
});
onMount(() => {
csrfToken =
@ -35,19 +39,18 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div class="space-y-8">
<section id="user_email_notifications">
<h2 class="text-xl font-semibold text-surface-content">
Email Notifications
</h2>
<p class="mt-1 text-sm text-muted">
Control which product emails Hackatime sends to your linked email
addresses.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-4">
<SectionCard
id="user_email_notifications"
title="Email Notifications"
description="Control which product emails Hackatime sends to your linked email addresses."
>
<form
id="notifications-settings-form"
method="post"
action={settings_update_path}
class="space-y-4"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -74,9 +77,12 @@
Includes your weekly coding time, top projects, and top languages.
</p>
</div>
<Button type="submit">Save notification settings</Button>
</form>
</section>
</div>
{#snippet footer()}
<Button type="submit" form="notifications-settings-form">
Save notification settings
</Button>
{/snippet}
</SectionCard>
</SettingsShell>

View file

@ -3,6 +3,7 @@
import { onMount } from "svelte";
import Button from "../../../components/Button.svelte";
import Select from "../../../components/Select.svelte";
import SectionCard from "./components/SectionCard.svelte";
import SettingsShell from "./Shell.svelte";
import type { ProfilePageProps } from "./types";
@ -18,7 +19,6 @@
options,
badges,
errors,
admin_tools,
}: ProfilePageProps = $props();
let csrfToken = $state("");
@ -45,18 +45,18 @@
{heading}
{subheading}
{errors}
{admin_tools}
>
<div class="space-y-8">
<section id="user_region">
<h2 class="text-xl font-semibold text-surface-content">
Region and Timezone
</h2>
<p class="mt-1 text-sm text-muted">
Use your local region and timezone for accurate dashboards and
leaderboards.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-4">
<SectionCard
id="user_region"
title="Region and Timezone"
description="Use your local region and timezone for accurate dashboards and leaderboards."
>
<form
id="profile-region-form"
method="post"
action={settings_update_path}
class="space-y-4"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -89,17 +89,26 @@
items={options.timezones}
/>
</div>
<Button type="submit" variant="primary">Save region settings</Button>
</form>
</section>
<section id="user_username">
<h2 class="text-xl font-semibold text-surface-content">Username</h2>
<p class="mt-1 text-sm text-muted">
This username is used in links and public profile pages.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-3">
{#snippet footer()}
<Button type="submit" variant="primary" form="profile-region-form">
Save region settings
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_username"
title="Username"
description="This username is used in links and public profile pages."
>
<form
id="profile-username-form"
method="post"
action={settings_update_path}
class="space-y-3"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -119,12 +128,10 @@
<p class="mt-2 text-xs text-red">{errors.username[0]}</p>
{/if}
</div>
<Button type="submit" variant="primary">Save username</Button>
</form>
{#if badges.profile_url}
<p class="mt-3 text-sm text-muted">
<p class="text-sm text-muted">
Public profile:
<a
href={badges.profile_url}
@ -135,23 +142,30 @@
</a>
</p>
{/if}
</section>
<section id="user_privacy">
<h2 class="text-xl font-semibold text-surface-content">Privacy</h2>
<p class="mt-1 text-sm text-muted">
Control whether your coding stats can be used by public APIs.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-3">
{#snippet footer()}
<Button type="submit" variant="primary" form="profile-username-form">
Save username
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_privacy"
title="Privacy"
description="Control whether your coding stats can be used by public APIs."
>
<form
id="profile-privacy-form"
method="post"
action={settings_update_path}
class="space-y-3"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
<label class="flex items-center gap-3 text-sm text-surface-content">
<input
type="hidden"
name="user[allow_public_stats_lookup]"
value="0"
/>
<input type="hidden" name="user[allow_public_stats_lookup]" value="0" />
<Checkbox.Root
bind:checked={allowPublicStatsLookup}
name="user[allow_public_stats_lookup]"
@ -164,17 +178,27 @@
</Checkbox.Root>
Allow public stats lookup
</label>
<Button type="submit" variant="primary">Save privacy settings</Button>
</form>
</section>
<section id="user_theme">
<h2 class="text-xl font-semibold text-surface-content">Theme</h2>
<p class="mt-1 text-sm text-muted">
Pick how Hackatime looks for your account.
</p>
<form method="post" action={settings_update_path} class="mt-4 space-y-4">
{#snippet footer()}
<Button type="submit" variant="primary" form="profile-privacy-form">
Save privacy settings
</Button>
{/snippet}
</SectionCard>
<SectionCard
id="user_theme"
title="Theme"
description="Pick how Hackatime looks for your account."
wide
>
<form
id="profile-theme-form"
method="post"
action={settings_update_path}
class="space-y-4"
>
<input type="hidden" name="_method" value="patch" />
<input type="hidden" name="authenticity_token" value={csrfToken} />
@ -217,9 +241,7 @@
<span class="text-[10px] opacity-80">2h 14m</span>
</div>
<div
class="mt-2 grid grid-cols-[1fr_auto] items-center gap-2"
>
<div class="mt-2 grid grid-cols-[1fr_auto] items-center gap-2">
<span
class="h-2 rounded"
style={`background:${theme.preview.primary};`}
@ -249,9 +271,12 @@
</RadioGroup.Item>
{/each}
</RadioGroup.Root>
<Button type="submit" variant="primary">Save theme</Button>
</form>
</section>
</div>
{#snippet footer()}
<Button type="submit" variant="primary" form="profile-theme-form">
Save theme
</Button>
{/snippet}
</SectionCard>
</SettingsShell>

View file

@ -2,7 +2,8 @@
import { Link } from "@inertiajs/svelte";
import type { Snippet } from "svelte";
import { onMount } from "svelte";
import { buildSections, sectionFromHash } from "./types";
import SubsectionNav from "./components/SubsectionNav.svelte";
import { buildSections, buildSubsections, sectionFromHash } from "./types";
import type { SectionPaths, SettingsCommonProps } from "./types";
let {
@ -12,20 +13,20 @@
heading,
subheading,
errors,
admin_tools,
children,
}: SettingsCommonProps & { children?: Snippet } = $props();
const sections = $derived(buildSections(section_paths, admin_tools.visible));
const sections = $derived(buildSections(section_paths));
const subsections = $derived(buildSubsections(active_section));
const knownSectionIds = $derived(
new Set(sections.map((section) => section.id)),
);
const sectionButtonClass = (sectionId: keyof SectionPaths) =>
`block w-full px-4 py-4 text-left transition-colors ${
`group block w-full rounded-xl border px-3 py-3 text-left transition-colors ${
active_section === sectionId
? "bg-surface-100 text-surface-content"
: "bg-surface text-muted hover:bg-surface-100 hover:text-surface-content"
? "border-surface-300 bg-surface-100 text-surface-content shadow-[0_1px_0_rgba(255,255,255,0.02)]"
: "border-transparent bg-transparent text-muted hover:border-surface-200 hover:bg-surface-100/60 hover:text-surface-content"
}`;
onMount(() => {
@ -49,10 +50,12 @@
<title>{page_title}</title>
</svelte:head>
<div class="mx-auto max-w-7xl">
<div data-settings-shell class="mx-auto max-w-7xl">
<header class="mb-8">
<h1 class="text-3xl font-bold text-surface-content">{heading}</h1>
<p class="mt-2 text-sm text-muted">{subheading}</p>
<h1 class="text-3xl font-bold tracking-tight text-surface-content">
{heading}
</h1>
<p class="mt-2 max-w-3xl text-sm leading-6 text-muted">{subheading}</p>
</header>
{#if errors.full_messages.length > 0}
@ -68,22 +71,48 @@
</div>
{/if}
<div class="grid grid-cols-1 gap-6 lg:grid-cols-[260px_minmax(0,1fr)]">
<aside class="h-max lg:sticky lg:top-8">
<nav
data-settings-mobile-nav
class="-mx-5 mb-6 overflow-x-auto px-5 lg:hidden"
>
<div class="flex min-w-full gap-2 pb-1">
{#each sections as section}
<Link
href={section.path}
class={`inline-flex shrink-0 items-center rounded-full border px-3 py-2 text-sm font-medium transition-colors ${
active_section === section.id
? "border-surface-300 bg-surface-100 text-surface-content"
: "border-surface-200 bg-surface/70 text-muted hover:border-surface-300 hover:text-surface-content"
}`}
>
{section.label}
</Link>
{/each}
</div>
</nav>
<div
class="overflow-hidden rounded-xl border border-surface-200 bg-surface divide-y divide-surface-200"
class="grid grid-cols-1 gap-6 lg:grid-cols-[280px_minmax(0,1fr)] lg:gap-8"
>
<aside class="hidden h-max lg:sticky lg:top-8 lg:block">
<div
data-settings-sidebar
class="rounded-2xl border border-surface-200 bg-surface/90 p-2 shadow-[0_1px_0_rgba(255,255,255,0.02)]"
>
{#each sections as section}
<Link href={section.path} class={sectionButtonClass(section.id)}>
<p class="text-sm font-semibold">{section.label}</p>
<p class="mt-1 text-xs opacity-80">{section.blurb}</p>
<p class="mt-1 text-xs leading-5 opacity-80">{section.blurb}</p>
</Link>
{/each}
</div>
</aside>
<section class="rounded-xl border border-surface-200 bg-surface p-5 md:p-6">
<div data-settings-content class="min-w-0 space-y-5">
<SubsectionNav items={subsections} />
<div class="space-y-5">
{@render children?.()}
</section>
</div>
</div>
</div>
</div>

View file

@ -0,0 +1,70 @@
<script lang="ts">
  // Generic card wrapper for one settings subsection: a header (title +
  // description), an optional body, and an optional footer, each rendered
  // via Svelte 5 snippets.
  import type { Snippet } from "svelte";

  // Visual tone of the card; "danger" styles destructive sections
  // (e.g. account deletion) with a red border/background tint.
  type Tone = "default" | "danger";

  let {
    id,
    title,
    description,
    tone = "default",
    wide = false,
    hasBody = true,
    footerClass = "flex items-center justify-end gap-3",
    children,
    footer,
  }: {
    id?: string; // anchor id so subsection-nav links can scroll to this card
    title: string;
    description: string;
    tone?: Tone;
    wide?: boolean; // when true, body/description are not width-constrained
    hasBody?: boolean; // set false to render header (and footer) only
    footerClass?: string; // layout classes for the footer's inner container
    children?: Snippet; // card body content
    footer?: Snippet; // footer content (typically submit buttons)
  } = $props();

  // Border/background treatment derived from the tone.
  const toneClasses = $derived(
    tone === "danger"
      ? "border-danger/35 bg-danger/5"
      : "border-surface-200 bg-surface",
  );
  // Body content is capped at max-w-2xl unless the card opts into `wide`.
  const contentWidth = $derived(wide ? "" : "max-w-2xl");
  const descriptionWidth = $derived(wide ? "max-w-3xl" : "max-w-2xl");
</script>

<!-- scroll-mt-24 offsets anchored scrolling so the header isn't hidden
     behind sticky navigation when a subnav link jumps here. -->
<section
  {id}
  data-settings-card
  data-settings-card-tone={tone}
  class={`scroll-mt-24 overflow-hidden rounded-2xl border ${toneClasses}`}
>
  <div class="border-b border-surface-200 px-5 py-4 sm:px-6 sm:py-5">
    <div class={descriptionWidth}>
      <h2 class="text-xl font-semibold tracking-tight text-surface-content">
        {title}
      </h2>
      <p class="mt-1 text-sm leading-6 text-muted">{description}</p>
    </div>
  </div>
  {#if hasBody}
    <div class="px-5 py-4 sm:px-6 sm:py-5">
      <div class={contentWidth}>
        {@render children?.()}
      </div>
    </div>
  {/if}
  {#if footer}
    <div
      data-settings-footer
      class="border-t border-surface-200 bg-surface-100/60 px-5 py-3.5 sm:px-6 sm:py-4"
    >
      <div class={`${contentWidth} ${footerClass}`}>
        {@render footer()}
      </div>
    </div>
  {/if}
</section>

View file

@ -0,0 +1,65 @@
<script lang="ts">
  // Horizontal pill navigation for jumping between anchored cards inside one
  // settings section. Tracks the URL hash so the active pill stays in sync
  // with manual hash changes as well as clicks.
  import { onMount } from "svelte";
  import type { SettingsSubsection } from "../types";

  let { items = [] }: { items?: SettingsSubsection[] } = $props();

  // Current location hash without the leading "#"; empty until hydrated.
  let activeHash = $state("");

  const normalizedHash = (value: string) => value.replace(/^#/, "");

  // Syncs activeHash from window.location (guarded so SSR is a no-op).
  const updateActiveHash = () => {
    if (typeof window === "undefined") return;
    activeHash = normalizedHash(window.location.hash);
  };

  // Smooth-scrolls to the target card and rewrites the hash via
  // replaceState so no new history entry is pushed. Falls back to default
  // anchor behavior (no preventDefault) when the target element is missing.
  const scrollToItem = (event: MouseEvent, id: string) => {
    if (typeof window === "undefined" || typeof document === "undefined")
      return;
    const target = document.getElementById(id);
    if (!target) return;
    event.preventDefault();
    activeHash = id;
    target.scrollIntoView({ behavior: "smooth", block: "start" });
    window.history.replaceState(null, "", `#${id}`);
  };

  // With no hash present, the first item is treated as active.
  const isActive = (id: string) => {
    if (!activeHash) return items[0]?.id === id;
    return activeHash === id;
  };

  onMount(() => {
    updateActiveHash();
    window.addEventListener("hashchange", updateActiveHash);
    return () => window.removeEventListener("hashchange", updateActiveHash);
  });
</script>

{#if items.length > 0}
  <nav
    data-settings-subnav
    aria-label="Settings subsections"
    class="overflow-x-auto pb-1"
  >
    <div class="flex min-w-full items-center gap-2">
      {#each items as item}
        <a
          href={`#${item.id}`}
          data-settings-subnav-item
          data-active={isActive(item.id)}
          onclick={(event) => scrollToItem(event, item.id)}
          class={`inline-flex shrink-0 items-center rounded-full border px-3 py-1.5 text-sm font-medium transition-colors ${
            isActive(item.id)
              ? "border-surface-300 bg-surface-100 text-surface-content"
              : "border-surface-200 bg-surface/70 text-muted hover:border-surface-300 hover:text-surface-content"
          }`}
        >
          {item.label}
        </a>
      {/each}
    </div>
  </nav>
{/if}

View file

@ -5,11 +5,22 @@ export type SectionId =
| "access"
| "goals"
| "badges"
| "data"
| "admin";
| "data";
export type SectionPaths = Record<SectionId, string>;
export type SettingsSection = {
id: SectionId;
label: string;
blurb: string;
path: string;
};
export type SettingsSubsection = {
id: string;
label: string;
};
export type Option = {
label: string;
value: string;
@ -82,14 +93,10 @@ export type PathsProps = {
add_email_path: string;
unlink_email_path: string;
rotate_api_key_path: string;
migrate_heartbeats_path: string;
export_all_heartbeats_path: string;
export_range_heartbeats_path: string;
create_heartbeat_import_path: string;
create_deletion_path: string;
user_wakatime_mirrors_path: string;
heartbeat_import_source_path: string;
heartbeat_import_source_sync_path: string;
};
export type OptionsProps = {
@ -148,11 +155,6 @@ export type ConfigFileProps = {
api_url: string;
};
export type MigrationProps = {
enabled: boolean;
jobs: { id: string; status: string }[];
};
export type DataExportProps = {
total_heartbeats: string;
total_coding_time: string;
@ -160,29 +162,15 @@ export type DataExportProps = {
is_restricted: boolean;
};
export type AdminToolsProps = {
visible: boolean;
mirrors: {
id: number;
endpoint_url: string;
enabled?: boolean;
last_synced_at?: string | null;
last_synced_ago: string;
consecutive_failures?: number;
last_error_message?: string | null;
last_error_at?: string | null;
destroy_path: string;
}[];
};
export type UiProps = {
show_dev_import: boolean;
show_imports_and_mirrors: boolean;
show_imports: boolean;
};
export type HeartbeatImportStatusProps = {
import_id: string;
state: string;
source_kind: string;
progress_percent: number;
processed_count: number;
total_count: number | null;
@ -190,31 +178,14 @@ export type HeartbeatImportStatusProps = {
skipped_count: number | null;
errors_count: number;
message: string;
error_message?: string | null;
remote_dump_status?: string | null;
remote_percent_complete?: number | null;
cooldown_until?: string | null;
source_filename?: string | null;
updated_at: string;
started_at?: string;
finished_at?: string;
};
export type HeartbeatImportProps = {
import_id?: string | null;
status?: HeartbeatImportStatusProps | null;
};
export type HeartbeatImportSourceProps = {
id: number;
provider: string;
endpoint_url: string;
sync_enabled: boolean;
status: string;
initial_backfill_start_date?: string | null;
initial_backfill_end_date?: string | null;
backfill_cursor_date?: string | null;
last_synced_at?: string | null;
last_synced_ago?: string | null;
last_error_message?: string | null;
last_error_at?: string | null;
consecutive_failures: number;
imported_count: number;
started_at?: string | null;
finished_at?: string | null;
};
export type ErrorsProps = {
@ -229,7 +200,6 @@ export type SettingsCommonProps = {
heading: string;
subheading: string;
errors: ErrorsProps;
admin_tools: AdminToolsProps;
};
export type ProfilePageProps = SettingsCommonProps & {
@ -278,83 +248,108 @@ export type BadgesPageProps = SettingsCommonProps & {
export type DataPageProps = SettingsCommonProps & {
user: UserProps;
paths: PathsProps;
migration: MigrationProps;
data_export: DataExportProps;
import_source?: HeartbeatImportSourceProps | null;
mirrors: AdminToolsProps["mirrors"];
imports_enabled: boolean;
remote_import_cooldown_until?: string | null;
latest_heartbeat_import?: HeartbeatImportStatusProps | null;
ui: UiProps;
heartbeat_import: HeartbeatImportProps;
};
export type AdminPageProps = SettingsCommonProps & {
admin_tools: AdminToolsProps;
paths: PathsProps;
};
export const buildSections = (sectionPaths: SectionPaths, adminVisible: boolean) => {
const sections = [
export const buildSections = (
sectionPaths: SectionPaths,
): SettingsSection[] => [
{
id: "profile" as SectionId,
id: "profile",
label: "Profile",
blurb: "Username, region, timezone, and privacy.",
path: sectionPaths.profile,
},
{
id: "integrations" as SectionId,
id: "integrations",
label: "Integrations",
blurb: "Slack status, GitHub link, and email sign-in addresses.",
path: sectionPaths.integrations,
},
{
id: "notifications" as SectionId,
id: "notifications",
label: "Notifications",
blurb: "Email notifications and weekly summary preferences.",
path: sectionPaths.notifications,
},
{
id: "access" as SectionId,
id: "access",
label: "Access",
blurb: "Time tracking setup, extension options, and API key access.",
path: sectionPaths.access,
},
{
id: "goals" as SectionId,
id: "goals",
label: "Goals",
blurb: "Set daily, weekly, or monthly programming targets.",
path: sectionPaths.goals,
},
{
id: "badges" as SectionId,
id: "badges",
label: "Badges",
blurb: "Shareable badges and profile snippets.",
path: sectionPaths.badges,
},
{
id: "data" as SectionId,
id: "data",
label: "Data",
blurb: "Exports, imports, mirrors, migration jobs, and deletion controls.",
blurb: "Exports, imports, and deletion controls.",
path: sectionPaths.data,
},
];
if (adminVisible) {
sections.push({
id: "admin",
label: "Admin",
blurb: "Administrative controls.",
path: sectionPaths.admin,
});
}
return sections;
const subsectionMap: Record<SectionId, SettingsSubsection[]> = {
profile: [
{ id: "user_region", label: "Region" },
{ id: "user_username", label: "Username" },
{ id: "user_privacy", label: "Privacy" },
{ id: "user_theme", label: "Theme" },
],
integrations: [
{ id: "user_slack_status", label: "Slack status" },
{ id: "user_slack_notifications", label: "Slack channels" },
{ id: "user_github_account", label: "GitHub" },
{ id: "user_email_addresses", label: "Email addresses" },
],
notifications: [
{ id: "user_email_notifications", label: "Email notifications" },
],
access: [
{ id: "user_tracking_setup", label: "Setup" },
{ id: "user_hackatime_extension", label: "Extension display" },
{ id: "user_api_key", label: "API key" },
{ id: "user_config_file", label: "Config file" },
],
goals: [
{ id: "user_programming_goals", label: "Programming goals" },
],
badges: [
{ id: "user_stats_badges", label: "Stats badges" },
{ id: "user_markscribe", label: "Markscribe" },
{ id: "user_heatmap", label: "Heatmap" },
],
data: [
{ id: "user_imports", label: "Imports" },
{ id: "download_user_data", label: "Download data" },
{ id: "delete_account", label: "Account deletion" },
],
};
export const buildSubsections = (
activeSection: SectionId,
): SettingsSubsection[] => subsectionMap[activeSection] || [];
const hashSectionMap: Record<string, SectionId> = {
user_region: "profile",
user_timezone: "profile",
user_username: "profile",
user_privacy: "profile",
user_theme: "profile",
user_tracking_setup: "access",
user_hackatime_extension: "access",
user_api_key: "access",
user_config_file: "access",
@ -368,11 +363,9 @@ const hashSectionMap: Record<string, SectionId> = {
user_stats_badges: "badges",
user_markscribe: "badges",
user_heatmap: "badges",
user_migration_assistant: "data",
wakatime_import_source: "data",
user_imports: "data",
download_user_data: "data",
delete_account: "data",
wakatime_mirror: "data",
};
export const sectionFromHash = (hash: string): SectionId | null => {

View file

@ -0,0 +1,122 @@
# Drives the remote "data dump" phase of a heartbeat import: requests a dump
# from the external provider, polls until it is ready, then downloads it and
# hands the file off to HeartbeatImportRunner (see methods below).
class HeartbeatImportDumpJob < ApplicationJob
  queue_as :latency_10s

  include GoodJob::ActiveJobExtensions::Concurrency

  # Delay between polls while the remote provider is still preparing the dump.
  POLL_INTERVAL = 30.seconds

  # Transient provider errors retry with quadratic backoff plus 1-4s jitter.
  retry_on HeartbeatImportDumpClient::TransientError,
    wait: ->(executions) { (executions**2).seconds + rand(1..4).seconds },
    attempts: 8

  # At most one concurrent job per import-run id (arguments.first).
  good_job_control_concurrency_with(
    key: -> { "heartbeat_import_dump_job_#{arguments.first}" },
    total_limit: 1
  )
def perform(import_run_id)
run = HeartbeatImportRun.includes(:user).find_by(id: import_run_id)
return unless run&.remote?
return if run.terminal?
unless Flipper.enabled?(:imports, run.user)
fail_run!(run, "Imports are no longer enabled for this user.")
return
end
client = HeartbeatImportDumpClient.new(source_kind: run.source_kind, api_key: run.encrypted_api_key)
if run.remote_dump_id.blank?
request_dump!(run, client)
return
end
sync_dump!(run, client)
rescue HeartbeatImportDumpClient::AuthenticationError => e
fail_run!(run, e.message)
rescue HeartbeatImportDumpClient::RequestError => e
fail_run!(run, e.message)
end
private
def request_dump!(run, client)
run.update!(
state: :requesting_dump,
started_at: run.started_at || Time.current,
message: "Requesting data dump...",
error_message: nil
)
dump = client.request_dump
run.update!(
state: :waiting_for_dump,
remote_dump_id: dump[:id],
remote_dump_status: dump[:status],
remote_percent_complete: dump[:percent_complete],
remote_requested_at: Time.current,
message: waiting_message(dump),
error_message: nil
)
enqueue_poll(run)
end
def sync_dump!(run, client)
dump = client.list_dumps.find { |item| item[:id] == run.remote_dump_id.to_s }
raise HeartbeatImportDumpClient::RequestError, "Data dump #{run.remote_dump_id} was not found." if dump.blank?
if dump[:has_failed] || dump[:is_stuck]
fail_run!(run, "The remote provider could not finish preparing the data dump.")
return
end
if dump[:is_processing] || dump[:download_url].blank?
run.update!(
state: :waiting_for_dump,
remote_dump_status: dump[:status],
remote_percent_complete: dump[:percent_complete],
message: waiting_message(dump),
error_message: nil
)
enqueue_poll(run)
return
end
run.update!(
state: :downloading_dump,
remote_dump_status: dump[:status],
remote_percent_complete: dump[:percent_complete].positive? ? dump[:percent_complete] : 100.0,
message: "Downloading data dump...",
error_message: nil
)
file_content = client.download_dump(dump[:download_url])
file_path = HeartbeatImportRunner.persist_remote_download(run:, file_content:)
HeartbeatImportJob.perform_later(run.id, file_path)
end
def enqueue_poll(run)
self.class.set(wait: POLL_INTERVAL).perform_later(run.id)
end
def waiting_message(dump)
status = dump[:status].presence || "Preparing data dump"
percent = dump[:percent_complete].to_f
return "#{status}..." unless percent.positive?
"#{status} (#{percent.round}%)"
end
def fail_run!(run, message)
run.update!(
state: :failed,
remote_dump_status: run.remote_dump_status.presence || "failed",
message: "Import failed: #{message}",
error_message: message,
finished_at: Time.current
)
run.clear_sensitive_fields!
end
end

View file

@ -1,7 +1,7 @@
# Imports heartbeats for a HeartbeatImportRun from a previously persisted
# dump file on disk, delegating the actual parsing/upserting to
# HeartbeatImportRunner.
#
# NOTE(review): the original text contained two conflicting `def perform`
# signatures with an unbalanced `end` (a diff/merge artifact); only the
# current keyword-argument form is kept.
class HeartbeatImportJob < ApplicationJob
  queue_as :default

  # @param import_run_id [Integer] id of the HeartbeatImportRun to process
  # @param file_path [String] path to the dump file written earlier
  def perform(import_run_id, file_path)
    HeartbeatImportRunner.run_import(import_run_id:, file_path:)
  end
end

View file

@ -1,11 +0,0 @@
# Periodic fan-out: enqueues a sync job for every import source that has
# syncing enabled and is not paused. Gated behind the
# :wakatime_imports_mirrors feature flag.
class HeartbeatImportSourceSchedulerJob < ApplicationJob
  queue_as :latency_5m

  # Enqueues one HeartbeatImportSourceSyncJob per eligible source.
  def perform
    return unless Flipper.enabled?(:wakatime_imports_mirrors)

    eligible_source_ids = HeartbeatImportSource
      .where(sync_enabled: true)
      .where.not(status: :paused)
      .pluck(:id)

    eligible_source_ids.each { |id| HeartbeatImportSourceSyncJob.perform_later(id) }
  end
end

View file

@ -1,149 +0,0 @@
# Syncs a single calendar day of heartbeats from a user's WakaTime-compatible
# import source into the local Heartbeat table.
class HeartbeatImportSourceSyncDayJob < ApplicationJob
  queue_as :latency_5m
  include GoodJob::ActiveJobExtensions::Concurrency

  # Quadratic backoff with jitter for transient upstream failures.
  retry_on WakatimeCompatibleClient::TransientError,
    wait: ->(executions) { (executions**2).seconds + rand(1..4).seconds },
    attempts: 8

  # Only one job per (source, day) pair at a time.
  good_job_control_concurrency_with(
    key: -> { "heartbeat_import_source_sync_day_job_#{arguments.first}_#{arguments.second}" },
    total_limit: 1
  )

  # Fetches the given ISO-8601 day from the source and upserts the results,
  # then clears the source's error bookkeeping. Auth failures pause the
  # source; transient errors record failure state and re-raise so retry_on
  # can back off; request/parse errors just record failure state.
  def perform(source_id, date_string)
    return unless Flipper.enabled?(:wakatime_imports_mirrors)
    source = HeartbeatImportSource.find_by(id: source_id)
    return unless source&.sync_enabled?
    date = Date.iso8601(date_string)
    rows = source.client.fetch_heartbeats(date:)
    upsert_heartbeats(source.user_id, rows)
    source.update!(
      last_synced_at: Time.current,
      last_error_message: nil,
      last_error_at: nil,
      consecutive_failures: 0
    )
  rescue WakatimeCompatibleClient::AuthenticationError => e
    # Bad API key: stop syncing this source until the user intervenes.
    source&.update!(
      sync_enabled: false,
      status: :paused,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
  rescue WakatimeCompatibleClient::TransientError => e
    # Preserve :backfilling so the backfill can resume after the retry.
    source&.update!(
      status: source&.backfilling? ? :backfilling : :failed,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
    raise
  rescue WakatimeCompatibleClient::RequestError => e
    source&.update!(
      status: :failed,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
  rescue ArgumentError => e
    # Covers, among others, Date.iso8601 on a malformed date string.
    source&.update!(
      status: :failed,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
  end

  private

  # Normalizes the upstream rows, dedupes on fields_hash (keeping the row
  # with the latest timestamp), and bulk-upserts into Heartbeat.
  def upsert_heartbeats(user_id, rows)
    normalized = rows.filter_map { |row| normalize_row(user_id, row) }
    return if normalized.empty?
    deduped_records = normalized.group_by { |record| record[:fields_hash] }.map do |_, records|
      records.max_by { |record| record[:time].to_f }
    end
    Heartbeat.upsert_all(deduped_records, unique_by: [ :fields_hash ])
  end

  # Maps one upstream heartbeat row to local Heartbeat attributes (including
  # a computed fields_hash); returns nil when no usable timestamp is present
  # or the row is malformed.
  def normalize_row(user_id, row)
    data = row.respond_to?(:with_indifferent_access) ? row.with_indifferent_access : row.to_h.with_indifferent_access
    timestamp = extract_timestamp(data)
    return nil if timestamp.blank?
    attrs = {
      user_id: user_id,
      branch: value_or_nil(data[:branch]),
      category: value_or_nil(data[:category]) || "coding",
      dependencies: extract_dependencies(data[:dependencies]),
      editor: value_or_nil(data[:editor]),
      entity: value_or_nil(data[:entity]),
      language: value_or_nil(data[:language]),
      machine: value_or_nil(data[:machine]),
      operating_system: value_or_nil(data[:operating_system]),
      project: value_or_nil(data[:project]),
      type: value_or_nil(data[:type]),
      user_agent: value_or_nil(data[:user_agent]),
      line_additions: data[:line_additions],
      line_deletions: data[:line_deletions],
      lineno: data[:lineno],
      lines: data[:lines],
      cursorpos: data[:cursorpos],
      project_root_count: data[:project_root_count],
      time: timestamp,
      is_write: ActiveModel::Type::Boolean.new.cast(data[:is_write]),
      source_type: :wakapi_import
    }
    # upsert_all skips timestamp columns, so set them explicitly.
    now = Time.current
    attrs[:created_at] = now
    attrs[:updated_at] = now
    attrs[:fields_hash] = Heartbeat.generate_fields_hash(attrs)
    attrs
  rescue TypeError, JSON::ParserError
    nil
  end

  # Accepts dependencies as an array, a JSON-encoded string, or a
  # comma-separated string; always returns a plain array.
  def extract_dependencies(value)
    return value if value.is_a?(Array)
    return [] if value.blank?
    JSON.parse(value.to_s)
  rescue JSON::ParserError
    value.to_s.split(",").map(&:strip).reject(&:blank?)
  end

  # Extracts a positive epoch-seconds Float from :time (falling back to
  # :created_at). Numeric values above ~1e12 are treated as milliseconds;
  # strings are parsed with Time.parse. Returns nil when unusable.
  def extract_timestamp(data)
    value = data[:time]
    value = data[:created_at] if value.blank?
    return nil if value.blank?
    if value.is_a?(Numeric)
      normalized = value.to_f
      return (normalized / 1000.0) if normalized > 1_000_000_000_000
      return normalized
    end
    parsed = Time.parse(value.to_s).to_f
    return parsed if parsed.positive?
    nil
  rescue ArgumentError
    nil
  end

  # Strips strings (blank -> nil); passes non-string values through untouched.
  def value_or_nil(value)
    return nil if value.nil?
    return value.strip.presence if value.is_a?(String)
    value
  end
end

View file

@ -1,131 +0,0 @@
# Orchestrates syncing for one import source: initializes and advances the
# historical backfill window, or — once caught up — syncs yesterday and today.
class HeartbeatImportSourceSyncJob < ApplicationJob
  queue_as :latency_5m
  include GoodJob::ActiveJobExtensions::Concurrency

  # Number of days scheduled per backfill pass before re-enqueueing.
  BACKFILL_WINDOW_DAYS = 5

  # Quadratic backoff with jitter for transient upstream failures.
  retry_on WakatimeCompatibleClient::TransientError,
    wait: ->(executions) { (executions**2).seconds + rand(1..4).seconds },
    attempts: 8

  # Only one orchestrator per source at a time.
  good_job_control_concurrency_with(
    key: -> { "heartbeat_import_source_sync_job_#{arguments.first}" },
    total_limit: 1
  )

  # Entry point: sets up backfill on first run, schedules the next backfill
  # window while backfilling, and otherwise enqueues day syncs for yesterday
  # and today. Error handling mirrors the day job: auth pauses the source,
  # transient errors re-raise for retry, request errors mark it failed.
  def perform(source_id)
    return unless Flipper.enabled?(:wakatime_imports_mirrors)
    source = HeartbeatImportSource.find_by(id: source_id)
    return unless source&.sync_enabled?
    return if source.paused?
    initialize_backfill_if_needed(source)
    if source.backfilling?
      schedule_backfill_window(source)
      return
    end
    source.update!(status: :syncing)
    enqueue_day_sync(source, Date.yesterday)
    enqueue_day_sync(source, Date.current)
  rescue WakatimeCompatibleClient::AuthenticationError => e
    source&.update!(
      sync_enabled: false,
      status: :paused,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
  rescue WakatimeCompatibleClient::TransientError => e
    source&.update!(
      status: source&.backfilling? ? :backfilling : :failed,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
    raise
  rescue WakatimeCompatibleClient::RequestError => e
    source&.update!(
      status: :failed,
      last_error_message: e.message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: source.consecutive_failures.to_i + 1
    )
  end

  private

  # Determines the backfill date range — from the source's configured dates,
  # falling back to the provider's reported first-activity date — and moves
  # the source into :backfilling. An empty range (start after end) goes
  # straight to :syncing with no cursor.
  def initialize_backfill_if_needed(source)
    should_initialize = source.idle? ||
      (source.failed? && source.backfill_cursor_date.blank? && source.last_synced_at.blank?)
    return unless should_initialize
    return unless source.backfill_cursor_date.blank?
    start_date = source.initial_backfill_start_date
    end_date = source.initial_backfill_end_date || Date.current
    if start_date.blank?
      begin
        start_date = source.client.fetch_all_time_since_today_start_date
      rescue => e
        Rails.logger.error("Failed to fetch all_time_since_today for source #{source.id}: #{e.message}")
        source.update!(status: :failed, last_error_message: e.message, last_error_at: Time.current)
        return
      end
    end
    if start_date > end_date
      source.update!(
        status: :syncing,
        backfill_cursor_date: nil,
        initial_backfill_start_date: start_date,
        initial_backfill_end_date: end_date
      )
      return
    end
    source.update!(
      status: :backfilling,
      initial_backfill_start_date: start_date,
      initial_backfill_end_date: end_date,
      backfill_cursor_date: start_date,
      last_error_message: nil,
      last_error_at: nil,
      consecutive_failures: 0
    )
  end

  # Enqueues day jobs for the next window of up to BACKFILL_WINDOW_DAYS days,
  # advances the cursor, and re-enqueues this orchestrator; flips the source
  # to :syncing once the cursor passes the end date.
  def schedule_backfill_window(source)
    cursor = source.backfill_cursor_date
    end_date = source.initial_backfill_end_date || Date.current
    return if cursor.blank?
    if cursor > end_date
      source.update!(status: :syncing, backfill_cursor_date: nil)
      self.class.perform_later(source.id)
      return
    end
    window_end = [ cursor + (BACKFILL_WINDOW_DAYS - 1).days, end_date ].min
    (cursor..window_end).each do |date|
      enqueue_day_sync(source, date)
    end
    next_cursor = window_end + 1.day
    if next_cursor > end_date
      source.update!(status: :syncing, backfill_cursor_date: nil)
      self.class.perform_later(source.id)
    else
      source.update!(status: :backfilling, backfill_cursor_date: next_cursor)
      self.class.perform_later(source.id)
    end
  end

  # Enqueues a single-day sync for the given source and date.
  def enqueue_day_sync(source, date)
    HeartbeatImportSourceSyncDayJob.perform_later(source.id, date.iso8601)
  end
end

View file

@ -1,108 +0,0 @@
# One-shot migration of a user's legacy Hackatime (waka.hackclub.com) data:
# copies their API key and all heartbeats into this app, then resets the
# Sailor's Log summary so the imported time does not trigger notifications.
class MigrateUserFromHackatimeJob < ApplicationJob
  queue_as :latency_5m
  include GoodJob::ActiveJobExtensions::Concurrency

  # only allow one instance of this job to run at a time per user
  good_job_control_concurrency_with(
    key: -> { "migrate_user_from_hackatime_job_#{arguments.first}" },
    total_limit: 1,
  )

  # @param user_id [Integer] local User id; legacy rows are keyed by Slack UID
  def perform(user_id)
    return unless Flipper.enabled?(:hackatime_v1_import)
    @user = User.find(user_id)

    # Legacy data is keyed by Slack UID; nothing to import without one.
    return unless @user.slack_uid.present?
    return unless Hackatime::User.exists?(id: @user.slack_uid)

    import_api_keys
    import_heartbeats

    # We don't want to trigger notifications due to extra time from importing
    # heartbeats, so reset
    reset_sailors_log
  end

  private

  def reset_sailors_log
    return unless @user.sailors_log.present?
    @user.sailors_log.update!(projects_summary: {})
    @user.sailors_log.send(:initialize_projects_summary)
  end

  # Copies legacy heartbeats in batches, deduplicating on fields_hash
  # (keeping the most recent timestamp) before upserting.
  def import_heartbeats
    Hackatime::Heartbeat.where(user_id: @user.slack_uid).find_in_batches do |batch|
      records_to_upsert = batch.map { |heartbeat| heartbeat_attributes(heartbeat) }

      # dedupe records by fields_hash
      records_to_upsert = records_to_upsert.group_by { |r| r[:fields_hash] }.map do |_, records|
        records.max_by { |r| r[:time] }
      end

      Heartbeat.upsert_all(records_to_upsert, unique_by: [ :fields_hash ])
    end
  end

  # Maps one legacy heartbeat row onto this app's Heartbeat attributes,
  # including the computed fields_hash used as the upsert key.
  def heartbeat_attributes(heartbeat)
    attrs = {
      user_id: @user.id,
      time: heartbeat.time,
      project: heartbeat.project,
      branch: heartbeat.branch,
      category: heartbeat.category,
      dependencies: normalize_dependencies(heartbeat.dependencies),
      editor: heartbeat.editor,
      entity: heartbeat.entity,
      language: heartbeat.language,
      machine: heartbeat.machine,
      operating_system: heartbeat.operating_system,
      type: heartbeat.type,
      user_agent: heartbeat.user_agent,
      line_additions: heartbeat.line_additions,
      line_deletions: heartbeat.line_deletions,
      lineno: heartbeat.line_number,
      lines: heartbeat.lines,
      cursorpos: heartbeat.cursor_position,
      project_root_count: heartbeat.project_root_count,
      is_write: heartbeat.is_write,
      source_type: :wakapi_import
    }
    {
      **attrs,
      fields_hash: Heartbeat.generate_fields_hash(attrs)
    }
  end

  # Normalizes the legacy dependencies column — JSON string, "{a,b}"-style
  # string, array, or nil — to a plain array.
  def normalize_dependencies(raw)
    return raw.presence || [] unless raw.is_a?(String)
    JSON.parse(raw)
  rescue JSON::ParserError
    # If it can't be parsed as JSON, strip braces, split by comma and clean up
    raw.gsub(/[{}]/, "").split(",").map(&:strip)
  end

  # Carries the user's legacy API key over as a named, upserted ApiKey.
  def import_api_keys
    Rails.logger.info("Importing API keys for user #{@user.id}")
    # find_by (not find) so a vanished legacy row is a no-op, not an exception.
    hackatime_user = Hackatime::User.find_by(id: @user.slack_uid)
    return if hackatime_user.nil?
    ApiKey.upsert(
      {
        user_id: @user.id,
        name: "Imported from Hackatime",
        token: hackatime_user.api_key
      },
      unique_by: [ :user_id, :name ]
    )
  end
end

View file

@ -1,24 +0,0 @@
# Debounced fan-out: when a user's heartbeats change, enqueue a sync job for
# each of their active WakaTime mirrors, at most once per DEBOUNCE_TTL.
class MirrorFanoutEnqueueJob < ApplicationJob
  queue_as :latency_10s

  # Minimum gap between fan-outs for the same user.
  DEBOUNCE_TTL = 10.seconds

  def perform(user_id)
    return unless Flipper.enabled?(:wakatime_imports_mirrors)
    return if debounced?(user_id)

    user = User.find_by(id: user_id)
    return if user.nil?

    user.wakatime_mirrors.active.pluck(:id).each do |mirror_id|
      WakatimeMirrorSyncJob.perform_later(mirror_id)
    end
  end

  private

  # True (skip work) when a fan-out already ran for this user within the
  # debounce window; otherwise stamps the window and returns false.
  def debounced?(user_id)
    cache_key = "mirror_fanout_enqueue_job:user:#{user_id}"
    return true if Rails.cache.read(cache_key)

    Rails.cache.write(cache_key, true, expires_in: DEBOUNCE_TTL)
    false
  end
end

View file

@ -30,8 +30,7 @@ class SailorsLogPollForChangesJob < ApplicationJob
private
def update_sailors_log(sailors_log)
# Skip if there's an active migration job for this user
return [] if sailors_log.user.in_progress_migration_jobs?
return [] if sailors_log.user.active_remote_heartbeat_import_run?
project_updates = []
project_durations = Heartbeat.where(user_id: sailors_log.user.id)

View file

@ -1,86 +0,0 @@
# Pushes a user's locally-recorded (direct_entry) heartbeats to an external
# WakaTime-style mirror endpoint in id-ordered batches, resuming from the
# last successfully mirrored heartbeat id.
class WakatimeMirrorSyncJob < ApplicationJob
  queue_as :latency_10s
  include GoodJob::ActiveJobExtensions::Concurrency

  # Heartbeats sent per HTTP request.
  BATCH_SIZE = 25
  # Cap per job execution; remaining work is re-enqueued as a new job.
  MAX_BATCHES_PER_RUN = 20

  # Raised for retryable mirror responses (408/429/5xx).
  class MirrorTransientError < StandardError; end

  # Quadratic backoff with jitter for retryable failures.
  retry_on MirrorTransientError,
    wait: ->(executions) { (executions**2).seconds + rand(1..4).seconds },
    attempts: 8
  retry_on HTTP::TimeoutError, HTTP::ConnectionError,
    wait: ->(executions) { (executions**2).seconds + rand(1..4).seconds },
    attempts: 8

  # Only one sync per mirror at a time.
  good_job_control_concurrency_with(
    key: -> { "wakatime_mirror_sync_job_#{arguments.first}" },
    total_limit: 1
  )

  # Streams batches to the mirror, advancing the cursor after each success.
  # Auth failures (401/403) disable the mirror; transient statuses raise for
  # retry; any other failure marks the mirror failed and stops.
  def perform(mirror_id)
    return unless Flipper.enabled?(:wakatime_imports_mirrors)
    mirror = WakatimeMirror.find_by(id: mirror_id)
    return unless mirror&.enabled?
    batches_processed = 0
    cursor = mirror.last_synced_heartbeat_id.to_i
    loop do
      batch = mirror.direct_heartbeats_after(cursor).limit(BATCH_SIZE).to_a
      break if batch.empty?
      response = mirror.post_heartbeats(batch.map { |heartbeat| mirror_payload(heartbeat) })
      status_code = response.status.to_i
      if response.status.success?
        cursor = batch.last.id
        mirror.update!(
          last_synced_heartbeat_id: cursor,
          last_synced_at: Time.current,
          consecutive_failures: 0,
          last_error_message: nil,
          last_error_at: nil
        )
      elsif [ 401, 403 ].include?(status_code)
        mirror.mark_auth_failed!("Authentication failed (#{status_code}). Check your API key.")
        return
      elsif transient_status?(status_code)
        mirror.record_transient_failure!("Mirror request failed with status #{status_code}.")
        raise MirrorTransientError, "Mirror request failed with status #{status_code}"
      else
        mirror.mark_failed!("Mirror request failed with status #{status_code}.")
        return
      end
      batches_processed += 1
      break if batches_processed >= MAX_BATCHES_PER_RUN
    end
    # Hit the per-run cap with work remaining: continue in a fresh job.
    if batches_processed >= MAX_BATCHES_PER_RUN &&
        mirror.direct_heartbeats_after(cursor).exists?
      self.class.perform_later(mirror.id)
    end
  rescue HTTP::TimeoutError, HTTP::ConnectionError => e
    # Record the failure for visibility, then let retry_on handle the backoff.
    mirror&.record_transient_failure!("Mirror request failed: #{e.class.name}")
    raise
  end

  private

  # Attributes to send for one heartbeat (user_id excluded — the mirror
  # associates heartbeats with its own authenticated user).
  def mirror_payload(heartbeat)
    heartbeat.attributes.slice(*payload_attributes)
  end

  def payload_attributes
    @payload_attributes ||= Heartbeat.indexed_attributes - [ "user_id" ]
  end

  # 408 Request Timeout, 429 Too Many Requests, and all 5xx are retryable.
  def transient_status?(status_code)
    status_code == 408 || status_code == 429 || status_code >= 500
  end
end

View file

@ -1,17 +0,0 @@
# Read model for heartbeats stored in the legacy Hackatime (waka.hackclub.com)
# database; rows are keyed by the user's Slack UID.
class Hackatime::Heartbeat < HackatimeRecord
  include Heartbeatable

  # Count of heartbeats from the last 24 hours, cached for 5 minutes.
  def self.cached_recent_count
    Rails.cache.fetch("heartbeats_recent_count", expires_in: 5.minutes) do
      recent.size
    end
  end

  scope :recent, -> { where("time > ?", 24.hours.ago) }
  scope :today, -> { where("DATE(time) = ?", Date.current) }

  # This is a hack to avoid using the default Rails inheritance column; Rails
  # is confused by the field `type` in the db
  self.inheritance_column = nil

  # Prevent collision with Ruby's hash method
  self.ignored_columns += [ "hash" ]
end

View file

@ -1,14 +0,0 @@
# Read model for project labels stored in the legacy Hackatime database.
class Hackatime::ProjectLabel < HackatimeRecord
  self.table_name = "project_labels"

  # Heartbeats whose project matches this label's project_key, restricted to
  # the same legacy user.
  has_many :heartbeats,
    ->(project) { where(user_id: project.user_id) },
    foreign_key: :project,
    primary_key: :project_key,
    class_name: "Hackatime::Heartbeat"

  # Legacy rows reference users by Slack UID rather than the local User id.
  belongs_to :user,
    foreign_key: :user_id,
    primary_key: :slack_uid,
    class_name: "User"
end

View file

@ -1,3 +0,0 @@
# Read model for user rows in the legacy Hackatime (waka.hackclub.com) database.
class Hackatime::User < HackatimeRecord
  self.table_name = "users"
end

View file

@ -1,9 +0,0 @@
# Abstract base class for models backed by the legacy "wakatime" database.
class HackatimeRecord < ApplicationRecord
  self.abstract_class = true
  begin
    connects_to database: { reading: :wakatime, writing: :wakatime }
  rescue StandardError => e
    # The legacy database config may be absent (e.g. local dev); degrade to a
    # warning instead of failing at class-load time.
    Rails.logger.warn "HackatimeRecord: Could not connect to wakatime database: #{e.message}"
  end
end

View file

@ -0,0 +1,88 @@
# Tracks one heartbeat import attempt for a user — either a local dev file
# upload or a remote data dump (WakaTime / Hackatime v1) — through its full
# lifecycle from queued to completed/failed.
class HeartbeatImportRun < ApplicationRecord
  # Minimum gap between remote dump requests per user.
  COOLDOWN = 4.hours
  # States from which a run never progresses further.
  TERMINAL_STATES = %w[completed failed].freeze
  # States that count as "an import is currently in progress".
  ACTIVE_STATES = %w[queued requesting_dump waiting_for_dump downloading_dump importing].freeze
  # Source kinds that hit an external provider (everything except dev_upload).
  REMOTE_SOURCE_KINDS = %w[wakatime_dump hackatime_v1_dump].freeze

  belongs_to :user

  # Provider API key, encrypted at rest; cleared once the run finishes.
  encrypts :encrypted_api_key, deterministic: false

  enum :source_kind, {
    dev_upload: 0,
    wakatime_dump: 1,
    hackatime_v1_dump: 2
  }
  enum :state, {
    queued: 0,
    requesting_dump: 1,
    waiting_for_dump: 2,
    downloading_dump: 3,
    importing: 4,
    completed: 5,
    failed: 6
  }

  # Remote imports need an API key to request the dump; dev uploads don't.
  validates :encrypted_api_key, presence: true, on: :create, unless: :dev_upload?

  scope :latest_first, -> { order(created_at: :desc) }
  scope :active_imports, -> { where(state: states.values_at(*ACTIVE_STATES)) }
  scope :remote_imports, -> { where(source_kind: source_kinds.values_at(*REMOTE_SOURCE_KINDS)) }

  # True for imports that fetch data from an external provider.
  def remote?
    !dev_upload?
  end

  # True once the run can no longer change state.
  def terminal?
    TERMINAL_STATES.include?(state)
  end

  def active_import?
    ACTIVE_STATES.include?(state)
  end

  # When the next remote dump may be requested; nil if none was requested.
  def cooldown_until
    return nil if remote_requested_at.blank?
    remote_requested_at + COOLDOWN
  end

  # Integer 0-100. While the provider prepares the dump, surfaces its percent;
  # during the import phase, uses processed/total counts.
  def progress_percent
    return 100 if completed?
    if waiting_for_dump? || downloading_dump? || requesting_dump?
      return remote_percent_complete.to_f.clamp(0, 100).round
    end
    return 0 unless total_count.to_i.positive?
    ((processed_count.to_f / total_count.to_f) * 100).clamp(0, 100).round
  end

  # Drops the stored API key; update_columns intentionally skips callbacks
  # and validations.
  def clear_sensitive_fields!
    update_columns(encrypted_api_key: nil, updated_at: Time.current)
  end

  # The user's most recent in-progress run, if any.
  def self.active_for(user)
    where(user: user).active_imports.latest_first.first
  end

  # The user's most recent run of any state, if any.
  def self.latest_for(user)
    where(user: user).latest_first.first
  end

  # When the user may next start a remote import, or nil if not rate-limited.
  def self.remote_cooldown_until_for(user)
    latest_remote_request = where(user: user)
      .remote_imports
      .where.not(remote_requested_at: nil)
      .order(remote_requested_at: :desc)
      .pick(:remote_requested_at)
    return nil if latest_remote_request.blank?
    retry_at = latest_remote_request + COOLDOWN
    retry_at.future? ? retry_at : nil
  end
end

View file

@ -1,77 +0,0 @@
# A user's configured WakaTime-compatible endpoint used for ongoing heartbeat
# syncing/backfill into this app (at most one source per user).
class HeartbeatImportSource < ApplicationRecord
  require "uri"

  belongs_to :user

  # Upstream API key, encrypted at rest.
  encrypts :encrypted_api_key, deterministic: false

  enum :provider, {
    wakatime_compatible: 0
  }
  # Sync lifecycle: idle -> backfilling -> syncing, with paused/failed for
  # error states.
  enum :status, {
    idle: 0,
    backfilling: 1,
    syncing: 2,
    paused: 3,
    failed: 4
  }

  validates :provider, presence: true
  validates :endpoint_url, presence: true
  validates :encrypted_api_key, presence: true
  # One import source per user.
  validates :user_id, uniqueness: true
  validate :validate_endpoint_url
  validate :validate_backfill_range
  before_validation :normalize_endpoint_url

  # API client bound to this source's endpoint and credentials.
  def client
    WakatimeCompatibleClient.new(endpoint_url:, api_key: encrypted_api_key)
  end

  # Clears all sync progress and error state so backfill restarts from scratch.
  def reset_backfill!
    update!(
      status: :idle,
      backfill_cursor_date: nil,
      last_synced_at: nil,
      last_error_message: nil,
      last_error_at: nil,
      consecutive_failures: 0
    )
  end

  private

  # Trims whitespace and strips trailing slashes from the endpoint URL.
  def normalize_endpoint_url
    self.endpoint_url = endpoint_url.to_s.strip.sub(%r{/*\z}, "")
  end

  # When both backfill dates are set, start must not be after end.
  def validate_backfill_range
    return unless initial_backfill_start_date.present? && initial_backfill_end_date.present?
    return unless initial_backfill_start_date > initial_backfill_end_date
    errors.add(:initial_backfill_end_date, "must be on or after the start date")
  end

  # Endpoint must be a well-formed HTTP(S) URL with a host; https is required
  # outside development.
  def validate_endpoint_url
    return if endpoint_url.blank?
    uri = URI.parse(endpoint_url)
    unless uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
      errors.add(:endpoint_url, "must be an HTTP or HTTPS URL")
      return
    end
    if uri.host.blank?
      errors.add(:endpoint_url, "must include a host")
      return
    end
    if !Rails.env.development? && uri.scheme != "https"
      errors.add(:endpoint_url, "must use https")
    end
  rescue URI::InvalidURIError
    errors.add(:endpoint_url, "is invalid")
  end
end

View file

@ -115,17 +115,6 @@ class User < ApplicationRecord
has_many :sign_in_tokens, dependent: :destroy
has_many :project_repo_mappings
has_many :hackatime_heartbeats,
foreign_key: :user_id,
primary_key: :slack_uid,
class_name: "Hackatime::Heartbeat"
has_many :project_labels,
foreign_key: :user_id,
primary_key: :slack_uid,
class_name: "Hackatime::ProjectLabel"
has_many :api_keys
has_many :admin_api_keys, dependent: :destroy
has_many :oauth_applications, as: :owner, dependent: :destroy
@ -135,8 +124,7 @@ class User < ApplicationRecord
primary_key: :slack_uid,
class_name: "SailorsLog"
has_many :wakatime_mirrors, dependent: :destroy
has_one :heartbeat_import_source, dependent: :destroy
has_many :heartbeat_import_runs, dependent: :destroy
scope :search_identity, ->(term) {
term = term.to_s.strip.downcase
@ -225,19 +213,12 @@ class User < ApplicationRecord
after_save :invalidate_activity_graph_cache, if: :saved_change_to_timezone?
def data_migration_jobs
GoodJob::Job.where(
"serialized_params->>'arguments' = ?", [ id ].to_json
).where(
"job_class = ?", "MigrateUserFromHackatimeJob"
).order(created_at: :desc).limit(10).all
def flipper_id
"User;#{id}"
end
def in_progress_migration_jobs?
GoodJob::Job.where(job_class: "MigrateUserFromHackatimeJob")
.where("serialized_params->>'arguments' = ?", [ id ].to_json)
.where(finished_at: nil)
.exists?
def active_remote_heartbeat_import_run?
heartbeat_import_runs.remote_imports.active_imports.exists?
end
def format_extension_text(duration)

View file

@ -1,111 +0,0 @@
# A user-configured outbound mirror: heartbeats recorded here are pushed to
# an external WakaTime-compatible endpoint by WakatimeMirrorSyncJob.
class WakatimeMirror < ApplicationRecord
  require "uri"

  belongs_to :user

  # Mirror API key, encrypted at rest.
  encrypts :encrypted_api_key, deterministic: false

  # NOTE(review): presumably set by the controller from the current request so
  # validation can reject self-referential mirrors — confirm against callers.
  attr_accessor :request_host

  validates :endpoint_url, presence: true
  validates :encrypted_api_key, presence: true
  validates :endpoint_url, uniqueness: { scope: :user_id }
  validate :validate_endpoint_url
  before_validation :normalize_endpoint_url
  before_create :initialize_last_synced_heartbeat_id

  scope :active, -> { where(enabled: true) }

  # Direct-entry heartbeats not yet mirrored, in ascending id order.
  def direct_heartbeats_after(heartbeat_id)
    user.heartbeats.where(source_type: :direct_entry).where("id > ?", heartbeat_id.to_i).order(id: :asc)
  end

  # POSTs a batch of heartbeat payloads to the mirror's bulk endpoint using
  # HTTP Basic auth with the base64-encoded API key.
  def post_heartbeats(payload)
    HTTP.timeout(connect: 5, read: 30, write: 10)
      .headers(
        "Authorization" => "Basic #{Base64.strict_encode64(encrypted_api_key)}",
        "Content-Type" => "application/json"
      )
      .post("#{endpoint_url}/users/current/heartbeats.bulk", json: payload)
  end

  # Resets all failure bookkeeping.
  def clear_error_state!
    update!(
      last_error_message: nil,
      last_error_at: nil,
      consecutive_failures: 0
    )
  end

  # Records a retryable failure; the mirror stays enabled.
  def record_transient_failure!(message)
    update!(
      status_payload_for_failure(message, keep_enabled: true)
    )
  end

  # Records an auth failure and disables the mirror.
  def mark_auth_failed!(message)
    update!(
      status_payload_for_failure(message, keep_enabled: false)
    )
  end

  # Records a hard failure without changing the current enabled flag.
  def mark_failed!(message)
    update!(
      status_payload_for_failure(message, keep_enabled: enabled)
    )
  end

  private

  # New mirrors start at the user's current newest heartbeat so only future
  # heartbeats get mirrored.
  def initialize_last_synced_heartbeat_id
    self.last_synced_heartbeat_id ||= user.heartbeats.maximum(:id)
  end

  # Trims whitespace and strips trailing slashes from the endpoint URL.
  def normalize_endpoint_url
    self.endpoint_url = endpoint_url.to_s.strip.sub(%r{/*\z}, "")
  end

  # Endpoint must be a well-formed HTTP(S) URL with a host, use https outside
  # development, and must not point back at this Hackatime instance.
  def validate_endpoint_url
    return unless endpoint_url.present?
    uri = URI.parse(endpoint_url)
    unless uri.is_a?(URI::HTTP) || uri.is_a?(URI::HTTPS)
      errors.add(:endpoint_url, "must be an HTTP or HTTPS URL")
      return
    end
    if uri.host.blank?
      errors.add(:endpoint_url, "must include a host")
      return
    end
    if !Rails.env.development? && uri.scheme != "https"
      errors.add(:endpoint_url, "must use https")
      return
    end
    if disallowed_hosts.include?(uri.host.downcase)
      errors.add(:endpoint_url, "cannot target this Hackatime host")
    end
  rescue URI::InvalidURIError
    errors.add(:endpoint_url, "is invalid")
  end

  # Hosts a mirror may never target: known Hackatime hosts, localhost, the
  # current request's host, and the configured mailer host.
  def disallowed_hosts
    hosts = %w[hackatime.hackclub.com www.hackatime.hackclub.com localhost 127.0.0.1]
    hosts << request_host.to_s.downcase if request_host.present?
    default_host = Rails.application.config.action_mailer.default_url_options&.dig(:host)
    hosts << default_host.to_s.downcase if default_host.present?
    hosts.uniq
  end

  # Attribute hash shared by all failure markers; bumps consecutive_failures.
  def status_payload_for_failure(message, keep_enabled:)
    {
      enabled: keep_enabled,
      last_error_message: message.to_s.truncate(500),
      last_error_at: Time.current,
      consecutive_failures: consecutive_failures.to_i + 1
    }
  end
end

View file

@ -64,7 +64,7 @@ class AnonymizeUserService
user.admin_api_keys.destroy_all
user.sign_in_tokens.destroy_all
user.email_verification_requests.destroy_all
user.wakatime_mirrors.destroy_all
user.heartbeat_import_runs.destroy_all
user.project_repo_mappings.destroy_all
user.goals.destroy_all

View file

@ -0,0 +1,116 @@
# Thin HTTP client for the WakaTime "data dumps" API (also implemented by the
# legacy Hackatime v1 instance), used by remote heartbeat imports.
class HeartbeatImportDumpClient
  # 401/403 from the provider — caller should stop and surface the error.
  class AuthenticationError < StandardError; end
  # Retryable failure: timeout, connection error, or 408/429/5xx status.
  class TransientError < StandardError; end
  # Any other non-success response or unparseable body.
  class RequestError < StandardError; end

  # Fixed API roots per supported source kind.
  BASE_URLS = {
    "wakatime_dump" => "https://wakatime.com/api/v1",
    "hackatime_v1_dump" => "https://waka.hackclub.com/api/v1"
  }.freeze

  # Timeouts in seconds; reads are generous because dumps can be large.
  TIMEOUTS = {
    connect: 5,
    read: 60,
    write: 15
  }.freeze

  # Raises KeyError for an unrecognized source_kind.
  def initialize(source_kind:, api_key:)
    @source_kind = source_kind.to_s
    @endpoint_url = BASE_URLS.fetch(@source_kind)
    @api_key = api_key.to_s
  end

  # Asks the provider to start building a heartbeats dump; returns the
  # normalized dump descriptor (see #normalize_dump).
  def request_dump
    body = request_json(
      method: :post,
      path: "/users/current/data_dumps",
      json: { type: "heartbeats", email_when_finished: false }
    )
    normalize_dump(body.fetch("data"))
  end

  # Lists the user's existing dumps as normalized descriptors.
  def list_dumps
    body = request_json(method: :get, path: "/users/current/data_dumps")
    Array(body["data"]).map { |dump| normalize_dump(dump) }
  end

  # Fetches the dump file (following redirects) and returns the raw body.
  def download_dump(download_url)
    response = HTTP.follow.timeout(TIMEOUTS).headers(download_headers(download_url)).get(download_url)
    handle_response_errors(response)
    response.to_s
  rescue HTTP::TimeoutError, HTTP::ConnectionError => e
    raise TransientError, e.message
  end

  def self.base_url_for(source_kind)
    BASE_URLS.fetch(source_kind.to_s)
  end

  private

  # Performs a JSON API request and parses the response body, mapping
  # transport and parse failures onto this client's error classes.
  def request_json(method:, path:, json: nil)
    request = HTTP.timeout(TIMEOUTS).headers(headers)
    response = if json.nil?
      request.public_send(method, "#{@endpoint_url}#{path}")
    else
      request.public_send(method, "#{@endpoint_url}#{path}", json:)
    end
    handle_response_errors(response)
    JSON.parse(response.to_s)
  rescue HTTP::TimeoutError, HTTP::ConnectionError => e
    raise TransientError, e.message
  rescue JSON::ParserError
    raise RequestError, "Invalid JSON response"
  end

  # Maps status codes onto errors: 401/403 auth, 408/429/5xx transient,
  # anything else non-successful a plain request error.
  def handle_response_errors(response)
    status = response.status.to_i
    raise AuthenticationError, "Authentication failed (#{status})" if [ 401, 403 ].include?(status)
    raise TransientError, "Request failed with status #{status}" if status == 408 || status == 429 || status >= 500
    raise RequestError, "Request failed with status #{status}" unless response.status.success?
  end

  # Coerces a raw dump payload into a symbol-keyed hash with typed fields.
  def normalize_dump(dump)
    payload = dump.respond_to?(:with_indifferent_access) ? dump.with_indifferent_access : dump.to_h.with_indifferent_access
    {
      id: payload[:id].to_s,
      status: payload[:status].to_s,
      percent_complete: payload[:percent_complete].to_f,
      download_url: payload[:download_url].presence,
      type: payload[:type].to_s,
      is_processing: ActiveModel::Type::Boolean.new.cast(payload[:is_processing]),
      is_stuck: ActiveModel::Type::Boolean.new.cast(payload[:is_stuck]),
      has_failed: ActiveModel::Type::Boolean.new.cast(payload[:has_failed]),
      expires: payload[:expires],
      created_at: payload[:created_at]
    }
  end

  def headers
    {
      "Authorization" => "Basic #{Base64.strict_encode64(basic_auth_credential)}",
      "Accept" => "application/json",
      "Content-Type" => "application/json"
    }
  end

  # Only sends the Authorization header when the download URL is on the same
  # host as the API (dump files may be served from external object storage,
  # which must not receive our credentials).
  def download_headers(download_url)
    uri = URI.parse(download_url)
    endpoint_uri = URI.parse(@endpoint_url)
    if uri.host == endpoint_uri.host
      headers.merge("Accept" => "application/json,application/octet-stream,*/*")
    else
      { "Accept" => "application/json,application/octet-stream,*/*" }
    end
  rescue URI::InvalidURIError
    { "Accept" => "application/json,application/octet-stream,*/*" }
  end

  # WakaTime expects base64("api_key:") for Basic auth; the legacy endpoint
  # takes the bare key as the whole credential.
  def basic_auth_credential
    @source_kind == "wakatime_dump" ? "#{@api_key}:" : @api_key
  end
end

View file

@ -1,134 +1,195 @@
require "fileutils"
require "securerandom"
require "stringio"
require "zlib"
class HeartbeatImportRunner
STATUS_TTL = 12.hours
PROGRESS_INTERVAL = 250
REMOTE_REFRESH_THROTTLE = 5.seconds
TMP_DIR = Rails.root.join("tmp", "heartbeat_imports")
def self.start(user:, uploaded_file:)
import_id = SecureRandom.uuid
file_path = persist_uploaded_file(uploaded_file, import_id)
class ActiveImportError < StandardError; end
class CooldownError < StandardError
attr_reader :retry_at
write_status(user_id: user.id, import_id: import_id, attributes: {
state: "queued",
progress_percent: 0,
processed_count: 0,
total_count: nil,
imported_count: nil,
skipped_count: nil,
errors_count: 0,
def initialize(retry_at)
@retry_at = retry_at
super("Remote imports are limited to once every 4 hours.")
end
end
class FeatureDisabledError < StandardError; end
class InvalidProviderError < StandardError; end
def self.start_dev_upload(user:, uploaded_file:)
ensure_no_active_import!(user)
run = user.heartbeat_import_runs.create!(
source_kind: :dev_upload,
source_filename: uploaded_file.original_filename.to_s,
state: :queued,
message: "Queued import."
})
)
HeartbeatImportJob.perform_later(user.id, import_id, file_path)
import_id
file_path = persist_uploaded_file(uploaded_file, run.id)
HeartbeatImportJob.perform_later(run.id, file_path)
run
end
def self.status(user:, import_id:)
Rails.cache.read(cache_key(user.id, import_id))
def self.start_remote_import(user:, provider:, api_key:)
ensure_imports_enabled!(user)
ensure_no_active_import!(user)
ensure_remote_cooldown!(user)
run = user.heartbeat_import_runs.create!(
source_kind: normalize_provider(provider),
state: :queued,
encrypted_api_key: api_key.to_s,
message: "Queued import."
)
HeartbeatImportDumpJob.perform_later(run.id)
run
end
def self.run_import(user_id:, import_id:, file_path:)
def self.find_run(user:, import_id:)
user.heartbeat_import_runs.find_by(id: import_id)
end
def self.latest_run(user:)
HeartbeatImportRun.latest_for(user)
end
def self.refresh_remote_run!(run)
return run unless run&.remote?
return run unless run.active_import?
return run unless Flipper.enabled?(:imports, run.user)
return run if run.updated_at > REMOTE_REFRESH_THROTTLE.ago
if inline_good_job_execution?
HeartbeatImportDumpJob.perform_now(run.id)
else
HeartbeatImportDumpJob.perform_later(run.id)
end
run.reload
rescue => e
Rails.logger.error("Error refreshing heartbeat import run #{run&.id}: #{e.message}")
run
end
# Time at which +user+ may start another remote import, or nil if they are
# not currently in a cooldown window (delegates to the model).
def self.remote_import_cooldown_until(user:)
  HeartbeatImportRun.remote_cooldown_until_for(user)
end
# Serializes an import run into a plain hash for the frontend (Inertia props
# / JSON polling). Returns nil when no run is given. Timestamps are rendered
# as ISO-8601 strings; the nullable ones (cooldown_until, started_at,
# finished_at) pass through as nil when unset, and message is coerced to "".
def self.serialize(run)
  return nil if run.nil?

  payload = {}
  payload[:import_id] = run.id.to_s
  payload[:state] = run.state
  payload[:source_kind] = run.source_kind
  payload[:progress_percent] = run.progress_percent
  payload[:processed_count] = run.processed_count
  payload[:total_count] = run.total_count
  payload[:imported_count] = run.imported_count
  payload[:skipped_count] = run.skipped_count
  payload[:errors_count] = run.errors_count
  payload[:message] = run.message.to_s
  payload[:error_message] = run.error_message
  payload[:remote_dump_status] = run.remote_dump_status
  payload[:remote_percent_complete] = run.remote_percent_complete
  payload[:cooldown_until] = run.cooldown_until&.iso8601
  payload[:source_filename] = run.source_filename
  payload[:updated_at] = run.updated_at.iso8601
  payload[:started_at] = run.started_at&.iso8601
  payload[:finished_at] = run.finished_at&.iso8601
  payload
end
def self.run_import(import_run_id:, file_path:)
ActiveRecord::Base.connection_pool.with_connection do
user = User.find_by(id: user_id)
unless user
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "failed",
progress_percent: 0,
message: "User not found.",
finished_at: Time.current.iso8601
})
return
end
run = HeartbeatImportRun.includes(:user).find_by(id: import_run_id)
return unless run
return if run.terminal?
file_content = File.read(file_path).force_encoding("UTF-8")
user = run.user
file_content = decode_file_content(File.binread(file_path)).force_encoding("UTF-8")
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "running",
run.update!(
state: :importing,
total_count: nil,
progress_percent: 0,
processed_count: 0,
started_at: Time.current.iso8601,
started_at: run.started_at || Time.current,
message: "Importing heartbeats..."
})
)
result = HeartbeatImportService.import_from_file(
file_content,
user,
progress_interval: PROGRESS_INTERVAL,
on_progress: lambda { |processed_count|
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "running",
progress_percent: 0,
run.update_columns(
processed_count: processed_count,
total_count: nil,
message: "Importing heartbeats..."
})
message: "Importing heartbeats...",
updated_at: Time.current
)
}
)
if result[:success]
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "completed",
progress_percent: 100,
run.update!(
state: :completed,
processed_count: result[:total_count],
total_count: result[:total_count],
imported_count: result[:imported_count],
skipped_count: result[:skipped_count],
errors_count: result[:errors].length,
errors_count: result[:errors].size,
message: build_success_message(result),
finished_at: Time.current.iso8601
})
error_message: nil,
finished_at: Time.current
)
reset_sailors_log!(user) if run.remote?
else
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "failed",
progress_percent: 0,
run.update!(
state: :failed,
imported_count: result[:imported_count],
skipped_count: result[:skipped_count],
errors_count: result[:errors].length,
errors_count: result[:errors].size,
message: "Import failed: #{result[:error]}",
finished_at: Time.current.iso8601
})
error_message: result[:error],
finished_at: Time.current
)
end
end
rescue => e
write_status(user_id: user_id, import_id: import_id, attributes: {
state: "failed",
HeartbeatImportRun.find_by(id: import_run_id)&.update!(
state: :failed,
message: "Import failed: #{e.message}",
finished_at: Time.current.iso8601
})
error_message: e.message,
finished_at: Time.current
)
ensure
FileUtils.rm_f(file_path) if file_path.present?
ActiveRecord::Base.clear_active_connections!
HeartbeatImportRun.find_by(id: import_run_id)&.clear_sensitive_fields!
ActiveRecord::Base.connection_handler.clear_active_connections!
end
# Merges +attributes+ into the cached status payload for this import,
# refreshing the updated_at stamp, and rewrites it with the STATUS_TTL
# expiry. Returns the payload that was written.
def self.write_status(user_id:, import_id:, attributes:)
  key = cache_key(user_id, import_id)
  previous = Rails.cache.read(key) || {}
  payload = previous
    .merge(attributes)
    .merge(import_id: import_id, updated_at: Time.current.iso8601)
  Rails.cache.write(key, payload, expires_in: STATUS_TTL)
  payload
end
def self.persist_uploaded_file(uploaded_file, import_id)
tmp_dir = Rails.root.join("tmp", "heartbeat_imports")
FileUtils.mkdir_p(tmp_dir)
# Copies the Rack upload into TMP_DIR (named after the run id) so the
# background job can read it after the request ends; falls back to a .json
# extension when the original filename has none. Returns the path as a String.
def self.persist_uploaded_file(uploaded_file, import_run_id)
  FileUtils.mkdir_p(TMP_DIR)
  extension = File.extname(uploaded_file.original_filename.to_s)
  extension = ".json" if extension.blank?
  destination = TMP_DIR.join("#{import_run_id}#{extension}")
  FileUtils.cp(uploaded_file.tempfile.path, destination)
  destination.to_s
end
def self.cache_key(user_id, import_id)
"heartbeat_import_status:user:#{user_id}:import:#{import_id}"
# Writes the remote dump (decoded via decode_file_content, so gzip payloads
# are transparently inflated) to TMP_DIR, so the import job can process it
# exactly like an uploaded file. Returns the path as a String.
def self.persist_remote_download(run:, file_content:)
  FileUtils.mkdir_p(TMP_DIR)
  destination = TMP_DIR.join("#{run.id}-remote.json")
  File.binwrite(destination, decode_file_content(file_content))
  destination.to_s
end
def self.build_success_message(result)
@ -137,4 +198,56 @@ class HeartbeatImportRunner
"#{message} Skipped #{result[:skipped_count]} duplicate heartbeats."
end
# Guard: imports are gated per-user behind the :imports Flipper flag.
def self.ensure_imports_enabled!(user)
  raise FeatureDisabledError, "Imports are not enabled for this user." unless Flipper.enabled?(:imports, user)
end
# Guard: only one import may run at a time per user.
# NOTE(review): ActiveImportError is expected to be declared alongside the
# other error classes above this chunk — confirm it exists.
def self.ensure_no_active_import!(user)
  raise ActiveImportError, "Another import is already in progress." if HeartbeatImportRun.active_for(user)
end
# Guard: remote imports are rate-limited. Raises CooldownError carrying the
# time at which the next import becomes allowed (exposed as #retry_at).
def self.ensure_remote_cooldown!(user)
  next_allowed_at = remote_import_cooldown_until(user:)
  raise CooldownError, next_allowed_at if next_allowed_at.present?
end
# Maps user-supplied provider identifiers (string or symbol) onto canonical
# source_kind values; both the short alias ("wakatime") and the canonical
# name ("wakatime_dump") are accepted. Raises InvalidProviderError for
# anything unrecognized.
def self.normalize_provider(provider)
  case provider.to_s
  when "wakatime", "wakatime_dump" then "wakatime_dump"
  when "hackatime_v1", "hackatime_v1_dump" then "hackatime_v1_dump"
  else
    raise InvalidProviderError, "Unsupported import provider."
  end
end
# Transparently gunzips gzip-compressed payloads; any other input (including
# nil and plain JSON text) passes through unchanged.
#
# Only the first two bytes are inspected for the gzip magic number
# (0x1f 0x8b). The original used `file_content&.bytes&.first(2)`, which
# materializes a byte array for the *entire* dump — potentially hundreds of
# megabytes — just to read two bytes; byteslice keeps this O(1).
def self.decode_file_content(file_content)
  return file_content unless file_content && file_content.byteslice(0, 2)&.bytes == [ 0x1f, 0x8b ]

  gz_reader = Zlib::GzipReader.new(StringIO.new(file_content))
  gz_reader.read
ensure
  gz_reader&.close
end
# After a remote import backfills history, wipe and rebuild the user's
# Sailors Log projects summary so it reflects the newly imported heartbeats.
# No-op for users without a Sailors Log record.
def self.reset_sailors_log!(user)
  sailors_log = user.sailors_log
  return if sailors_log.blank?

  sailors_log.update!(projects_summary: {})
  sailors_log.send(:initialize_projects_summary)
end
# True only in development when GoodJob runs jobs inline (no worker process);
# used to decide between perform_now and perform_later when refreshing runs.
def self.inline_good_job_execution?
  return false unless Rails.env.development?

  Rails.application.config.good_job.execution_mode == :inline
end
end

View file

@ -2,13 +2,8 @@ class HeartbeatImportService
BATCH_SIZE = 50_000
def self.import_from_file(file_content, user, on_progress: nil, progress_interval: 250)
unless Rails.env.development?
raise StandardError, "Not dev env, not running"
end
start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
user_id = user.id
indexed_attrs = Heartbeat.indexed_attributes
imported_count = 0
total_count = 0
errors = []
@ -31,9 +26,9 @@ class HeartbeatImportService
language: hb["language"],
editor: hb["editor"],
operating_system: hb["operating_system"],
machine: hb["machine"],
machine: hb["machine"] || hb["machine_name_id"],
branch: hb["branch"],
user_agent: hb["user_agent"],
user_agent: hb["user_agent"] || hb["user_agent_id"],
is_write: hb["is_write"] || false,
line_additions: hb["line_additions"],
line_deletions: hb["line_deletions"],
@ -42,16 +37,13 @@ class HeartbeatImportService
cursorpos: hb["cursorpos"],
dependencies: hb["dependencies"] || [],
project_root_count: hb["project_root_count"],
source_type: 1
source_type: Heartbeat.source_types.fetch("wakapi_import")
}
string_attrs = attrs.transform_keys(&:to_s)
hash_input = indexed_attrs.each_with_object({}) { |k, h| h[k] = string_attrs[k] }
fields_hash = Digest::MD5.hexdigest(Oj.dump(hash_input, mode: :compat))
attrs[:fields_hash] = fields_hash
attrs[:fields_hash] = Heartbeat.generate_fields_hash(attrs)
existing = seen_hashes[fields_hash]
seen_hashes[fields_hash] = attrs if existing.nil? || attrs[:time] > existing[:time]
existing = seen_hashes[attrs[:fields_hash]]
seen_hashes[attrs[:fields_hash]] = attrs if existing.nil? || attrs[:time] > existing[:time]
if seen_hashes.size >= BATCH_SIZE
imported_count += flush_batch(seen_hashes)
@ -66,7 +58,7 @@ class HeartbeatImportService
on_progress&.call(total_count)
if total_count.zero?
raise StandardError, "Not correct format, download from /my/settings on the offical hackatime then import here"
raise StandardError, "Expected a heartbeat export JSON file."
end
imported_count += flush_batch(seen_hashes) if seen_hashes.any?
@ -122,15 +114,14 @@ class HeartbeatImportService
class HeartbeatSaxHandler < Oj::Saj
def initialize(&block)
@block = block
@in_heartbeats = false
@depth = 0
@current_heartbeat = nil
@current_key = nil
@array_depth = 0
@heartbeat_array_depths = []
@field_array_stack = []
end
def hash_start(key)
if @in_heartbeats && @depth == 2
if inside_heartbeat_array? && @depth == @heartbeat_array_depths.last + 1
@current_heartbeat = {}
end
@depth += 1
@ -138,44 +129,43 @@ class HeartbeatImportService
def hash_end(key)
@depth -= 1
if @in_heartbeats && @depth == 2 && @current_heartbeat
if inside_heartbeat_array? && @depth == @heartbeat_array_depths.last + 1 && @current_heartbeat
@block.call(@current_heartbeat)
@current_heartbeat = nil
end
end
def array_start(key)
if key == "heartbeats" && @depth == 1
@in_heartbeats = true
elsif @current_heartbeat && @current_key
@current_heartbeat[@current_key] = []
@array_depth += 1
@heartbeat_array_depths << @depth if key == "heartbeats"
if @current_heartbeat && key.present?
@current_heartbeat[key] = []
@field_array_stack << key
end
@depth += 1
end
def array_end(key)
@depth -= 1
if key == "heartbeats"
@in_heartbeats = false
elsif @array_depth > 0
@array_depth -= 1
end
@heartbeat_array_depths.pop if key == "heartbeats" && @heartbeat_array_depths.last == @depth
@field_array_stack.pop if @field_array_stack.last == key
end
def add_value(value, key)
return unless @current_heartbeat
if key
@current_key = key
if @array_depth > 0 && @current_heartbeat[@current_key].is_a?(Array)
@current_heartbeat[@current_key] << value
else
@current_heartbeat[key] = value
elsif @field_array_stack.any?
@current_heartbeat[@field_array_stack.last] << value
end
elsif @array_depth > 0 && @current_key && @current_heartbeat[@current_key].is_a?(Array)
@current_heartbeat[@current_key] << value
end
end
private
def inside_heartbeat_array?
@heartbeat_array_depths.any?
end
end
end

View file

@ -1,63 +0,0 @@
# Minimal client for WakaTime-compatible APIs (WakaTime, Hackatime v1,
# Wakapi). Authenticates with HTTP Basic using the API key as the username
# and maps transport/HTTP failures onto three error classes callers can
# branch on.
class WakatimeCompatibleClient
  # Credentials were rejected (HTTP 401/403).
  class AuthenticationError < StandardError; end
  # Temporary failure worth retrying (timeouts, 408/429, 5xx).
  class TransientError < StandardError; end
  # Permanent failure (other non-success statuses, malformed responses).
  class RequestError < StandardError; end

  def initialize(endpoint_url:, api_key:)
    # Strip trailing slashes so path joining in get_json stays predictable.
    @endpoint_url = endpoint_url.to_s.sub(%r{/*\z}, "")
    @api_key = api_key.to_s
  end

  # Returns the account's earliest activity Date, as reported by the
  # all_time_since_today endpoint. Raises RequestError when the date is
  # missing or not valid ISO-8601.
  def fetch_all_time_since_today_start_date
    body = get_json("/users/current/all_time_since_today")
    # Different server implementations nest the range under different keys.
    start_date = body.dig("data", "range", "start_date") ||
      body.dig("data", "start_date") ||
      body.dig("range", "start_date")
    raise RequestError, "Missing start_date in all_time_since_today response" if start_date.blank?
    Date.iso8601(start_date.to_s)
  rescue ArgumentError
    raise RequestError, "Invalid start_date in all_time_since_today response"
  end

  # Fetches all heartbeats recorded on +date+, tolerating the three response
  # shapes seen in the wild: a bare array, {"data" => [...]}, or
  # {"heartbeats" => [...]}. Anything else yields an empty array.
  def fetch_heartbeats(date:)
    body = get_json("/users/current/heartbeats", params: { date: date.iso8601 })
    return body if body.is_a?(Array)
    return body["data"] if body["data"].is_a?(Array)
    return body["heartbeats"] if body["heartbeats"].is_a?(Array)
    []
  end

  private

  # Performs a GET against the configured endpoint and parses the JSON body,
  # translating failures into the error hierarchy above. Check order matters:
  # auth errors first, then retryable statuses, then any other non-success.
  def get_json(path, params: nil)
    response = HTTP.timeout(connect: 5, read: 30, write: 10)
      .headers(headers)
      .get("#{@endpoint_url}#{path}", params:)
    status = response.status.to_i
    raise AuthenticationError, "Authentication failed (#{status})" if [ 401, 403 ].include?(status)
    raise TransientError, "Request failed with status #{status}" if status == 408 || status == 429 || status >= 500
    raise RequestError, "Request failed with status #{status}" unless response.status.success?
    JSON.parse(response.to_s)
  rescue HTTP::TimeoutError, HTTP::ConnectionError => e
    raise TransientError, e.message
  rescue JSON::ParserError
    raise RequestError, "Invalid JSON response"
  end

  # Basic auth with the API key as the username and an empty password, per
  # the WakaTime API convention.
  def headers
    {
      "Authorization" => "Basic #{Base64.strict_encode64("#{@api_key}:")}",
      "Content-Type" => "application/json",
      "Accept" => "application/json"
    }
  end
end

View file

@ -188,39 +188,39 @@
"@tailwindcss/forms": ["@tailwindcss/forms@0.5.11", "", { "dependencies": { "mini-svg-data-uri": "^1.2.3" }, "peerDependencies": { "tailwindcss": ">=3.0.0 || >= 3.0.0-alpha.1 || >= 4.0.0-alpha.20 || >= 4.0.0-beta.1" } }, "sha512-h9wegbZDPurxG22xZSoWtdzc41/OlNEUQERNqI/0fOwa2aVlWGu7C35E/x6LDyD3lgtztFSSjKZyuVM0hxhbgA=="],
"@tailwindcss/node": ["@tailwindcss/node@4.1.18", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.1", "lightningcss": "1.30.2", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.1.18" } }, "sha512-DoR7U1P7iYhw16qJ49fgXUlry1t4CpXeErJHnQ44JgTSKMaZUdf17cfn5mHchfJ4KRBZRFA/Coo+MUF5+gOaCQ=="],
"@tailwindcss/node": ["@tailwindcss/node@4.2.1", "", { "dependencies": { "@jridgewell/remapping": "^2.3.5", "enhanced-resolve": "^5.19.0", "jiti": "^2.6.1", "lightningcss": "1.31.1", "magic-string": "^0.30.21", "source-map-js": "^1.2.1", "tailwindcss": "4.2.1" } }, "sha512-jlx6sLk4EOwO6hHe1oCGm1Q4AN/s0rSrTTPBGPM0/RQ6Uylwq17FuU8IeJJKEjtc6K6O07zsvP+gDO6MMWo7pg=="],
"@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.18", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.18", "@tailwindcss/oxide-darwin-arm64": "4.1.18", "@tailwindcss/oxide-darwin-x64": "4.1.18", "@tailwindcss/oxide-freebsd-x64": "4.1.18", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.18", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.18", "@tailwindcss/oxide-linux-arm64-musl": "4.1.18", "@tailwindcss/oxide-linux-x64-gnu": "4.1.18", "@tailwindcss/oxide-linux-x64-musl": "4.1.18", "@tailwindcss/oxide-wasm32-wasi": "4.1.18", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.18", "@tailwindcss/oxide-win32-x64-msvc": "4.1.18" } }, "sha512-EgCR5tTS5bUSKQgzeMClT6iCY3ToqE1y+ZB0AKldj809QXk1Y+3jB0upOYZrn9aGIzPtUsP7sX4QQ4XtjBB95A=="],
"@tailwindcss/oxide": ["@tailwindcss/oxide@4.2.1", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.2.1", "@tailwindcss/oxide-darwin-arm64": "4.2.1", "@tailwindcss/oxide-darwin-x64": "4.2.1", "@tailwindcss/oxide-freebsd-x64": "4.2.1", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.2.1", "@tailwindcss/oxide-linux-arm64-gnu": "4.2.1", "@tailwindcss/oxide-linux-arm64-musl": "4.2.1", "@tailwindcss/oxide-linux-x64-gnu": "4.2.1", "@tailwindcss/oxide-linux-x64-musl": "4.2.1", "@tailwindcss/oxide-wasm32-wasi": "4.2.1", "@tailwindcss/oxide-win32-arm64-msvc": "4.2.1", "@tailwindcss/oxide-win32-x64-msvc": "4.2.1" } }, "sha512-yv9jeEFWnjKCI6/T3Oq50yQEOqmpmpfzG1hcZsAOaXFQPfzWprWrlHSdGPEF3WQTi8zu8ohC9Mh9J470nT5pUw=="],
"@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.18", "", { "os": "android", "cpu": "arm64" }, "sha512-dJHz7+Ugr9U/diKJA0W6N/6/cjI+ZTAoxPf9Iz9BFRF2GzEX8IvXxFIi/dZBloVJX/MZGvRuFA9rqwdiIEZQ0Q=="],
"@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.2.1", "", { "os": "android", "cpu": "arm64" }, "sha512-eZ7G1Zm5EC8OOKaesIKuw77jw++QJ2lL9N+dDpdQiAB/c/B2wDh0QPFHbkBVrXnwNugvrbJFk1gK2SsVjwWReg=="],
"@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.18", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gc2q4Qhs660bhjyBSKgq6BYvwDz4G+BuyJ5H1xfhmDR3D8HnHCmT/BSkvSL0vQLy/nkMLY20PQ2OoYMO15Jd0A=="],
"@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.2.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-q/LHkOstoJ7pI1J0q6djesLzRvQSIfEto148ppAd+BVQK0JYjQIFSK3JgYZJa+Yzi0DDa52ZsQx2rqytBnf8Hw=="],
"@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.18", "", { "os": "darwin", "cpu": "x64" }, "sha512-FL5oxr2xQsFrc3X9o1fjHKBYBMD1QZNyc1Xzw/h5Qu4XnEBi3dZn96HcHm41c/euGV+GRiXFfh2hUCyKi/e+yw=="],
"@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.2.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-/f/ozlaXGY6QLbpvd/kFTro2l18f7dHKpB+ieXz+Cijl4Mt9AI2rTrpq7V+t04nK+j9XBQHnSMdeQRhbGyt6fw=="],
"@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.18", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Fj+RHgu5bDodmV1dM9yAxlfJwkkWvLiRjbhuO2LEtwtlYlBgiAT4x/j5wQr1tC3SANAgD+0YcmWVrj8R9trVMA=="],
"@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.2.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-5e/AkgYJT/cpbkys/OU2Ei2jdETCLlifwm7ogMC7/hksI2fC3iiq6OcXwjibcIjPung0kRtR3TxEITkqgn0TcA=="],
"@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.18", "", { "os": "linux", "cpu": "arm" }, "sha512-Fp+Wzk/Ws4dZn+LV2Nqx3IilnhH51YZoRaYHQsVq3RQvEl+71VGKFpkfHrLM/Li+kt5c0DJe/bHXK1eHgDmdiA=="],
"@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.2.1", "", { "os": "linux", "cpu": "arm" }, "sha512-Uny1EcVTTmerCKt/1ZuKTkb0x8ZaiuYucg2/kImO5A5Y/kBz41/+j0gxUZl+hTF3xkWpDmHX+TaWhOtba2Fyuw=="],
"@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-S0n3jboLysNbh55Vrt7pk9wgpyTTPD0fdQeh7wQfMqLPM/Hrxi+dVsLsPrycQjGKEQk85Kgbx+6+QnYNiHalnw=="],
"@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.2.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-CTrwomI+c7n6aSSQlsPL0roRiNMDQ/YzMD9EjcR+H4f0I1SQ8QqIuPnsVp7QgMkC1Qi8rtkekLkOFjo7OlEFRQ=="],
"@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.18", "", { "os": "linux", "cpu": "arm64" }, "sha512-1px92582HkPQlaaCkdRcio71p8bc8i/ap5807tPRDK/uw953cauQBT8c5tVGkOwrHMfc2Yh6UuxaH4vtTjGvHg=="],
"@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.2.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-WZA0CHRL/SP1TRbA5mp9htsppSEkWuQ4KsSUumYQnyl8ZdT39ntwqmz4IUHGN6p4XdSlYfJwM4rRzZLShHsGAQ=="],
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-v3gyT0ivkfBLoZGF9LyHmts0Isc8jHZyVcbzio6Wpzifg/+5ZJpDiRiUhDLkcr7f/r38SWNe7ucxmGW3j3Kb/g=="],
"@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.2.1", "", { "os": "linux", "cpu": "x64" }, "sha512-qMFzxI2YlBOLW5PhblzuSWlWfwLHaneBE0xHzLrBgNtqN6mWfs+qYbhryGSXQjFYB1Dzf5w+LN5qbUTPhW7Y5g=="],
"@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.18", "", { "os": "linux", "cpu": "x64" }, "sha512-bhJ2y2OQNlcRwwgOAGMY0xTFStt4/wyU6pvI6LSuZpRgKQwxTec0/3Scu91O8ir7qCR3AuepQKLU/kX99FouqQ=="],
"@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.2.1", "", { "os": "linux", "cpu": "x64" }, "sha512-5r1X2FKnCMUPlXTWRYpHdPYUY6a1Ar/t7P24OuiEdEOmms5lyqjDRvVY1yy9Rmioh+AunQ0rWiOTPE8F9A3v5g=="],
"@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.18", "", { "dependencies": { "@emnapi/core": "^1.7.1", "@emnapi/runtime": "^1.7.1", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.1.0", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.4.0" }, "cpu": "none" }, "sha512-LffYTvPjODiP6PT16oNeUQJzNVyJl1cjIebq/rWWBF+3eDst5JGEFSc5cWxyRCJ0Mxl+KyIkqRxk1XPEs9x8TA=="],
"@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.2.1", "", { "dependencies": { "@emnapi/core": "^1.8.1", "@emnapi/runtime": "^1.8.1", "@emnapi/wasi-threads": "^1.1.0", "@napi-rs/wasm-runtime": "^1.1.1", "@tybys/wasm-util": "^0.10.1", "tslib": "^2.8.1" }, "cpu": "none" }, "sha512-MGFB5cVPvshR85MTJkEvqDUnuNoysrsRxd6vnk1Lf2tbiqNlXpHYZqkqOQalydienEWOHHFyyuTSYRsLfxFJ2Q=="],
"@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.18", "", { "os": "win32", "cpu": "arm64" }, "sha512-HjSA7mr9HmC8fu6bdsZvZ+dhjyGCLdotjVOgLA2vEqxEBZaQo9YTX4kwgEvPCpRh8o4uWc4J/wEoFzhEmjvPbA=="],
"@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.2.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-YlUEHRHBGnCMh4Nj4GnqQyBtsshUPdiNroZj8VPkvTZSoHsilRCwXcVKnG9kyi0ZFAS/3u+qKHBdDc81SADTRA=="],
"@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.18", "", { "os": "win32", "cpu": "x64" }, "sha512-bJWbyYpUlqamC8dpR7pfjA0I7vdF6t5VpUGMWRkXVE3AXgIZjYUYAK7II1GNaxR8J1SSrSrppRar8G++JekE3Q=="],
"@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.2.1", "", { "os": "win32", "cpu": "x64" }, "sha512-rbO34G5sMWWyrN/idLeVxAZgAKWrn5LiR3/I90Q9MkA67s6T1oB0xtTe+0heoBvHSpbU9Mk7i6uwJnpo4u21XQ=="],
"@tailwindcss/typography": ["@tailwindcss/typography@0.5.19", "", { "dependencies": { "postcss-selector-parser": "6.0.10" }, "peerDependencies": { "tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1" } }, "sha512-w31dd8HOx3k9vPtcQh5QHP9GwKcgbMp87j58qi6xgiBnFFtKEAgCWnDw4qUT8aHwkCp8bKvb/KGKWWHedP0AAg=="],
"@tailwindcss/vite": ["@tailwindcss/vite@4.1.18", "", { "dependencies": { "@tailwindcss/node": "4.1.18", "@tailwindcss/oxide": "4.1.18", "tailwindcss": "4.1.18" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-jVA+/UpKL1vRLg6Hkao5jldawNmRo7mQYrZtNHMIVpLfLhDml5nMRUo/8MwoX2vNXvnaXNNMedrMfMugAVX1nA=="],
"@tailwindcss/vite": ["@tailwindcss/vite@4.2.1", "", { "dependencies": { "@tailwindcss/node": "4.2.1", "@tailwindcss/oxide": "4.2.1", "tailwindcss": "4.2.1" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, "sha512-TBf2sJjYeb28jD2U/OhwdW0bbOsxkWPwQ7SrqGf9sVcoYwZj7rkXljroBO9wKBut9XnmQLXanuDUeqQK0lGg/w=="],
"@tsconfig/svelte": ["@tsconfig/svelte@5.0.7", "", {}, "sha512-NOtJF9LQnV7k6bpzcXwL/rXdlFHvAT9e0imrftiMc6/+FUNBHRZ8UngDrM+jciA6ENzFYNoFs8rfwumuGF+Dhw=="],
"@tsconfig/svelte": ["@tsconfig/svelte@5.0.8", "", {}, "sha512-UkNnw1/oFEfecR8ypyHIQuWYdkPvHiwcQ78sh+ymIiYoF+uc5H1UBetbjyqT+vgGJ3qQN6nhucJviX6HesWtKQ=="],
"@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="],
@ -238,17 +238,17 @@
"arg": ["arg@5.0.2", "", {}, "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg=="],
"aria-query": ["aria-query@5.3.2", "", {}, "sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw=="],
"aria-query": ["aria-query@5.3.1", "", {}, "sha512-Z/ZeOgVl7bcSYZ/u/rh0fOpvEpq//LZmdbkXyc7syVzjPAhfOa9ebsdTSjEBDU4vs5nC98Kfduj1uFo0qyET3g=="],
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
"axios": ["axios@1.13.5", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q=="],
"axios": ["axios@1.13.6", "", { "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", "proxy-from-env": "^1.1.0" } }, "sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ=="],
"axobject-query": ["axobject-query@4.1.0", "", {}, "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ=="],
"binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="],
"bits-ui": ["bits-ui@2.15.5", "", { "dependencies": { "@floating-ui/core": "^1.7.1", "@floating-ui/dom": "^1.7.1", "esm-env": "^1.1.2", "runed": "^0.35.1", "svelte-toolbelt": "^0.10.6", "tabbable": "^6.2.0" }, "peerDependencies": { "@internationalized/date": "^3.8.1", "svelte": "^5.33.0" } }, "sha512-WhS+P+E//ClLfKU6KqjKC17nGDRLnz+vkwoP6ClFUPd5m1fFVDxTElPX8QVsduLj5V1KFDxlnv6sW2G5Lqk+vw=="],
"bits-ui": ["bits-ui@2.16.3", "", { "dependencies": { "@floating-ui/core": "^1.7.1", "@floating-ui/dom": "^1.7.1", "esm-env": "^1.1.2", "runed": "^0.35.1", "svelte-toolbelt": "^0.10.6", "tabbable": "^6.2.0" }, "peerDependencies": { "@internationalized/date": "^3.8.1", "svelte": "^5.33.0" } }, "sha512-5hJ5dEhf5yPzkRFcxzgQHScGodeo0gK0MUUXrdLlRHWaBOBGZiacWLG96j/wwFatKwZvouw7q+sn14i0fx3RIg=="],
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
@ -330,7 +330,7 @@
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"devalue": ["devalue@5.6.2", "", {}, "sha512-nPRkjWzzDQlsejL1WVifk5rvcFi/y1onBRxjaFMjZeR9mFpqu2gmAZ9xUB9/IEanEP/vBtGeGganC/GO1fmufg=="],
"devalue": ["devalue@5.6.4", "", {}, "sha512-Gp6rDldRsFh/7XuouDbxMH3Mx8GMCcgzIb1pDTvNyn8pZGQ22u+Wa+lGV9dQCltFQ7uVw0MhRyb8XDskNFOReA=="],
"didyoumean": ["didyoumean@1.2.2", "", {}, "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw=="],
@ -508,7 +508,7 @@
"prettier": ["prettier@3.8.1", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg=="],
"prettier-plugin-svelte": ["prettier-plugin-svelte@3.4.1", "", { "peerDependencies": { "prettier": "^3.0.0", "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" } }, "sha512-xL49LCloMoZRvSwa6IEdN2GV6cq2IqpYGstYtMT+5wmml1/dClEoI0MZR78MiVPpu6BdQFfN0/y73yO6+br5Pg=="],
"prettier-plugin-svelte": ["prettier-plugin-svelte@3.5.1", "", { "peerDependencies": { "prettier": "^3.0.0", "svelte": "^3.2.0 || ^4.0.0-next.0 || ^5.0.0-next.0" } }, "sha512-65+fr5+cgIKWKiqM1Doum4uX6bY8iFCdztvvp2RcF+AJoieaw9kJOFMNcJo/bkmKYsxFaM9OsVZK/gWauG/5mg=="],
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
@ -544,9 +544,9 @@
"supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="],
"svelte": ["svelte@5.51.2", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "@types/trusted-types": "^2.0.7", "acorn": "^8.12.1", "aria-query": "^5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.2", "esm-env": "^1.2.1", "esrap": "^2.2.2", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-AqApqNOxVS97V4Ko9UHTHeSuDJrwauJhZpLDs1gYD8Jk48ntCSWD7NxKje+fnGn5Ja1O3u2FzQZHPdifQjXe3w=="],
"svelte": ["svelte@5.53.11", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "@jridgewell/sourcemap-codec": "^1.5.0", "@sveltejs/acorn-typescript": "^1.0.5", "@types/estree": "^1.0.5", "@types/trusted-types": "^2.0.7", "acorn": "^8.12.1", "aria-query": "5.3.1", "axobject-query": "^4.1.0", "clsx": "^2.1.1", "devalue": "^5.6.3", "esm-env": "^1.2.1", "esrap": "^2.2.2", "is-reference": "^3.0.3", "locate-character": "^3.0.0", "magic-string": "^0.30.11", "zimmerframe": "^1.1.2" } }, "sha512-GYmqRjRhJYLQBonfdfGAt28gkfWEShrtXKGXcFGneXi502aBE+I1dJcs/YQriByvP6xqXRz/OdBGC6tfvUQHyQ=="],
"svelte-check": ["svelte-check@4.4.0", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-gB3FdEPb8tPO3Y7Dzc6d/Pm/KrXAhK+0Fk+LkcysVtupvAh6Y/IrBCEZNupq57oh0hcwlxCUamu/rq7GtvfSEg=="],
"svelte-check": ["svelte-check@4.4.5", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.25", "chokidar": "^4.0.1", "fdir": "^6.2.0", "picocolors": "^1.0.0", "sade": "^1.7.4" }, "peerDependencies": { "svelte": "^4.0.0 || ^5.0.0-next.0", "typescript": ">=5.0.0" }, "bin": { "svelte-check": "bin/svelte-check" } }, "sha512-1bSwIRCvvmSHrlK52fOlZmVtUZgil43jNL/2H18pRpa+eQjzGt6e3zayxhp1S7GajPFKNM/2PMCG+DZFHlG9fw=="],
"svelte-toolbelt": ["svelte-toolbelt@0.10.6", "", { "dependencies": { "clsx": "^2.1.1", "runed": "^0.35.1", "style-to-object": "^1.0.8" }, "peerDependencies": { "svelte": "^5.30.2" } }, "sha512-YWuX+RE+CnWYx09yseAe4ZVMM7e7GRFZM6OYWpBKOb++s+SQ8RBIMMe+Bs/CznBMc0QPLjr+vDBxTAkozXsFXQ=="],
@ -554,7 +554,7 @@
"tailwind-merge": ["tailwind-merge@2.6.1", "", {}, "sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ=="],
"tailwindcss": ["tailwindcss@4.1.18", "", {}, "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw=="],
"tailwindcss": ["tailwindcss@4.2.1", "", {}, "sha512-/tBrSQ36vCleJkAOsy9kbNTgaxvGbyOamC30PRePTQe/o1MFwEKHQk4Cn7BNGaPtjp+PuUrByJehM1hgxfq4sw=="],
"tapable": ["tapable@2.3.0", "", {}, "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="],
@ -576,7 +576,7 @@
"vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="],
"vite-plugin-ruby": ["vite-plugin-ruby@5.1.2", "", { "dependencies": { "debug": "^4.3.4", "fast-glob": "^3.3.2" }, "peerDependencies": { "vite": ">=5.0.0" } }, "sha512-aIhAhq9BOVhfWGYuludgYK9ch58SQClpDVyF8WfpNsr5xr6dCw24mgpFso2KsXYONGYzX2kDt+cca54nDzkitg=="],
"vite-plugin-ruby": ["vite-plugin-ruby@5.1.3", "", { "dependencies": { "debug": "^4.3.4", "fast-glob": "^3.3.2" }, "peerDependencies": { "vite": ">=5.0.0" } }, "sha512-vpTOKbR6AmnupmXvQccRcr/jIY+oobNuCbNJHUzkT8oQ2BBQSlp22Xec3zszBBc7GjwDGn2+E+IQoNRXdEY7Ig=="],
"vitefu": ["vitefu@1.1.1", "", { "peerDependencies": { "vite": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0-beta.0" }, "optionalPeers": ["vite"] }, "sha512-B/Fegf3i8zh0yFbpzZ21amWzHmuNlLlmJT6n7bu5e+pCHUKQIfXSYokrqOBGEMMe9UG2sostKQF9mml/vYaWJQ=="],
@ -584,10 +584,12 @@
"zod": ["zod@3.25.76", "", {}, "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ=="],
"@inertiajs/svelte/@inertiajs/core": ["@inertiajs/core@file:vendor/inertia/packages/core", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.31", "@types/lodash-es": "^4.17.12", "laravel-precognition": "2.0.0-beta.0", "lodash-es": "^4.17.23" }, "peerDependencies": { "axios": "^1.13.2" }, "optionalPeers": ["axios"] }],
"@inertiajs/svelte/@inertiajs/core": ["@inertiajs/core@file:vendor/inertia/packages/core", {}],
"@layerstack/tailwind/tailwindcss": ["tailwindcss@3.4.19", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "arg": "^5.0.2", "chokidar": "^3.6.0", "didyoumean": "^1.2.2", "dlv": "^1.1.3", "fast-glob": "^3.3.2", "glob-parent": "^6.0.2", "is-glob": "^4.0.3", "jiti": "^1.21.7", "lilconfig": "^3.1.3", "micromatch": "^4.0.8", "normalize-path": "^3.0.0", "object-hash": "^3.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.47", "postcss-import": "^15.1.0", "postcss-js": "^4.0.1", "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", "postcss-nested": "^6.2.0", "postcss-selector-parser": "^6.1.2", "resolve": "^1.22.8", "sucrase": "^3.35.0" }, "bin": { "tailwind": "lib/cli.js", "tailwindcss": "lib/cli.js" } }, "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ=="],
"@tailwindcss/node/lightningcss": ["lightningcss@1.31.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.31.1", "lightningcss-darwin-arm64": "1.31.1", "lightningcss-darwin-x64": "1.31.1", "lightningcss-freebsd-x64": "1.31.1", "lightningcss-linux-arm-gnueabihf": "1.31.1", "lightningcss-linux-arm64-gnu": "1.31.1", "lightningcss-linux-arm64-musl": "1.31.1", "lightningcss-linux-x64-gnu": "1.31.1", "lightningcss-linux-x64-musl": "1.31.1", "lightningcss-win32-arm64-msvc": "1.31.1", "lightningcss-win32-x64-msvc": "1.31.1" } }, "sha512-l51N2r93WmGUye3WuFoN5k10zyvrVs0qfKBhyC5ogUQ6Ew6JUSswh78mbSO+IU3nTWsyOArqPCcShdQSadghBQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.8.1", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg=="],
@ -620,6 +622,28 @@
"@layerstack/tailwind/tailwindcss/postcss-selector-parser": ["postcss-selector-parser@6.1.2", "", { "dependencies": { "cssesc": "^3.0.0", "util-deprecate": "^1.0.2" } }, "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg=="],
"@tailwindcss/node/lightningcss/lightningcss-android-arm64": ["lightningcss-android-arm64@1.31.1", "", { "os": "android", "cpu": "arm64" }, "sha512-HXJF3x8w9nQ4jbXRiNppBCqeZPIAfUo8zE/kOEGbW5NZvGc/K7nMxbhIr+YlFlHW5mpbg/YFPdbnCh1wAXCKFg=="],
"@tailwindcss/node/lightningcss/lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.31.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-02uTEqf3vIfNMq3h/z2cJfcOXnQ0GRwQrkmPafhueLb2h7mqEidiCzkE4gBMEH65abHRiQvhdcQ+aP0D0g67sg=="],
"@tailwindcss/node/lightningcss/lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.31.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-1ObhyoCY+tGxtsz1lSx5NXCj3nirk0Y0kB/g8B8DT+sSx4G9djitg9ejFnjb3gJNWo7qXH4DIy2SUHvpoFwfTA=="],
"@tailwindcss/node/lightningcss/lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.31.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-1RINmQKAItO6ISxYgPwszQE1BrsVU5aB45ho6O42mu96UiZBxEXsuQ7cJW4zs4CEodPUioj/QrXW1r9pLUM74A=="],
"@tailwindcss/node/lightningcss/lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.31.1", "", { "os": "linux", "cpu": "arm" }, "sha512-OOCm2//MZJ87CdDK62rZIu+aw9gBv4azMJuA8/KB74wmfS3lnC4yoPHm0uXZ/dvNNHmnZnB8XLAZzObeG0nS1g=="],
"@tailwindcss/node/lightningcss/lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.31.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-WKyLWztD71rTnou4xAD5kQT+982wvca7E6QoLpoawZ1gP9JM0GJj4Tp5jMUh9B3AitHbRZ2/H3W5xQmdEOUlLg=="],
"@tailwindcss/node/lightningcss/lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.31.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-mVZ7Pg2zIbe3XlNbZJdjs86YViQFoJSpc41CbVmKBPiGmC4YrfeOyz65ms2qpAobVd7WQsbW4PdsSJEMymyIMg=="],
"@tailwindcss/node/lightningcss/lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.31.1", "", { "os": "linux", "cpu": "x64" }, "sha512-xGlFWRMl+0KvUhgySdIaReQdB4FNudfUTARn7q0hh/V67PVGCs3ADFjw+6++kG1RNd0zdGRlEKa+T13/tQjPMA=="],
"@tailwindcss/node/lightningcss/lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.31.1", "", { "os": "linux", "cpu": "x64" }, "sha512-eowF8PrKHw9LpoZii5tdZwnBcYDxRw2rRCyvAXLi34iyeYfqCQNA9rmUM0ce62NlPhCvof1+9ivRaTY6pSKDaA=="],
"@tailwindcss/node/lightningcss/lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.31.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-aJReEbSEQzx1uBlQizAOBSjcmr9dCdL3XuC/6HLXAxmtErsj2ICo5yYggg1qOODQMtnjNQv2UHb9NpOuFtYe4w=="],
"@tailwindcss/node/lightningcss/lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.31.1", "", { "os": "win32", "cpu": "x64" }, "sha512-I9aiFrbd7oYHwlnQDqr1Roz+fTz61oDDJX7n9tYF9FJymH1cIN1DtKw3iYt6b8WZgEjoNwVSncwF4wx/ZedMhw=="],
"d3-sankey/d3-array/internmap": ["internmap@1.0.1", "", {}, "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw=="],
"d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg=="],

View file

@ -15,11 +15,6 @@ development:
encoding: unicode
pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 5 } %>
url: <%= ENV['DATABASE_URL'] %>
wakatime:
adapter: postgresql
encoding: unicode
url: <%= ENV['WAKATIME_DATABASE_URL'] %>
replica: true
sailors_log:
adapter: postgresql
encoding: unicode
@ -35,11 +30,6 @@ test:
adapter: postgresql
database: app_test
url: <%= ENV['TEST_DATABASE_URL'] %>
wakatime:
adapter: postgresql
database: app_test
url: <%= ENV['TEST_DATABASE_URL'] %>
replica: true
sailors_log:
adapter: postgresql
database: app_test
@ -55,11 +45,6 @@ production:
encoding: unicode
pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 16 }.to_i + 8 %> # Web threads + 8 GoodJob threads
url: <%= ENV['POOL_DATABASE_URL'] %>
wakatime:
adapter: postgresql
encoding: unicode
url: <%= ENV['WAKATIME_DATABASE_URL'] %>
replica: true
sailors_log:
adapter: postgresql
encoding: unicode

View file

@ -110,10 +110,6 @@ Rails.application.configure do
class: "Cache::HeartbeatCountsJob",
kwargs: { force_reload: true }
},
heartbeat_import_source_scheduler: {
cron: "*/5 * * * *",
class: "HeartbeatImportSourceSchedulerJob"
},
weekly_summary_email: {
cron: "30 17 * * 5",
class: "WeeklySummaryEmailJob",

View file

@ -134,8 +134,6 @@ Rails.application.routes.draw do
member do
patch :update_trust_level
end
resource :wakatime_mirrors, only: [ :create ]
resources :wakatime_mirrors, only: [ :destroy ]
end
get "my/projects", to: "my/project_repo_mappings#index", as: :my_projects
@ -157,17 +155,9 @@ Rails.application.routes.draw do
delete "my/settings/goals/:goal_id", to: "settings/goals#destroy", as: :my_settings_goal_destroy
get "my/settings/badges", to: "settings/badges#show", as: :my_settings_badges
get "my/settings/data", to: "settings/data#show", as: :my_settings_data
get "my/settings/admin", to: "settings/admin#show", as: :my_settings_admin
post "my/settings/migrate_heartbeats", to: "settings/data#migrate_heartbeats", as: :my_settings_migrate_heartbeats
post "my/settings/rotate_api_key", to: "settings/access#rotate_api_key", as: :my_settings_rotate_api_key
namespace :my do
resource :heartbeat_import_source,
only: [ :create, :update, :show, :destroy ],
controller: "heartbeat_import_sources" do
post :sync, on: :collection, action: :sync_now
end
resources :heartbeat_imports, only: [ :create, :show ]
resources :project_repo_mappings, param: :project_name, only: [ :edit, :update ], constraints: { project_name: /.+/ } do
@ -181,7 +171,6 @@ Rails.application.routes.draw do
resources :heartbeats, only: [] do
collection do
post :export
post :import
end
end
end

View file

@ -0,0 +1,29 @@
class CreateHeartbeatImportRuns < ActiveRecord::Migration[8.1]
def change
create_table :heartbeat_import_runs do |t|
t.references :user, null: false, foreign_key: true
t.integer :source_kind, null: false
t.integer :state, null: false, default: 0
t.string :source_filename
t.string :encrypted_api_key
t.string :remote_dump_id
t.string :remote_dump_status
t.float :remote_percent_complete
t.integer :processed_count, null: false, default: 0
t.integer :total_count
t.integer :imported_count
t.integer :skipped_count
t.integer :errors_count, null: false, default: 0
t.text :message
t.text :error_message
t.datetime :started_at
t.datetime :finished_at
t.datetime :remote_requested_at
t.timestamps
end
add_index :heartbeat_import_runs, [ :user_id, :created_at ]
add_index :heartbeat_import_runs, [ :user_id, :state ]
end
end

29
db/schema.rb generated
View file

@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[8.1].define(version: 2026_03_11_170528) do
ActiveRecord::Schema[8.1].define(version: 2026_03_12_134424) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_catalog.plpgsql"
enable_extension "pg_stat_statements"
@ -234,6 +234,32 @@ ActiveRecord::Schema[8.1].define(version: 2026_03_11_170528) do
t.index ["scheduled_at"], name: "index_good_jobs_on_scheduled_at", where: "(finished_at IS NULL)"
end
create_table "heartbeat_import_runs", force: :cascade do |t|
t.datetime "created_at", null: false
t.string "encrypted_api_key"
t.text "error_message"
t.integer "errors_count", default: 0, null: false
t.datetime "finished_at"
t.integer "imported_count"
t.text "message"
t.integer "processed_count", default: 0, null: false
t.string "remote_dump_id"
t.string "remote_dump_status"
t.float "remote_percent_complete"
t.datetime "remote_requested_at"
t.integer "skipped_count"
t.string "source_filename"
t.integer "source_kind", null: false
t.datetime "started_at"
t.integer "state", default: 0, null: false
t.integer "total_count"
t.datetime "updated_at", null: false
t.bigint "user_id", null: false
t.index ["user_id", "created_at"], name: "index_heartbeat_import_runs_on_user_id_and_created_at"
t.index ["user_id", "state"], name: "index_heartbeat_import_runs_on_user_id_and_state"
t.index ["user_id"], name: "index_heartbeat_import_runs_on_user_id"
end
create_table "heartbeat_import_sources", force: :cascade do |t|
t.date "backfill_cursor_date"
t.integer "consecutive_failures", default: 0, null: false
@ -654,6 +680,7 @@ ActiveRecord::Schema[8.1].define(version: 2026_03_11_170528) do
add_foreign_key "email_addresses", "users"
add_foreign_key "email_verification_requests", "users"
add_foreign_key "goals", "users"
add_foreign_key "heartbeat_import_runs", "users"
add_foreign_key "heartbeat_import_sources", "users"
add_foreign_key "heartbeats", "raw_heartbeat_uploads"
add_foreign_key "heartbeats", "users"

View file

@ -14,23 +14,23 @@
"@sveltejs/vite-plugin-svelte": "^6.2.4",
"@tailwindcss/forms": "^0.5.11",
"@tailwindcss/typography": "^0.5.19",
"@tailwindcss/vite": "^4.1.18",
"@tsconfig/svelte": "^5.0.7",
"axios": "^1.13.2",
"bits-ui": "^2.15.5",
"@tailwindcss/vite": "^4.2.1",
"@tsconfig/svelte": "^5.0.8",
"axios": "^1.13.6",
"bits-ui": "^2.16.3",
"d3-scale": "^4.0.2",
"layerchart": "^1.0.13",
"plur": "^6.0.0",
"svelte": "^5.51.2",
"svelte-check": "^4.4.0",
"tailwindcss": "^4.1.18",
"svelte": "^5.53.11",
"svelte-check": "^4.4.5",
"tailwindcss": "^4.2.1",
"tslib": "^2.8.1",
"typescript": "^5.9.3",
"vite": "^7.3.1",
"vite-plugin-ruby": "^5.1.2"
"vite-plugin-ruby": "^5.1.3"
},
"devDependencies": {
"prettier": "^3.8.1",
"prettier-plugin-svelte": "^3.4.1"
"prettier-plugin-svelte": "^3.5.1"
}
}

View file

@ -98,33 +98,34 @@ RSpec.describe 'Api::V1::My', type: :request do
end
end
path '/my/heartbeats/import' do
post('Import Heartbeats') do
path '/my/heartbeat_imports' do
post('Create Heartbeat Import') do
tags 'My Data'
description 'Import heartbeats from a JSON file.'
description 'Start a development upload import or a one-time remote dump import.'
security [ Bearer: [], ApiKeyAuth: [] ]
consumes 'multipart/form-data'
produces 'application/json'
parameter name: :heartbeat_file,
parameter name: :"heartbeat_import[provider]",
in: :formData,
schema: { type: :string, format: :binary },
description: 'JSON file containing heartbeats'
schema: { type: :string, enum: %w[wakatime_dump hackatime_v1_dump] },
description: 'Remote import provider preset'
parameter name: :"heartbeat_import[api_key]",
in: :formData,
schema: { type: :string },
description: 'API key for the selected remote import provider'
response(302, 'redirect') do
response(202, 'accepted') do
let(:Authorization) { "Bearer dev-api-key-12345" }
let(:api_key) { 'dev-api-key-12345' }
let(:heartbeat_file) do
Rack::Test::UploadedFile.new(
StringIO.new("[]"),
"application/json",
original_filename: "heartbeats.json"
)
end
let(:"heartbeat_import[provider]") { "wakatime_dump" }
let(:"heartbeat_import[api_key]") { "test-api-key" }
before do
login_browser_user
Flipper.enable_actor(:imports, user)
end
after { Flipper.disable(:imports) }
run_test!
end
end
@ -251,29 +252,29 @@ RSpec.describe 'Api::V1::My', type: :request do
end
end
path '/my/settings/migrate_heartbeats' do
post('Migrate Heartbeats') do
tags 'My Settings'
description 'Trigger a migration of heartbeats from legacy formats or systems.'
path '/my/heartbeat_imports/{id}' do
get('Get Heartbeat Import Status') do
tags 'My Data'
description 'Fetch the latest state for a heartbeat import run.'
security [ Bearer: [], ApiKeyAuth: [] ]
produces 'application/json'
response(302, 'redirect') do
parameter name: :id, in: :path, type: :string, description: 'Heartbeat import run id'
response(200, 'successful') do
let(:Authorization) { "Bearer dev-api-key-12345" }
let(:api_key) { 'dev-api-key-12345' }
let(:id) do
HeartbeatImportRun.create!(
user: user,
source_kind: :dev_upload,
state: :completed,
source_filename: "heartbeats.json",
message: "Completed."
).id
end
before do
login_browser_user
@hackatime_v1_import_was_enabled = Flipper.enabled?(:hackatime_v1_import)
Flipper.enable(:hackatime_v1_import)
end
after do
if @hackatime_v1_import_was_enabled
Flipper.enable(:hackatime_v1_import)
else
Flipper.disable(:hackatime_v1_import)
end
end
before { login_browser_user }
run_test!
end
end

View file

@ -2591,26 +2591,28 @@ paths:
responses:
'302':
description: redirect
"/my/heartbeats/import":
"/my/heartbeat_imports":
post:
summary: Import Heartbeats
summary: Create Heartbeat Import
tags:
- My Data
description: Import heartbeats from a JSON file.
description: Start a development upload import or a one-time remote dump import.
security:
- Bearer: []
ApiKeyAuth: []
parameters: []
responses:
'302':
description: redirect
'202':
description: accepted
requestBody:
content:
multipart/form-data:
schema:
type: string
format: binary
description: JSON file containing heartbeats
enum:
- wakatime_dump
- hackatime_v1_dump
description: Remote import provider preset
"/my/projects":
get:
summary: List Project Repo Mappings
@ -2723,18 +2725,25 @@ paths:
responses:
'200':
description: successful
"/my/settings/migrate_heartbeats":
post:
summary: Migrate Heartbeats
"/my/heartbeat_imports/{id}":
get:
summary: Get Heartbeat Import Status
tags:
- My Settings
description: Trigger a migration of heartbeats from legacy formats or systems.
- My Data
description: Fetch the latest state for a heartbeat import run.
security:
- Bearer: []
ApiKeyAuth: []
parameters:
- name: id
in: path
description: Heartbeat import run id
required: true
schema:
type: string
responses:
'302':
description: redirect
'200':
description: successful
"/deletion":
post:
summary: Create Deletion Request

View file

@ -1,110 +0,0 @@
require "test_helper"
class My::HeartbeatImportSourcesControllerTest < ActionDispatch::IntegrationTest
setup do
Flipper.enable(:wakatime_imports_mirrors)
end
teardown do
Flipper.disable(:wakatime_imports_mirrors)
end
test "requires auth for create" do
post my_heartbeat_import_source_path, params: {
heartbeat_import_source: {
endpoint_url: "https://wakatime.com/api/v1",
encrypted_api_key: "api-key"
}
}
assert_response :redirect
assert_redirected_to root_path
end
test "authenticated user can create source and queue sync" do
user = User.create!(timezone: "UTC")
sign_in_as(user)
GoodJob::Job.where(job_class: "HeartbeatImportSourceSyncJob").delete_all
assert_difference -> { HeartbeatImportSource.count }, 1 do
assert_difference -> { GoodJob::Job.where(job_class: "HeartbeatImportSourceSyncJob").count }, 1 do
post my_heartbeat_import_source_path, params: {
heartbeat_import_source: {
endpoint_url: "https://wakatime.com/api/v1",
encrypted_api_key: "api-key",
sync_enabled: "1"
}
}
end
end
assert_response :redirect
assert_redirected_to my_settings_data_path
end
test "show returns configured source payload" do
user = User.create!(timezone: "UTC")
source = user.create_heartbeat_import_source!(
provider: :wakatime_compatible,
endpoint_url: "https://wakatime.com/api/v1",
encrypted_api_key: "api-key"
)
sign_in_as(user)
get my_heartbeat_import_source_path
assert_response :success
payload = JSON.parse(response.body)
assert_equal source.id, payload.dig("import_source", "id")
assert_equal "wakatime_compatible", payload.dig("import_source", "provider")
end
test "sync now queues source sync" do
user = User.create!(timezone: "UTC")
source = user.create_heartbeat_import_source!(
provider: :wakatime_compatible,
endpoint_url: "https://wakatime.com/api/v1",
encrypted_api_key: "api-key",
sync_enabled: true
)
sign_in_as(user)
GoodJob::Job.where(job_class: "HeartbeatImportSourceSyncJob").delete_all
assert_difference -> { GoodJob::Job.where(job_class: "HeartbeatImportSourceSyncJob").count }, 1 do
post sync_my_heartbeat_import_source_path
end
assert_response :redirect
assert_redirected_to my_settings_data_path
assert_equal source.id, GoodJob::Job.where(job_class: "HeartbeatImportSourceSyncJob").last.serialized_params.dig("arguments", 0)
end
test "destroy removes source" do
user = User.create!(timezone: "UTC")
user.create_heartbeat_import_source!(
provider: :wakatime_compatible,
endpoint_url: "https://wakatime.com/api/v1",
encrypted_api_key: "api-key"
)
sign_in_as(user)
assert_difference -> { HeartbeatImportSource.count }, -1 do
delete my_heartbeat_import_source_path
end
assert_response :redirect
assert_redirected_to my_settings_data_path
end
test "returns not found json when imports and mirrors are disabled" do
user = User.create!(timezone: "UTC")
sign_in_as(user)
Flipper.disable(:wakatime_imports_mirrors)
get my_heartbeat_import_source_path, as: :json
assert_response :not_found
payload = JSON.parse(response.body)
assert_equal "Imports and mirrors are currently disabled.", payload["error"]
end
end

View file

@ -1,8 +1,22 @@
require "test_helper"
class My::HeartbeatImportsControllerTest < ActionDispatch::IntegrationTest
include ActiveJob::TestHelper
fixtures :users
setup do
@original_queue_adapter = ActiveJob::Base.queue_adapter
ActiveJob::Base.queue_adapter = :test
clear_enqueued_jobs
clear_performed_jobs
end
teardown do
Flipper.disable(:imports)
ActiveJob::Base.queue_adapter = @original_queue_adapter
end
test "create rejects guests" do
post my_heartbeat_imports_path
@ -10,39 +24,25 @@ class My::HeartbeatImportsControllerTest < ActionDispatch::IntegrationTest
assert_equal "You must be logged in to view this page.", JSON.parse(response.body)["error"]
end
test "create rejects non-development environment" do
test "create rejects dev upload outside development" do
user = users(:one)
sign_in_as(user)
post my_heartbeat_imports_path, params: { heartbeat_file: uploaded_file }
assert_redirected_with_import_error("Heartbeat import is only available in development.")
end
test "create returns error when no import data is provided" do
user = users(:one)
sign_in_as(user)
post my_heartbeat_imports_path
assert_response :forbidden
assert_equal "Heartbeat import is only available in development.", JSON.parse(response.body)["error"]
assert_redirected_with_import_error("No import data provided.")
end
test "show rejects non-development environment" do
user = users(:one)
sign_in_as(user)
get my_heartbeat_import_path("import-123")
assert_response :forbidden
assert_equal "Heartbeat import is only available in development.", JSON.parse(response.body)["error"]
end
test "create returns error when file is missing" do
user = users(:one)
sign_in_as(user)
with_development_env do
post my_heartbeat_imports_path
end
assert_response :unprocessable_entity
assert_equal "pls select a file to import", JSON.parse(response.body)["error"]
end
test "create returns error when file type is invalid" do
test "create returns error when dev upload file type is invalid" do
user = users(:one)
sign_in_as(user)
@ -52,58 +52,181 @@ class My::HeartbeatImportsControllerTest < ActionDispatch::IntegrationTest
}
end
assert_response :unprocessable_entity
assert_equal "pls upload only json (download from the button above it)", JSON.parse(response.body)["error"]
assert_redirected_with_import_error("pls upload only json (download from the button above it)")
end
test "create starts import and returns status" do
test "create starts dev upload import" do
user = users(:one)
sign_in_as(user)
with_development_env do
with_memory_cache do
post my_heartbeat_imports_path, params: {
heartbeat_file: uploaded_file
}
assert_difference -> { user.heartbeat_import_runs.count }, +1 do
assert_enqueued_with(job: HeartbeatImportJob) do
post my_heartbeat_imports_path, params: { heartbeat_file: uploaded_file }
end
end
end
assert_response :accepted
body = JSON.parse(response.body)
assert body["import_id"].present?
assert_equal "queued", body.dig("status", "state")
assert_equal 0, body.dig("status", "progress_percent")
run = user.heartbeat_import_runs.order(:created_at).last
assert_redirected_to my_settings_data_url
assert_equal "queued", run.state
assert_equal "dev_upload", run.source_kind
end
test "remote create rejects users without the imports feature" do
user = users(:one)
sign_in_as(user)
post my_heartbeat_imports_path, params: remote_params(provider: "wakatime_dump")
assert_redirected_with_import_error("Imports are not enabled for this user.")
end
test "remote create rejects during cooldown" do
user = users(:one)
sign_in_as(user)
Flipper.enable_actor(:imports, user)
user.heartbeat_import_runs.create!(
source_kind: :wakatime_dump,
state: :completed,
encrypted_api_key: "old-secret",
remote_requested_at: 1.hour.ago
)
post my_heartbeat_imports_path, params: remote_params(provider: "wakatime_dump")
assert_redirected_with_import_error("Remote imports are limited to once every 4 hours.")
assert flash[:cooldown_until].present?
end
test "remote create rejects when another import is active" do
user = users(:one)
sign_in_as(user)
Flipper.enable_actor(:imports, user)
user.heartbeat_import_runs.create!(
source_kind: :dev_upload,
state: :queued,
source_filename: "old.json"
)
post my_heartbeat_imports_path, params: remote_params(provider: "wakatime_dump")
assert_redirected_with_import_error("Another import is already in progress.")
end
test "remote create starts wakatime import" do
user = users(:one)
sign_in_as(user)
Flipper.enable_actor(:imports, user)
assert_difference -> { user.heartbeat_import_runs.count }, +1 do
assert_enqueued_with(job: HeartbeatImportDumpJob) do
post my_heartbeat_imports_path, params: remote_params(provider: "wakatime_dump")
end
end
run = user.heartbeat_import_runs.order(:created_at).last
assert_redirected_to my_settings_data_url
assert_equal "wakatime_dump", run.source_kind
assert_equal "queued", run.state
end
test "remote create starts hackatime v1 import" do
user = users(:one)
sign_in_as(user)
Flipper.enable_actor(:imports, user)
assert_difference -> { user.heartbeat_import_runs.count }, +1 do
assert_enqueued_with(job: HeartbeatImportDumpJob) do
post my_heartbeat_imports_path, params: remote_params(provider: "hackatime_v1_dump")
end
end
run = user.heartbeat_import_runs.order(:created_at).last
assert_redirected_to my_settings_data_url
assert_equal "hackatime_v1_dump", run.source_kind
assert_equal "queued", run.state
end
test "show returns status for existing import" do
user = users(:one)
sign_in_as(user)
with_development_env do
with_memory_cache do
post my_heartbeat_imports_path, params: { heartbeat_file: uploaded_file }
import_id = JSON.parse(response.body).fetch("import_id")
run = user.heartbeat_import_runs.create!(
source_kind: :dev_upload,
state: :completed,
source_filename: "heartbeats.json",
imported_count: 4,
total_count: 5,
skipped_count: 1,
message: "Completed."
)
get my_heartbeat_import_path(import_id)
get my_heartbeat_import_path(run)
assert_response :success
assert_equal run.id.to_s, JSON.parse(response.body).fetch("import_id")
end
test "show refreshes stale remote imports" do
user = users(:one)
sign_in_as(user)
Flipper.enable_actor(:imports, user)
run = user.heartbeat_import_runs.create!(
source_kind: :hackatime_v1_dump,
state: :waiting_for_dump,
encrypted_api_key: "secret",
remote_dump_id: "dump-123",
remote_requested_at: 10.minutes.ago,
remote_dump_status: "Pending…",
message: "Pending…..."
)
run.update_column(:updated_at, 10.seconds.ago)
singleton_class = HeartbeatImportRunner.singleton_class
singleton_class.alias_method :__original_refresh_remote_run_for_test, :refresh_remote_run!
singleton_class.define_method(:refresh_remote_run!) do |stale_run|
stale_run.update!(
remote_dump_status: "Completed",
message: "Downloading data dump..."
)
stale_run.reload
end
begin
get my_heartbeat_import_path(run)
ensure
singleton_class.alias_method :refresh_remote_run!, :__original_refresh_remote_run_for_test
singleton_class.remove_method :__original_refresh_remote_run_for_test
end
assert_response :success
assert_equal "queued", JSON.parse(response.body).fetch("state")
body = JSON.parse(response.body)
assert_equal "Completed", body["remote_dump_status"]
assert_equal "Downloading data dump...", body["message"]
end
test "show returns not found for unknown import id" do
test "show returns not found for another user's import" do
user = users(:one)
other_user = users(:two)
sign_in_as(user)
with_development_env do
with_memory_cache do
get my_heartbeat_import_path("missing-import")
end
end
run = other_user.heartbeat_import_runs.create!(
source_kind: :dev_upload,
state: :queued,
source_filename: "other.json"
)
get my_heartbeat_import_path(run)
assert_response :not_found
assert_equal "Import not found", JSON.parse(response.body).fetch("error")
assert_equal "Import not found", JSON.parse(response.body)["error"]
end
private
@ -119,14 +242,6 @@ class My::HeartbeatImportsControllerTest < ActionDispatch::IntegrationTest
rails_singleton.remove_method :__original_env_for_test
end
def with_memory_cache
original_cache = Rails.cache
Rails.cache = ActiveSupport::Cache::MemoryStore.new
yield
ensure
Rails.cache = original_cache
end
def uploaded_file(filename: "heartbeats.json", content_type: "application/json", content: '{"heartbeats":[]}')
Rack::Test::UploadedFile.new(
StringIO.new(content),
@ -134,4 +249,18 @@ class My::HeartbeatImportsControllerTest < ActionDispatch::IntegrationTest
original_filename: filename
)
end
def remote_params(provider:)
{
heartbeat_import: {
provider: provider,
api_key: "remote-key-#{SecureRandom.hex(8)}"
}
}
end
def assert_redirected_with_import_error(message)
assert_redirected_to my_settings_data_url
assert_equal message, session[:inertia_errors]&.dig(:import)
end
end

View file

@ -1,46 +0,0 @@
require "test_helper"
class WakatimeMirrorsControllerTest < ActionDispatch::IntegrationTest
setup do
Flipper.enable(:wakatime_imports_mirrors)
end
teardown do
Flipper.disable(:wakatime_imports_mirrors)
end
test "creates mirror when imports and mirrors are enabled" do
user = User.create!(timezone: "UTC")
sign_in_as(user)
assert_difference -> { user.reload.wakatime_mirrors.count }, 1 do
post user_wakatime_mirrors_path(user), params: {
wakatime_mirror: {
endpoint_url: "https://wakapi.dev/api/compat/wakatime/v1",
encrypted_api_key: "mirror-key"
}
}
end
assert_response :redirect
assert_redirected_to my_settings_data_path
end
test "blocks mirror create when imports and mirrors are disabled" do
user = User.create!(timezone: "UTC")
sign_in_as(user)
Flipper.disable(:wakatime_imports_mirrors)
assert_no_difference -> { user.reload.wakatime_mirrors.count } do
post user_wakatime_mirrors_path(user), params: {
wakatime_mirror: {
endpoint_url: "https://wakapi.dev/api/compat/wakatime/v1",
encrypted_api_key: "mirror-key"
}
}
end
assert_response :redirect
assert_redirected_to my_settings_data_path
end
end

View file

@ -0,0 +1,130 @@
require "test_helper"
class HeartbeatImportDumpJobTest < ActiveJob::TestCase
include ActiveJob::TestHelper
setup do
@original_queue_adapter = ActiveJob::Base.queue_adapter
ActiveJob::Base.queue_adapter = :test
end
teardown do
Flipper.disable(:imports)
ActiveJob::Base.queue_adapter = @original_queue_adapter
clear_enqueued_jobs
clear_performed_jobs
end
test "requests a remote dump and schedules polling" do
user = User.create!(timezone: "UTC")
Flipper.enable_actor(:imports, user)
run = user.heartbeat_import_runs.create!(
source_kind: :wakatime_dump,
state: :queued,
encrypted_api_key: "secret"
)
fake_client = Object.new
fake_client.define_singleton_method(:request_dump) do
{
id: "dump-123",
status: "Pending",
percent_complete: 12.0,
download_url: nil,
type: "heartbeats",
is_processing: true,
is_stuck: false,
has_failed: false
}
end
with_dump_client(fake_client) do
assert_enqueued_with(job: HeartbeatImportDumpJob) do
HeartbeatImportDumpJob.perform_now(run.id)
end
end
run.reload
assert_equal "waiting_for_dump", run.state
assert_equal "dump-123", run.remote_dump_id
assert_not_nil run.remote_requested_at
end
test "downloads a completed dump and enqueues the import job" do
user = User.create!(timezone: "UTC")
Flipper.enable_actor(:imports, user)
run = user.heartbeat_import_runs.create!(
source_kind: :hackatime_v1_dump,
state: :waiting_for_dump,
encrypted_api_key: "secret",
remote_dump_id: "dump-456",
remote_requested_at: Time.current
)
fake_client = Object.new
fake_client.define_singleton_method(:list_dumps) do
[
{
id: "dump-456",
status: "Completed",
percent_complete: 100.0,
download_url: "https://example.invalid/download.json",
type: "heartbeats",
is_processing: false,
is_stuck: false,
has_failed: false
}
]
end
fake_client.define_singleton_method(:download_dump) do |_url|
'{"heartbeats":[]}'
end
with_dump_client(fake_client) do
assert_enqueued_with(job: HeartbeatImportJob) do
HeartbeatImportDumpJob.perform_now(run.id)
end
end
run.reload
assert_equal "downloading_dump", run.state
end
test "marks the run as failed on authentication errors" do
user = User.create!(timezone: "UTC")
Flipper.enable_actor(:imports, user)
run = user.heartbeat_import_runs.create!(
source_kind: :wakatime_dump,
state: :queued,
encrypted_api_key: "secret"
)
fake_client = Object.new
fake_client.define_singleton_method(:request_dump) do
raise HeartbeatImportDumpClient::AuthenticationError, "Authentication failed (401)"
end
with_dump_client(fake_client) do
HeartbeatImportDumpJob.perform_now(run.id)
end
run.reload
assert_equal "failed", run.state
assert_equal "Import failed: Authentication failed (401)", run.message
assert_nil run.encrypted_api_key
end
private
def with_dump_client(fake_client)
singleton_class = HeartbeatImportDumpClient.singleton_class
singleton_class.alias_method :__original_new_for_test, :new
singleton_class.define_method(:new) do |*|
fake_client
end
yield
ensure
singleton_class.alias_method :new, :__original_new_for_test
singleton_class.remove_method :__original_new_for_test
end
end

View file

@ -1,187 +0,0 @@
require "test_helper"
require "webmock/minitest"
# Coordinator-level tests for WakaTime-compatible heartbeat import syncing.
# HeartbeatImportSourceSyncJob plans the work; per-day fetches are delegated
# to HeartbeatImportSourceSyncDayJob. These tests read the GoodJob jobs
# table directly to observe what got enqueued.
class HeartbeatImportSourceSyncJobTest < ActiveJob::TestCase
  setup do
    # The whole imports/mirrors pipeline is gated behind this flag.
    Flipper.enable(:wakatime_imports_mirrors)
  end

  teardown do
    Flipper.disable(:wakatime_imports_mirrors)
  end

  # Creates the user's single import source aimed at the public WakaTime API.
  # +attrs+ lets a test override status, backfill window, cursor, etc.
  def create_source(user:, **attrs)
    user.create_heartbeat_import_source!(
      {
        provider: :wakatime_compatible,
        endpoint_url: "https://wakatime.com/api/v1",
        encrypted_api_key: "import-key",
        sync_enabled: true,
        status: :idle
      }.merge(attrs)
    )
  end

  # Persisted GoodJob rows for the given job class name.
  def queued_jobs_for(job_class)
    GoodJob::Job.where(job_class: job_class)
  end

  test "full-history default schedules backfill windows and re-enqueues coordinator" do
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    source = create_source(user: user)
    # The coordinator asks WakaTime for the account's earliest activity date
    # to size the backfill (10 days of history in this fixture).
    stub_request(:get, "https://wakatime.com/api/v1/users/current/all_time_since_today")
      .to_return(
        status: 200,
        body: {
          data: {
            range: {
              start_date: (Date.current - 10.days).iso8601
            }
          }
        }.to_json,
        headers: { "Content-Type" => "application/json" }
      )

    HeartbeatImportSourceSyncJob.perform_now(source.id)

    day_jobs = queued_jobs_for("HeartbeatImportSourceSyncDayJob")
    sync_jobs = queued_jobs_for("HeartbeatImportSourceSyncJob")
    # One five-day window of day jobs is scheduled per run, plus a re-enqueued
    # coordinator to continue the backfill.
    assert_equal 5, day_jobs.count
    assert_equal 1, sync_jobs.count
    source.reload
    assert source.backfilling?
    # The cursor records how far back the backfill has progressed.
    assert_equal(Date.current - 5.days, source.backfill_cursor_date)
  end

  test "range override limits scheduled days" do
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    start_date = Date.current - 2.days
    end_date = Date.current - 1.day
    source = create_source(
      user: user,
      initial_backfill_start_date: start_date,
      initial_backfill_end_date: end_date
    )

    HeartbeatImportSourceSyncJob.perform_now(source.id)

    day_jobs = queued_jobs_for("HeartbeatImportSourceSyncDayJob")
    # The day job's last serialized argument is the ISO-8601 date to fetch.
    day_args = day_jobs.map { |job| job.serialized_params.fetch("arguments").last }
    assert_equal 2, day_jobs.count
    assert_includes day_args, start_date.iso8601
    assert_includes day_args, end_date.iso8601
  end

  test "ongoing sync enqueues today and yesterday" do
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    # A source that already finished its backfill: cursor cleared and a
    # last_synced_at timestamp present.
    source = create_source(
      user: user,
      status: :syncing,
      initial_backfill_start_date: Date.current - 7.days,
      initial_backfill_end_date: Date.current,
      backfill_cursor_date: nil,
      last_synced_at: Time.current
    )

    HeartbeatImportSourceSyncJob.perform_now(source.id)

    day_jobs = queued_jobs_for("HeartbeatImportSourceSyncDayJob")
    scheduled_dates = day_jobs.map { |job| Date.iso8601(job.serialized_params.fetch("arguments").last) }
    # Steady-state sync only refreshes the two most recent days.
    assert_equal 2, day_jobs.count
    assert_includes scheduled_dates, Date.current
    assert_includes scheduled_dates, Date.yesterday
  end

  test "day job imports and dedupes by fields_hash" do
    user = User.create!(timezone: "UTC")
    source = create_source(user: user, status: :syncing)
    timestamp = Time.current.to_f
    # Two byte-identical heartbeats: only one row may survive deduplication.
    payload = [
      {
        entity: "src/a.rb",
        type: "file",
        category: "coding",
        project: "alpha",
        language: "Ruby",
        editor: "VS Code",
        time: timestamp
      },
      {
        entity: "src/a.rb",
        type: "file",
        category: "coding",
        project: "alpha",
        language: "Ruby",
        editor: "VS Code",
        time: timestamp
      }
    ]
    stub_request(:get, "https://wakatime.com/api/v1/users/current/heartbeats")
      .with(query: { "date" => Date.current.iso8601 })
      .to_return(
        status: 200,
        body: { data: payload }.to_json,
        headers: { "Content-Type" => "application/json" }
      )

    HeartbeatImportSourceSyncDayJob.perform_now(source.id, Date.current.iso8601)

    assert_equal 1, user.heartbeats.where(source_type: :wakapi_import).count
    assert source.reload.last_synced_at.present?
  end

  test "day job pauses source on auth errors" do
    user = User.create!(timezone: "UTC")
    source = create_source(user: user, status: :syncing)
    stub_request(:get, "https://wakatime.com/api/v1/users/current/heartbeats")
      .with(query: { "date" => Date.current.iso8601 })
      .to_return(status: 401, body: "{}")

    HeartbeatImportSourceSyncDayJob.perform_now(source.id, Date.current.iso8601)

    source.reload
    # A 401 is treated as permanent: pause the source and disable auto-sync
    # rather than retrying with a bad credential.
    assert source.paused?
    assert_not source.sync_enabled
    assert_includes source.last_error_message, "Authentication failed"
  end

  test "day job marks transient errors for retry" do
    user = User.create!(timezone: "UTC")
    source = create_source(user: user, status: :syncing)
    stub_request(:get, "https://wakatime.com/api/v1/users/current/heartbeats")
      .with(query: { "date" => Date.current.iso8601 })
      .to_return(status: 500, body: "{}")

    # A 5xx raises so the job backend can retry, while the source records the
    # failure count and failed status.
    assert_raises(WakatimeCompatibleClient::TransientError) do
      HeartbeatImportSourceSyncDayJob.new.perform(source.id, Date.current.iso8601)
    end

    source.reload
    assert source.failed?
    assert_equal 1, source.consecutive_failures
  end

  test "coordinator does nothing when imports and mirrors are disabled" do
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    source = create_source(user: user)
    Flipper.disable(:wakatime_imports_mirrors)

    HeartbeatImportSourceSyncJob.perform_now(source.id)

    # No day jobs scheduled and the source stays idle.
    assert_equal 0, queued_jobs_for("HeartbeatImportSourceSyncDayJob").count
    assert_equal "idle", source.reload.status
  end
end

View file

@ -1,48 +0,0 @@
require "test_helper"
# Tests for the fan-out job that enqueues one WakatimeMirrorSyncJob per
# configured mirror, with a per-user debounce.
class MirrorFanoutEnqueueJobTest < ActiveJob::TestCase
  setup do
    Flipper.enable(:wakatime_imports_mirrors)
  end

  teardown do
    Flipper.disable(:wakatime_imports_mirrors)
  end

  test "debounce prevents enqueue storms per user" do
    # The debounce is cache-backed, so swap in an isolated MemoryStore for
    # this test and restore the real cache afterwards.
    original_cache = Rails.cache
    Rails.cache = ActiveSupport::Cache::MemoryStore.new
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key-1"
    )
    user.wakatime_mirrors.create!(
      endpoint_url: "https://wakapi.dev/api/compat/wakatime/v1",
      encrypted_api_key: "mirror-key-2"
    )
    # Two back-to-back fan-outs, but only one takes effect: exactly one sync
    # job per mirror (2 total) because the second call hits the debounce.
    assert_difference -> { GoodJob::Job.where(job_class: "WakatimeMirrorSyncJob").count }, 2 do
      MirrorFanoutEnqueueJob.perform_now(user.id)
      MirrorFanoutEnqueueJob.perform_now(user.id)
    end
  ensure
    Rails.cache = original_cache
  end

  test "does not enqueue mirror sync when imports and mirrors are disabled" do
    GoodJob::Job.delete_all
    user = User.create!(timezone: "UTC")
    user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    Flipper.disable(:wakatime_imports_mirrors)
    assert_no_difference -> { GoodJob::Job.where(job_class: "WakatimeMirrorSyncJob").count } do
      MirrorFanoutEnqueueJob.perform_now(user.id)
    end
  end
end

View file

@ -1,344 +0,0 @@
require "test_helper"
require "webmock/minitest"
require "socket"
require "net/http"
require "timeout"
# Tests for WakatimeMirrorSyncJob, which forwards a user's direct-entry
# heartbeats to an external WakaTime-compatible endpoint. Most tests stub the
# network with WebMock; one spins up a real TCP server to exercise the full
# HTTP path.
class WakatimeMirrorSyncJobTest < ActiveJob::TestCase
  setup do
    Flipper.enable(:wakatime_imports_mirrors)
  end

  teardown do
    Flipper.disable(:wakatime_imports_mirrors)
  end

  # Minimal single-connection HTTP server that speaks just enough of the
  # WakaTime bulk-heartbeats API for an end-to-end sync test. Captured bulk
  # requests are exposed through #pop_requests.
  class MockWakatimeServer
    attr_reader :base_url, :port

    def initialize
      @requests = Queue.new
      # Bind to an ephemeral port on all interfaces...
      @server = TCPServer.new("0.0.0.0", 0)
      @stopped = false
      @clients = []
      @mutex = Mutex.new
      @port = @server.addr[1]
      # ...but advertise 127.0.0.2, presumably to bypass WebMock's
      # allow_localhost handling. NOTE(review): 127.0.0.2 is routable on
      # Linux but not configured by default on macOS — TODO confirm.
      @base_url = "http://127.0.0.2:#{@port}/api/v1"
    end

    def start
      @thread = Thread.new do
        loop do
          break if @stopped
          socket = @server.accept
          @mutex.synchronize { @clients << socket }
          handle_client(socket)
        rescue IOError, Errno::EBADF
          # Raised when #stop closes the listening socket mid-accept.
          break
        end
      end
      wait_until_ready!
    end

    def stop
      @stopped = true
      @server.close unless @server.closed?
      @mutex.synchronize do
        @clients.each { |client| client.close unless client.closed? }
        @clients.clear
      end
      # Give the accept loop a bounded window to exit.
      @thread&.join(2)
    end

    # Drains and returns every captured request without blocking.
    def pop_requests
      requests = []
      loop do
        requests << @requests.pop(true)
      end
    rescue ThreadError
      # Queue#pop(true) raises ThreadError when empty — that ends the drain.
      requests
    end

    private

    # Parses one HTTP/1.1 request from +socket+ and writes a canned response.
    def handle_client(socket)
      request_line = socket.gets
      return if request_line.nil?
      _method, path, = request_line.split(" ")
      headers = {}
      while (line = socket.gets)
        break if line == "\r\n"
        key, value = line.split(":", 2)
        headers[key.to_s.strip.downcase] = value.to_s.strip
      end
      content_length = headers.fetch("content-length", "0").to_i
      body = content_length.positive? ? socket.read(content_length).to_s : ""
      if path == "/api/v1/users/current/heartbeats.bulk"
        # Record the bulk upload so the test can assert on payload and auth.
        @requests << {
          path: path,
          body: body,
          authorization: headers["authorization"]
        }
        respond(socket, 201, "{}")
      elsif path == "/__health"
        respond(socket, 200, "{}")
      else
        respond(socket, 404, "{}")
      end
    ensure
      @mutex.synchronize { @clients.delete(socket) }
      socket.close unless socket.closed?
    end

    # Writes a minimal HTTP/1.1 response with a JSON body.
    def respond(socket, status, body)
      phrase = status == 200 ? "OK" : status == 201 ? "Created" : "Not Found"
      socket.write("HTTP/1.1 #{status} #{phrase}\r\n")
      socket.write("Content-Type: application/json\r\n")
      socket.write("Content-Length: #{body.bytesize}\r\n")
      socket.write("Connection: close\r\n")
      socket.write("\r\n")
      socket.write(body)
    end

    # Polls the /__health endpoint until the accept loop is serving,
    # failing after 5 seconds.
    def wait_until_ready!
      Timeout.timeout(5) do
        loop do
          begin
            response = Net::HTTP.get_response(URI("http://127.0.0.2:#{@port}/__health"))
            return if response.is_a?(Net::HTTPSuccess)
          rescue Errno::ECONNREFUSED, Errno::EHOSTUNREACH
            # Server thread not accepting yet — keep polling.
          end
          sleep 0.05
        end
      end
    end
  end

  # Inserts one heartbeat with the given source_type for +user+.
  def create_heartbeat(user:, source_type:, entity:, project: "mirror-project", at_time: Time.current)
    user.heartbeats.create!(
      entity: entity,
      type: "file",
      category: "coding",
      time: at_time.to_f,
      project: project,
      source_type: source_type
    )
  end

  test "sync sends only direct heartbeats in chunks of 25 and advances cursor" do
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    # 30 direct heartbeats → expect batches of 25 + 5.
    direct_heartbeats = 30.times.map do |index|
      create_heartbeat(
        user: user,
        source_type: :direct_entry,
        entity: "src/direct_#{index}.rb",
        project: "direct-project",
        at_time: Time.current + index.seconds
      )
    end
    # Imported heartbeats must never be mirrored back out.
    5.times do |index|
      create_heartbeat(
        user: user,
        source_type: :wakapi_import,
        entity: "src/imported_#{index}.rb",
        project: "import-project",
        at_time: Time.current + (100 + index).seconds
      )
    end
    payload_batches = []
    stub_request(:post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk")
      .to_return do |request|
        payload_batches << JSON.parse(request.body)
        { status: 201, body: "{}", headers: { "Content-Type" => "application/json" } }
      end

    with_development_env do
      WakatimeMirrorSyncJob.perform_now(mirror.id)
    end

    assert_equal [ 25, 5 ], payload_batches.map(&:size)
    assert_equal 30, payload_batches.flatten.size
    assert payload_batches.flatten.all? { |row| row["project"] == "direct-project" }
    # Cursor advances to the newest direct heartbeat that was sent.
    assert_equal direct_heartbeats.last.id, mirror.reload.last_synced_heartbeat_id
  end

  test "sync respects last_synced_heartbeat_id cursor" do
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    first = create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/old.rb",
      at_time: Time.current - 1.minute
    )
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/new.rb",
      at_time: Time.current
    )
    # Pretend the first heartbeat was already mirrored.
    mirror.update!(last_synced_heartbeat_id: first.id)
    payload_batches = []
    stub_request(:post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk")
      .to_return do |request|
        payload_batches << JSON.parse(request.body)
        { status: 201, body: "{}", headers: { "Content-Type" => "application/json" } }
      end

    with_development_env do
      WakatimeMirrorSyncJob.perform_now(mirror.id)
    end

    # Only the heartbeat after the cursor is transmitted.
    assert_equal 1, payload_batches.flatten.size
    assert_equal "src/new.rb", payload_batches.flatten.first["entity"]
  end

  test "auth failures disable mirror and stop syncing" do
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/direct.rb"
    )
    stub_request(:post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk")
      .to_return(status: 401, body: "{}")

    with_development_env do
      WakatimeMirrorSyncJob.perform_now(mirror.id)
    end

    mirror.reload
    # A 401 permanently disables the mirror and records the error.
    assert_not mirror.enabled
    assert_includes mirror.last_error_message, "Authentication failed"
    assert mirror.last_error_at.present?
    assert_equal 1, mirror.consecutive_failures
  end

  test "transient failures keep mirror enabled and raise for retry" do
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/direct.rb"
    )
    stub_request(:post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk")
      .to_return(status: 500, body: "{}")

    # A 5xx raises so the job backend retries; the mirror stays enabled but
    # tracks the failure.
    assert_raises(WakatimeMirrorSyncJob::MirrorTransientError) do
      WakatimeMirrorSyncJob.new.perform(mirror.id)
    end

    mirror.reload
    assert mirror.enabled
    assert_equal 1, mirror.consecutive_failures
  end

  test "sync posts to a real wakatime-compatible mock server on a random port" do
    # Full HTTP round-trip: bypass WebMock and talk to MockWakatimeServer.
    WebMock.allow_net_connect!
    server = MockWakatimeServer.new
    server.start
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    # update_column skips validations, which would reject the http:// URL.
    mirror.update_column(:endpoint_url, server.base_url)
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/direct_1.rb",
      project: "direct-project"
    )
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/direct_2.rb",
      project: "direct-project"
    )
    create_heartbeat(
      user: user,
      source_type: :wakapi_import,
      entity: "src/imported.rb",
      project: "import-project"
    )

    with_development_env do
      WakatimeMirrorSyncJob.perform_now(mirror.id)
    end

    requests = server.pop_requests
    assert_equal 1, requests.length
    assert_operator server.port, :>, 0
    payload = JSON.parse(requests.first.fetch(:body))
    # Only the two direct heartbeats are sent; the import is excluded.
    assert_equal 2, payload.length
    assert_equal [ "src/direct_1.rb", "src/direct_2.rb" ], payload.map { |row| row["entity"] }
    assert_match(/\ABasic /, requests.first.fetch(:authorization).to_s)
  ensure
    server&.stop
    WebMock.disable_net_connect!(allow_localhost: false)
  end

  test "does nothing when imports and mirrors are disabled" do
    user = User.create!(timezone: "UTC")
    mirror = user.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    create_heartbeat(
      user: user,
      source_type: :direct_entry,
      entity: "src/direct.rb"
    )
    Flipper.disable(:wakatime_imports_mirrors)
    stub_request(:post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk")
      .to_return(status: 201, body: "{}")

    WakatimeMirrorSyncJob.perform_now(mirror.id)

    assert_not_requested :post, "https://wakatime.com/api/v1/users/current/heartbeats.bulk"
  end

  private

  # Runs the block with Rails.env reporting "development".
  # NOTE(review): presumably needed so mirror syncing/endpoint checks behave
  # as they do in development — confirm against the job's env guards.
  def with_development_env
    rails_singleton = class << Rails; self; end
    rails_singleton.alias_method :__original_env_for_test, :env
    rails_singleton.define_method(:env) { ActiveSupport::StringInquirer.new("development") }
    yield
  ensure
    rails_singleton.remove_method :env
    rails_singleton.alias_method :env, :__original_env_for_test
    rails_singleton.remove_method :__original_env_for_test
  end
end

View file

@ -0,0 +1,44 @@
require "test_helper"
# Model tests for HeartbeatImportRun validations and lookup helpers.
class HeartbeatImportRunTest < ActiveSupport::TestCase
  test "requires api key for remote imports on create" do
    run = HeartbeatImportRun.new(
      user: User.create!(timezone: "UTC"),
      source_kind: :wakatime_dump,
      state: :queued
    )
    # Remote imports cannot be created without a credential.
    refute run.valid?
    assert_includes run.errors[:encrypted_api_key], "can't be blank"
  end

  test "remote cooldown helper returns future timestamp for recent remote import" do
    owner = User.create!(timezone: "UTC")
    recent = owner.heartbeat_import_runs.create!(
      source_kind: :wakatime_dump,
      state: :completed,
      encrypted_api_key: "secret",
      remote_requested_at: 2.hours.ago
    )
    # Cooldown window is four hours from the remote request time.
    assert_in_delta recent.remote_requested_at + 4.hours,
                    HeartbeatImportRun.remote_cooldown_until_for(owner),
                    1.second
  end

  test "active_for returns the latest active import" do
    owner = User.create!(timezone: "UTC")
    owner.heartbeat_import_runs.create!(
      source_kind: :dev_upload,
      state: :completed,
      source_filename: "old.json"
    )
    active = owner.heartbeat_import_runs.create!(
      source_kind: :dev_upload,
      state: :queued,
      source_filename: "new.json"
    )
    # The completed run is ignored; only the queued run counts as active.
    assert_equal active, HeartbeatImportRun.active_for(owner)
  end
end

View file

@ -1,50 +0,0 @@
require "test_helper"
# Validation coverage for HeartbeatImportSource.
class HeartbeatImportSourceTest < ActiveSupport::TestCase
  test "validates one source per user" do
    owner = User.create!(timezone: "UTC")
    HeartbeatImportSource.create!(
      user: owner,
      provider: :wakatime_compatible,
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "abc123"
    )
    # A second source for the same user must fail the uniqueness check.
    second = HeartbeatImportSource.new(
      user: owner,
      provider: :wakatime_compatible,
      endpoint_url: "https://wakapi.dev/api/compat/wakatime/v1",
      encrypted_api_key: "xyz789"
    )
    refute second.valid?
    assert_includes second.errors[:user_id], "has already been taken"
  end

  test "requires https endpoint outside development" do
    insecure = HeartbeatImportSource.new(
      user: User.create!(timezone: "UTC"),
      provider: :wakatime_compatible,
      endpoint_url: "http://example.com/api/v1",
      encrypted_api_key: "abc123"
    )
    refute insecure.valid?
    assert_includes insecure.errors[:endpoint_url], "must use https"
  end

  test "validates backfill date range order" do
    # End date earlier than start date must be rejected.
    inverted = HeartbeatImportSource.new(
      user: User.create!(timezone: "UTC"),
      provider: :wakatime_compatible,
      endpoint_url: "https://example.com/api/v1",
      encrypted_api_key: "abc123",
      initial_backfill_start_date: Date.new(2026, 2, 10),
      initial_backfill_end_date: Date.new(2026, 2, 1)
    )
    refute inverted.valid?
    assert_includes inverted.errors[:initial_backfill_end_date], "must be on or after the start date"
  end
end

View file

@ -29,4 +29,32 @@ class UserTest < ActiveSupport::TestCase
assert_equal "gruvbox_dark", metadata[:value]
end
# Flipper actor IDs take the form "User;<id>" so per-user gates stay stable.
test "flipper id uses the user id" do
  subject = User.create!(timezone: "UTC")
  expected_actor_id = "User;#{subject.id}"
  assert_equal expected_actor_id, subject.flipper_id
end
# Only remote-sourced runs (e.g. wakatime_dump) count as an active remote
# import; a queued dev upload must not trip the predicate.
test "active remote heartbeat import run only counts remote imports" do
  subject = User.create!(timezone: "UTC")
  refute subject.active_remote_heartbeat_import_run?

  subject.heartbeat_import_runs.create!(
    source_kind: :dev_upload,
    state: :queued,
    source_filename: "dev.json"
  )
  refute subject.active_remote_heartbeat_import_run?

  subject.heartbeat_import_runs.create!(
    source_kind: :wakatime_dump,
    state: :waiting_for_dump,
    encrypted_api_key: "secret"
  )
  assert subject.active_remote_heartbeat_import_run?
end
end

View file

@ -1,52 +0,0 @@
require "test_helper"
# Model tests for WakatimeMirror cursor initialization and endpoint guards.
class WakatimeMirrorTest < ActiveSupport::TestCase
  # Inserts a direct-entry heartbeat at +at_time+ for +user+.
  def create_direct_heartbeat(user, at_time)
    user.heartbeats.create!(
      entity: "src/file.rb",
      type: "file",
      category: "coding",
      time: at_time.to_f,
      project: "mirror-test",
      source_type: :direct_entry
    )
  end

  test "initializes cursor at current heartbeat tip on create" do
    owner = User.create!(timezone: "UTC")
    older = create_direct_heartbeat(owner, Time.current - 5.minutes)
    newer = create_direct_heartbeat(owner, Time.current - 1.minute)
    mirror = owner.wakatime_mirrors.create!(
      endpoint_url: "https://wakatime.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    # A fresh mirror starts at the newest heartbeat so history is not replayed.
    assert_equal newer.id, mirror.last_synced_heartbeat_id
    assert_operator newer.id, :>, older.id
  end

  test "rejects endpoints that point to hackatime host" do
    mirror = User.create!(timezone: "UTC").wakatime_mirrors.build(
      endpoint_url: "https://hackatime.hackclub.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    refute mirror.valid?
    assert_includes mirror.errors[:endpoint_url], "cannot target this Hackatime host"
  end

  test "rejects app host equivalent endpoints" do
    mirror = User.create!(timezone: "UTC").wakatime_mirrors.build(
      endpoint_url: "https://example.com/api/v1",
      encrypted_api_key: "mirror-key"
    )
    # Simulate the app itself being served from example.com.
    mirror.request_host = "example.com"
    refute mirror.valid?
    assert_includes mirror.errors[:endpoint_url], "cannot target this Hackatime host"
  end
end

View file

@ -0,0 +1,56 @@
require "test_helper"
require "webmock/minitest"
# HTTP-contract tests for HeartbeatImportDumpClient using WebMock. The stub
# matchers pin the exact auth header and JSON body the client must send.
class HeartbeatImportDumpClientTest < ActiveSupport::TestCase
  test "request_dump sends basic auth with the raw api key" do
    client = HeartbeatImportDumpClient.new(source_kind: :hackatime_v1_dump, api_key: "secret-key")
    # Legacy Hackatime (waka.hackclub.com) expects HTTP Basic where the
    # base64 payload is the bare API key (no "user:" prefix).
    stub = stub_request(:post, "https://waka.hackclub.com/api/v1/users/current/data_dumps")
      .with(
        headers: {
          "Authorization" => "Basic #{Base64.strict_encode64("secret-key")}",
          "Accept" => "application/json",
          "Content-Type" => "application/json"
        },
        body: { type: "heartbeats", email_when_finished: false }.to_json
      )
      .to_return(
        status: 201,
        body: {
          data: {
            id: "dump-123",
            status: "Processing",
            percent_complete: 0,
            type: "heartbeats",
            is_processing: true,
            is_stuck: false,
            has_failed: false
          }
        }.to_json,
        headers: { "Content-Type" => "application/json" }
      )

    response = client.request_dump

    assert_requested stub
    # The client unwraps the "data" envelope into a symbol-keyed hash.
    assert_equal "dump-123", response[:id]
  end

  test "download_dump reuses auth header for same-host downloads" do
    client = HeartbeatImportDumpClient.new(source_kind: :hackatime_v1_dump, api_key: "secret-key")
    stub = stub_request(:get, "https://waka.hackclub.com/downloads/dump-123.json")
      .with(
        headers: {
          "Authorization" => "Basic #{Base64.strict_encode64("secret-key")}",
          "Accept" => "application/json,application/octet-stream,*/*"
        }
      )
      .to_return(status: 200, body: '{"heartbeats":[]}')

    # The raw response body is returned unparsed.
    body = client.download_dump("https://waka.hackclub.com/downloads/dump-123.json")

    assert_requested stub
    assert_equal '{"heartbeats":[]}', body
  end
end

View file

@ -0,0 +1,72 @@
require "test_helper"
# Tests for HeartbeatImportService.import_from_file covering both supported
# payload shapes: a flat `heartbeats` array and a WakaTime dump's `days` list.
class HeartbeatImportServiceTest < ActiveSupport::TestCase
  test "deduplicates imported heartbeats by fields hash" do
    owner = User.create!(timezone: "UTC")
    duplicate_row = {
      entity: "/tmp/test.rb",
      type: "file",
      time: 1_700_000_000.0,
      project: "hackatime",
      language: "Ruby",
      is_write: true
    }
    # Two identical rows: both are counted, only one is persisted.
    payload = { heartbeats: [ duplicate_row, duplicate_row.dup ] }.to_json

    outcome = HeartbeatImportService.import_from_file(payload, owner)

    assert outcome[:success]
    assert_equal 2, outcome[:total_count]
    assert_equal 1, outcome[:imported_count]
    assert_equal 1, outcome[:skipped_count]
    assert_equal 1, owner.heartbeats.count
  end

  test "imports heartbeats from wakatime data dump day groups" do
    owner = User.create!(timezone: "UTC")
    dump_heartbeat = {
      entity: "/home/skyfall/tavern/manifest.json",
      type: "file",
      time: 1_727_905_177,
      category: "coding",
      project: "tavern",
      language: "JSON",
      editor: "vscode",
      operating_system: "Linux",
      machine_name_id: "skyfall-pc",
      user_agent_id: "wakatime/v1.102.1",
      is_write: true
    }
    payload = {
      range: { start: 1_727_905_169, end: 1_727_905_177 },
      days: [ { date: "2024-10-02", heartbeats: [ dump_heartbeat ] } ]
    }.to_json

    outcome = HeartbeatImportService.import_from_file(payload, owner)

    assert outcome[:success]
    assert_equal 1, outcome[:total_count]
    assert_equal 1, outcome[:imported_count]
    # Dump-specific ID fields map onto the heartbeat's machine/user_agent.
    imported = owner.heartbeats.order(:created_at).last
    assert_equal "skyfall-pc", imported.machine
    assert_equal "wakatime/v1.102.1", imported.user_agent
  end
end

View file

@ -13,7 +13,8 @@ class AccessSettingsTest < ApplicationSystemTestCase
test "access settings page renders key sections" do
assert_settings_page(
path: my_settings_access_path,
marker_text: "Time Tracking Setup"
marker_text: "Time Tracking Setup",
card_count: 4
)
assert_text "Extension Display"

View file

@ -1,25 +0,0 @@
require "application_system_test_case"
require_relative "test_helpers"
# System tests for the admin settings page's authorization behavior.
class AdminSettingsTest < ApplicationSystemTestCase
  include SettingsSystemTestHelpers

  setup do
    @user = User.create!(timezone: "UTC")
    sign_in_as(@user)
  end

  test "admin settings redirects non-admin users" do
    visit my_settings_admin_path
    # Non-admins bounce back to profile settings with an authorization flash.
    assert_current_path my_settings_profile_path, ignore_query: true
    assert_text "You are not authorized to access this page"
  end

  test "admin settings page is available to admin users" do
    @user.update!(admin_level: :admin)
    visit my_settings_admin_path
    assert_text "Mirror and import controls are available under Data settings for all users."
  end
end

View file

@ -12,7 +12,8 @@ class BadgesSettingsTest < ApplicationSystemTestCase
test "badges settings page renders key sections" do
assert_settings_page(
path: my_settings_badges_path,
marker_text: "Stats Badges"
marker_text: "Stats Badges",
card_count: 3
)
assert_text "Markscribe Template"

View file

@ -5,19 +5,21 @@ class DataSettingsTest < ApplicationSystemTestCase
include SettingsSystemTestHelpers
setup do
Flipper.enable(:wakatime_imports_mirrors)
@user = User.create!(timezone: "UTC")
sign_in_as(@user)
end
teardown do
Flipper.disable(:wakatime_imports_mirrors)
Flipper.disable(:imports)
end
test "data settings page renders key sections" do
Flipper.enable_actor(:imports, @user)
assert_settings_page(
path: my_settings_data_path,
marker_text: "Migration Assistant"
marker_text: "Imports",
card_count: 3
)
assert_text "Download Data"
@ -47,62 +49,39 @@ class DataSettingsTest < ApplicationSystemTestCase
assert_text "I changed my mind"
end
test "regular user can add and delete mirror endpoint from data settings" do
visit my_settings_data_path
endpoint_url = "https://example-wakatime.invalid/api/v1"
fill_in "mirror_endpoint_url", with: endpoint_url
fill_in "mirror_key", with: "mirror-key-#{SecureRandom.hex(8)}"
assert_difference -> { @user.reload.wakatime_mirrors.count }, +1 do
click_on "Add mirror"
assert_text "WakaTime mirror added successfully"
end
assert_text endpoint_url
assert_difference -> { @user.reload.wakatime_mirrors.count }, -1 do
accept_confirm do
click_on "Delete mirror"
end
assert_text "WakaTime mirror removed successfully"
end
end
test "data settings rejects hackatime mirror endpoint" do
visit my_settings_data_path
fill_in "mirror_endpoint_url", with: "https://hackatime.hackclub.com/api/v1"
fill_in "mirror_key", with: "mirror-key-#{SecureRandom.hex(8)}"
click_on "Add mirror"
assert_text "cannot target this Hackatime host"
assert_equal 0, @user.reload.wakatime_mirrors.count
end
test "data settings can configure import source and show status panel" do
visit my_settings_data_path
fill_in "import_endpoint_url", with: "https://wakatime.com/api/v1"
fill_in "import_api_key", with: "import-key-#{SecureRandom.hex(8)}"
assert_difference -> { HeartbeatImportSource.count }, +1 do
click_on "Create source"
assert_text "Import source configured successfully."
end
assert_text "Status:"
assert_text "Imported:"
assert_button "Sync now"
end
test "imports and mirrors section is hidden when feature is disabled" do
Flipper.disable(:wakatime_imports_mirrors)
test "imports card is visible when feature is enabled for the user" do
Flipper.enable_actor(:imports, @user)
visit my_settings_data_path
assert_no_text "Imports & Mirrors"
assert_no_field "mirror_endpoint_url"
assert_no_field "import_endpoint_url"
assert_text "Imports"
assert_text "WakaTime"
assert_text "Hackatime v1"
assert_field "remote_import_api_key"
assert_text "Start remote import"
end
test "imports card is hidden when feature is disabled" do
visit my_settings_data_path
assert_no_text "Request a one-time heartbeat dump from WakaTime or legacy Hackatime."
assert_no_field "remote_import_api_key"
assert_no_button "Start remote import"
end
test "data settings shows remote import cooldown notice" do
Flipper.enable_actor(:imports, @user)
@user.heartbeat_import_runs.create!(
source_kind: :wakatime_dump,
state: :waiting_for_dump,
encrypted_api_key: "secret",
remote_requested_at: 1.hour.ago,
message: "Waiting..."
)
visit my_settings_data_path
assert_text "Remote imports can be started again after"
end
end

View file

@ -12,7 +12,8 @@ class IntegrationsSettingsTest < ApplicationSystemTestCase
test "integrations settings page renders key sections" do
assert_settings_page(
path: my_settings_integrations_path,
marker_text: "Slack Status Sync"
marker_text: "Slack Status Sync",
card_count: 4
)
assert_text "Slack Channel Notifications"

View file

@ -15,6 +15,15 @@ class ProfileSettingsTest < ApplicationSystemTestCase
assert_current_path my_settings_path, ignore_query: true
assert_text "Settings"
assert_text "Region and Timezone"
assert_selector "[data-settings-card]", minimum: 4
end
test "settings hash redirects to the matching settings page and subsection" do
visit "#{my_settings_profile_path}#user_api_key"
assert_current_path my_settings_access_path, ignore_query: true
assert_text "API Key"
assert_selector "[data-settings-subnav-item][data-active='true']", text: "API key"
end
test "profile settings updates country and username" do

View file

@ -1,12 +1,15 @@
module SettingsSystemTestHelpers
private
def assert_settings_page(path:, marker_text:)
def assert_settings_page(path:, marker_text:, card_count: 1)
visit path
assert_current_path path, ignore_query: true
assert_text "Settings"
assert_text marker_text
assert_selector "[data-settings-shell]"
assert_selector "[data-settings-content]"
assert_selector "[data-settings-card]", minimum: card_count
end
def choose_select_option(select_id, option_text)