# frozen_string_literal: true

# GoodJob configuration: job-record retention, per-environment execution
# mode, queue thread allocation, and the recurring (cron) job schedule.
Rails.application.configure do
  # Keep finished job records around, but sweep anything older than 7 days.
  config.good_job.preserve_job_records = true
  config.good_job.cleanup_preserved_jobs_before_seconds_ago = 60 * 60 * 24 * 7
  config.good_job.cleanup_interval_jobs = 1000
  config.good_job.cleanup_interval_seconds = 3600

  if Rails.env.development?
    config.good_job.execution_mode = :async # Run jobs in background threads in development
    config.good_job.poll_interval = 5 # Poll every 5 seconds for scheduled jobs
  else
    config.good_job.execution_mode = :external # Use external execution mode in production and staging
  end

  # Only the production environment runs the cron schedule below.
  config.good_job.enable_cron = Rails.env.production?

  # https://github.com/bensheldon/good_job#configuring-your-queues
  # 12 threads total
  config.good_job.queues = "latency_critical:2; latency_10s:3; latency_5m,latency_10s:3; literally_whenever,*,latency_5m,latency_10s:4"

  config.good_job.cron = {
    # update_slack_status: {
    #   cron: "*/5 * * * *",
    #   class: "UserSlackStatusUpdateJob"
    # },
    # NOTE(review): named "daily" but scheduled every minute — confirm this
    # is the intended refresh cadence rather than "0 0 * * *".
    daily_leaderboard_update: {
      cron: "* * * * *",
      class: "LeaderboardUpdateJob",
      args: [ :daily ],
      kwargs: { force_update: true }
    },
    last_7_days_leaderboard_update: {
      cron: "*/7 * * * *",
      class: "LeaderboardUpdateJob",
      args: [ :last_7_days ],
      kwargs: { force_update: true }
    },
    # sailors_log_poll: {
    #   cron: "*/2 * * * *",
    #   class: "SailorsLogPollForChangesJob"
    # },
    # update_slack_channel_cache: {
    #   cron: "0 11 * * *",
    #   class: "SlackCommand::UpdateSlackChannelCacheJob"
    # },
    slack_username_update: {
      cron: "0 0 * * *",
      class: "SlackUsernameUpdateJob"
    },
    # scan_github_repos: {
    #   cron: "0 10 * * *",
    #   class: "ScanGithubReposJob"
    # },
    # sync_all_user_repo_events: {
    #   cron: "0 */6 * * *", # Every 6 hours (at minute 0 of 0, 6, 12, 18 hours)
    #   class: "SyncAllUserRepoEventsJob",
    #   description: "Periodically syncs repository events for all eligible users."
    # },
    # scan_repo_events_for_commits: {
    #   cron: "0 */3 * * *", # Every 3 hours at minute 0
    #   class: "ScanRepoEventsForCommitsJob",
    #   description: "Scans repository host events (PushEvents) and enqueues jobs to process new commits."
    # },
    # cleanup_expired_email_verification_requests: {
    #   cron: "* * * * *",
    #   class: "CleanupExpiredEmailVerificationRequestsJob"
    # },
    # update_airtable_user_data: {
    #   cron: "0 13 * * *",
    #   class: "UpdateAirtableUserDataJob"
    # },

    # Cache-warming jobs: each forces a refresh of a cached aggregate.
    cache_active_user_graph_data_job: {
      cron: "*/10 * * * *",
      class: "Cache::ActiveUsersGraphDataJob",
      kwargs: { force_reload: true }
    },
    cache_currently_hacking: {
      cron: "* * * * *",
      class: "Cache::CurrentlyHackingJob",
      kwargs: { force_reload: true }
    },
    cache_home_stats: {
      cron: "*/10 * * * *",
      class: "Cache::HomeStatsJob",
      kwargs: { force_reload: true }
    },
    cache_active_projects: {
      cron: "* * * * *",
      class: "Cache::ActiveProjectsJob",
      kwargs: { force_reload: true }
    },
    cache_usage_social_proof: {
      cron: "* * * * *",
      class: "Cache::UsageSocialProofJob",
      kwargs: { force_reload: true }
    },
    cache_setup_social_proof: {
      cron: "* * * * *",
      class: "Cache::SetupSocialProofJob",
      kwargs: { force_reload: true }
    },
    cache_minutes_logged: {
      cron: "* * * * *",
      class: "Cache::MinutesLoggedJob",
      kwargs: { force_reload: true }
    },
    cache_heartbeat_counts: {
      cron: "* * * * *",
      class: "Cache::HeartbeatCountsJob",
      kwargs: { force_reload: true }
    },

    weekly_summary_email: {
      cron: "30 18 * * 0",
      class: "WeeklySummaryEmailJob",
      description: "Sends weekly coding summaries on Sundays at 18:30 GMT."
    },
    geocode_users_without_country: {
      cron: "7 * * * *",
      class: "GeocodeUsersWithoutCountryJob"
    },
    cleanup_successful_jobs: {
      cron: "0 0 * * *",
      class: "CleanupSuccessfulJobsJob"
    },
    process_account_deletions: {
      cron: "0 2 * * *",
      class: "ProcessAccountDeletionsJob",
      description: "nuke accounts after 30 days"
    },
    sync_stale_repo_metadata: {
      cron: "0 4 * * *",
      class: "SyncStaleRepoMetadataJob",
      description: "Refreshes repository metadata (stars, commit counts, etc.) for repositories with stale data."
    }
    # cleanup_old_leaderboards: {
    #   cron: "0 3 * * *", # daily at 3
    #   class: "CleanupOldLeaderboardsJob",
    #   description: "Remove leaderboards older than 2 days"
    # }
  }
end