fix: change stats calculations

Leafd 2025-10-08 19:05:10 -04:00 committed by Leafd
parent fc9c49f7ff
commit e5c5c28e2b
4 changed files with 402 additions and 54 deletions

View file

@@ -11,6 +11,7 @@
"tauri": "tauri"
},
"dependencies": {
"@sentry/vue": "^10.18.0",
"@tauri-apps/api": "^2",
"@tauri-apps/plugin-deep-link": "^2.4.3",
"@tauri-apps/plugin-opener": "^2",
@@ -18,6 +19,7 @@
"@tauri-apps/plugin-updater": "~2",
"chart.js": "^4.5.0",
"crypto-js": "^4.2.0",
"posthog-js": "^1.273.1",
"vue": "^3.5.13",
"vue-chartjs": "^5.3.2"
},

pnpm-lock.yaml (generated, 118 changed lines)
View file

@@ -8,6 +8,9 @@ importers:
.:
dependencies:
'@sentry/vue':
specifier: ^10.18.0
version: 10.18.0(vue@3.5.22(typescript@5.9.3))
'@tauri-apps/api':
specifier: ^2
version: 2.8.0
@@ -29,6 +32,9 @@ importers:
crypto-js:
specifier: ^4.2.0
version: 4.2.0
posthog-js:
specifier: ^1.273.1
version: 1.273.1
vue:
specifier: ^3.5.13
version: 3.5.22(typescript@5.9.3)
@@ -259,6 +265,9 @@ packages:
'@kurkle/color@0.3.4':
resolution: {integrity: sha512-M5UknZPHRu3DEDWoipU6sE8PdkZ6Z/S+v4dD+Ke8IaNlpdSQah50lz1KtcFBa2vsdOnwbbnxJwVM4wty6udA5w==}
'@posthog/core@1.2.2':
resolution: {integrity: sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg==}
'@rollup/rollup-android-arm-eabi@4.52.3':
resolution: {integrity: sha512-h6cqHGZ6VdnwliFG1NXvMPTy/9PS3h8oLh7ImwR+kl+oYnQizgjxsONmmPSb2C66RksfkfIxEVtDSEcJiO0tqw==}
cpu: [arm]
@@ -369,6 +378,40 @@ packages:
cpu: [x64]
os: [win32]
'@sentry-internal/browser-utils@10.18.0':
resolution: {integrity: sha512-6Y5VkNcj5ecIFsKdL8/7hrLt7pCuWR4BRLsKOHAmhdCnXtobf7v6DeBow2Hk5yEYO0AwjP5mqvoBAewbS+h3GA==}
engines: {node: '>=18'}
'@sentry-internal/feedback@10.18.0':
resolution: {integrity: sha512-uuupIivGPCpRStMU1I3sYPgD+pl8PqNV1DSVgVS5LF99h8tqjmRGS1xkCrUaUhVhVmsnxzbnvXb1hsOaCXX7DA==}
engines: {node: '>=18'}
'@sentry-internal/replay-canvas@10.18.0':
resolution: {integrity: sha512-asp1biXA+F5HAKl7RvPbf5s087bg1bpxMB9E69xWc1ECUfFMPrFRNS7mAJ5A8DTd1K74E9cFsLl6zO29HpH4+w==}
engines: {node: '>=18'}
'@sentry-internal/replay@10.18.0':
resolution: {integrity: sha512-ixr3K19q4oTRgM0xANi+8ThDUbxV5iixUIgvJrT7c1L6yyidovIwO0D82ZY3phUfMkgE+mX3cxX46gXTRTglKQ==}
engines: {node: '>=18'}
'@sentry/browser@10.18.0':
resolution: {integrity: sha512-JrPfxjCsuVYUe16U4fo4W2Fn0f9BwRev3G28a4ZIkwKwJo+qSnIk1mT8Eam8nwNCU8MZjB4KNE9w2p0kaoQxvQ==}
engines: {node: '>=18'}
'@sentry/core@10.18.0':
resolution: {integrity: sha512-zlhAlzc/Qpza8f/CMUb7zg/9FOhWouKAm9zyV9jZlx9lL6WceVbUEwQ3rq8ncGgM+LMwlASCOjsz5a728vAhCw==}
engines: {node: '>=18'}
'@sentry/vue@10.18.0':
resolution: {integrity: sha512-SC6vzLtVslNZMWgFU1PHvDWMV8XXC+YCysNGf7dWeSuR8mq/aymEY/ZE1MtNyh+kcKoeUfLDEhYVrMsxXKxwIw==}
engines: {node: '>=18'}
peerDependencies:
pinia: 2.x || 3.x
vue: 2.x || 3.x
peerDependenciesMeta:
pinia:
optional: true
'@tailwindcss/node@4.1.14':
resolution: {integrity: sha512-hpz+8vFk3Ic2xssIA3e01R6jkmsAhvkQdXlEbRTk6S10xDAtiQiM3FyvZVGsucefq764euO/b8WUW9ysLdThHw==}
@@ -624,6 +667,9 @@ packages:
resolution: {integrity: sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==}
engines: {node: '>=18'}
core-js@3.45.1:
resolution: {integrity: sha512-L4NPsJlCfZsPeXukyzHFlg/i7IIVwHSItR0wg0FLNqYClJ4MQYTYLbC7EkjKYRLZF2iof2MUgN0EGy7MdQFChg==}
crypto-js@4.2.0:
resolution: {integrity: sha512-KALDyEYgpY+Rlob/iriUtjV6d5Eq+Y191A5g4UqLAi8CyGP9N1+FdVbkc1SxKc2r4YAYqG8JzO2KGL+AizD70Q==}
@@ -662,6 +708,9 @@ packages:
picomatch:
optional: true
fflate@0.4.8:
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
fsevents@2.3.3:
resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==}
engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0}
@@ -779,6 +828,20 @@ packages:
resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==}
engines: {node: ^10 || ^12 || >=14}
posthog-js@1.273.1:
resolution: {integrity: sha512-6w3j6nAWJj7W7/iksWLXRpdLrLZrQA8jTsEQ71bvmyw4bwCqhgPfxutrmeoAUNaxot2FB1JHc9Lagslg35h61g==}
peerDependencies:
'@rrweb/types': 2.0.0-alpha.17
rrweb-snapshot: 2.0.0-alpha.17
peerDependenciesMeta:
'@rrweb/types':
optional: true
rrweb-snapshot:
optional: true
preact@10.27.2:
resolution: {integrity: sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg==}
rollup@4.52.3:
resolution: {integrity: sha512-RIDh866U8agLgiIcdpB+COKnlCreHJLfIhWC3LVflku5YHfpnsIKigRZeFfMfCc4dVcqNVfQQ5gO/afOck064A==}
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
@@ -871,6 +934,9 @@ packages:
typescript:
optional: true
web-vitals@4.2.4:
resolution: {integrity: sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==}
yallist@5.0.0:
resolution: {integrity: sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==}
engines: {node: '>=18'}
@@ -993,6 +1059,8 @@ snapshots:
'@kurkle/color@0.3.4': {}
'@posthog/core@1.2.2': {}
'@rollup/rollup-android-arm-eabi@4.52.3':
optional: true
@@ -1059,6 +1127,40 @@ snapshots:
'@rollup/rollup-win32-x64-msvc@4.52.3':
optional: true
'@sentry-internal/browser-utils@10.18.0':
dependencies:
'@sentry/core': 10.18.0
'@sentry-internal/feedback@10.18.0':
dependencies:
'@sentry/core': 10.18.0
'@sentry-internal/replay-canvas@10.18.0':
dependencies:
'@sentry-internal/replay': 10.18.0
'@sentry/core': 10.18.0
'@sentry-internal/replay@10.18.0':
dependencies:
'@sentry-internal/browser-utils': 10.18.0
'@sentry/core': 10.18.0
'@sentry/browser@10.18.0':
dependencies:
'@sentry-internal/browser-utils': 10.18.0
'@sentry-internal/feedback': 10.18.0
'@sentry-internal/replay': 10.18.0
'@sentry-internal/replay-canvas': 10.18.0
'@sentry/core': 10.18.0
'@sentry/core@10.18.0': {}
'@sentry/vue@10.18.0(vue@3.5.22(typescript@5.9.3))':
dependencies:
'@sentry/browser': 10.18.0
'@sentry/core': 10.18.0
vue: 3.5.22(typescript@5.9.3)
'@tailwindcss/node@4.1.14':
dependencies:
'@jridgewell/remapping': 2.3.5
@@ -1302,6 +1404,8 @@ snapshots:
chownr@3.0.0: {}
core-js@3.45.1: {}
crypto-js@4.2.0: {}
csstype@3.1.3: {}
@@ -1352,6 +1456,8 @@ snapshots:
optionalDependencies:
picomatch: 4.0.3
fflate@0.4.8: {}
fsevents@2.3.3:
optional: true
@@ -1436,6 +1542,16 @@ snapshots:
picocolors: 1.1.1
source-map-js: 1.2.1
posthog-js@1.273.1:
dependencies:
'@posthog/core': 1.2.2
core-js: 3.45.1
fflate: 0.4.8
preact: 10.27.2
web-vitals: 4.2.4
preact@10.27.2: {}
rollup@4.52.3:
dependencies:
'@types/estree': 1.0.8
@@ -1521,4 +1637,6 @@ snapshots:
optionalDependencies:
typescript: 5.9.3
web-vitals@4.2.4: {}
yallist@5.0.0: {}

View file

@@ -15,7 +15,16 @@ pub struct HeartbeatData {
pub language: Option<String>,
pub entity: Option<String>,
pub time: f64,
#[serde(default)]
pub timestamp: i64,
#[serde(skip_serializing_if = "Option::is_none")]
pub created_at: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub category: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub operating_system: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub machine: Option<String>,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
@@ -60,23 +69,33 @@ pub async fn get_latest_heartbeat(
.ok_or("No access token available")?;
let client = reqwest::Client::new();
let request_url = format!(
"{}/api/v1/authenticated/heartbeats/latest",
base_url
);
push_log("info", "backend", format!("Fetching latest heartbeat from: {}", request_url));
let response = client
.get(&format!(
"{}/api/v1/authenticated/heartbeats/latest",
base_url
))
.get(&request_url)
.bearer_auth(access_token)
.send()
.await
.map_err(|e| format!("Failed to get latest heartbeat: {}", e))?;
.map_err(|e| {
push_log("error", "backend", format!("HTTP request failed: {}", e));
format!("Failed to get latest heartbeat: {}", e)
})?;
let status = response.status();
push_log("info", "backend", format!("API response status: {} {}", status.as_u16(), status.canonical_reason().unwrap_or("")));
if !status.is_success() {
let error_text = response
.text()
.await
.unwrap_or_else(|_| "Unknown error".to_string());
push_log("error", "backend", format!("API returned error response: {}", error_text));
if status == 429 {
push_log("warn", "backend", "Rate limited, will retry later".to_string());
@@ -86,13 +105,53 @@ pub async fn get_latest_heartbeat(
return Err(format!("Failed to get latest heartbeat: {}", error_text));
}
let heartbeat_response: HeartbeatResponse = response
.json()
let response_text = response
.text()
.await
.map_err(|e| format!("Failed to parse heartbeat response: {}", e))?;
.map_err(|e| {
push_log("error", "backend", format!("Failed to read response body: {}", e));
format!("Failed to read response: {}", e)
})?;
push_log("info", "backend", format!("Raw API response: {}", response_text));
let heartbeat_data: Option<HeartbeatData> = if response_text.trim() == "null" || response_text.trim().is_empty() {
None
} else {
match serde_json::from_str::<HeartbeatData>(&response_text) {
Ok(mut data) => {
if data.timestamp == 0 {
data.timestamp = data.time as i64;
}
push_log("info", "backend", format!("Successfully parsed heartbeat data: {:?}", data));
Some(data)
}
Err(e) => {
push_log("error", "backend", format!("Failed to parse heartbeat JSON: {}", e));
None
}
}
};
let heartbeat_response = HeartbeatResponse {
heartbeat: heartbeat_data,
};
push_log("info", "backend", format!("Latest heartbeat response: {:?}", heartbeat_response));
if let Some(heartbeat) = &heartbeat_response.heartbeat {
push_log("info", "backend", format!(
"Heartbeat details - ID: {}, Project: {}, Language: {}, Editor: {}, Time: {}",
heartbeat.id,
heartbeat.project.as_ref().unwrap_or(&"None".to_string()),
heartbeat.language.as_ref().unwrap_or(&"None".to_string()),
heartbeat.editor.as_ref().unwrap_or(&"None".to_string()),
heartbeat.timestamp
));
let mut session = session_state.lock().await;
let current_time = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
@@ -101,14 +160,21 @@ pub async fn get_latest_heartbeat(
let heartbeat_age = current_time - heartbeat.timestamp;
let is_recent = heartbeat_age < 120;
let is_recent = heartbeat_age < 180;
push_log("debug", "backend", format!(
"Heartbeat age: {} seconds, is_recent: {}",
heartbeat_age,
is_recent
));
let is_duplicate = session.last_heartbeat_id == Some(heartbeat.id);
if is_duplicate {
push_log("info", "backend", "Duplicate heartbeat detected, ending session".to_string());
if is_duplicate && is_recent {
push_log("debug", "backend", "Duplicate heartbeat but still recent, continuing session".to_string());
} else if is_duplicate && !is_recent {
push_log("info", "backend", "Duplicate heartbeat and too old, ending session".to_string());
session.is_active = false;
session.start_time = None;
session.last_heartbeat_id = None;
@@ -183,6 +249,7 @@ pub async fn get_latest_heartbeat(
}
} else {
push_log("info", "backend", "No heartbeat data in response (heartbeat is null)".to_string());
let mut session = session_state.lock().await;
if session.is_active {
push_log("info", "backend", "No heartbeat data, ending session".to_string());

View file

@@ -66,37 +66,71 @@ async fn fetch_hours_with_cache(
let db = Database::new().await?;
let cache_key = format!("hours:{}:{}", start_date, end_date);
if let Ok(Some(cached_data)) = db.get_cached_data(&cache_key).await {
push_log("debug", "backend", format!("Using cached data for {}", cache_key));
return serde_json::from_str(&cached_data)
.map_err(|e| format!("Failed to parse cached data: {}", e));
let today = chrono::Utc::now().date_naive().format("%Y-%m-%d").to_string();
let is_current_date = end_date == today || start_date == today;
if !is_current_date {
if let Ok(Some(cached_data)) = db.get_cached_data(&cache_key).await {
push_log("debug", "backend", format!("Using cached data for {}", cache_key));
let result: serde_json::Value = serde_json::from_str(&cached_data)
.map_err(|e| format!("Failed to parse cached data: {}", e))?;
push_log("info", "backend", format!("CACHED DATA for {}: {}", cache_key, serde_json::to_string_pretty(&result).unwrap_or_else(|_| "Failed to serialize".to_string())));
return Ok(result);
}
} else {
push_log("info", "backend", format!("Skipping cache for current date: {}", cache_key));
}
push_log("debug", "backend", format!("Fetching fresh data for {}", cache_key));
let url = format!(
"{}/api/v1/authenticated/hours?start_date={}&end_date={}",
base_url,
start_date,
end_date
);
push_log("info", "backend", format!("API REQUEST: GET {}", url));
push_log("debug", "backend", format!("Authorization: Bearer {}...", &access_token[..20.min(access_token.len())]));
let response = client
.get(&format!(
"{}/api/v1/authenticated/hours?start_date={}&end_date={}",
base_url,
start_date,
end_date
))
.get(&url)
.bearer_auth(access_token)
.send()
.await
.map_err(|e| format!("Failed to fetch hours: {}", e))?;
.map_err(|e| {
let error_msg = format!("Failed to fetch hours: {}", e);
push_log("error", "backend", format!(" API REQUEST FAILED: {}", error_msg));
error_msg
})?;
if !response.status().is_success() {
return Err(format!("API request failed with status: {}", response.status()));
let status = response.status();
push_log("info", "backend", format!("API RESPONSE: Status {}", status));
if !status.is_success() {
let error_msg = format!("API request failed with status: {}", status);
push_log("error", "backend", format!(" {}", error_msg));
return Err(error_msg);
}
let data: serde_json::Value = response
.json()
.await
.map_err(|e| format!("Failed to parse API response: {}", e))?;
.map_err(|e| {
let error_msg = format!("Failed to parse API response: {}", e);
push_log("error", "backend", format!(" {}", error_msg));
error_msg
})?;
let data_str = serde_json::to_string(&data)
.map_err(|e| format!("Failed to serialize data for caching: {}", e))?;
db.set_cached_data(&cache_key, &data_str, 30).await.ok();
push_log("info", "backend", format!("API RESPONSE DATA: {}", serde_json::to_string_pretty(&data).unwrap_or_else(|_| "Failed to serialize".to_string())));
if !is_current_date {
let data_str = serde_json::to_string(&data)
.map_err(|e| format!("Failed to serialize data for caching: {}", e))?;
db.set_cached_data(&cache_key, &data_str, 30).await.ok();
push_log("debug", "backend", format!("Cached data for {}", cache_key));
} else {
push_log("debug", "backend", format!("Not caching data for current date: {}", cache_key));
}
Ok(data)
}
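The rewritten fetch_hours_with_cache above only reads and writes cache entries when neither end of the requested range is today, so current-day hours are always refetched. A small sketch of that decision on its own, assuming chrono; the function name should_use_cache is illustrative:

use chrono::Utc;

// Mirrors the `is_current_date` check: cache entries are only trusted for ranges
// that do not touch today's date.
fn should_use_cache(start_date: &str, end_date: &str) -> bool {
    let today = Utc::now().date_naive().format("%Y-%m-%d").to_string();
    start_date != today && end_date != today
}

fn main() {
    // A fully past range may be served from cache; a range touching today may not.
    println!("{}", should_use_cache("2025-09-01", "2025-09-07"));
}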
@@ -106,22 +140,41 @@ async fn fetch_streak_with_cache(
base_url: &str,
access_token: &str,
) -> Result<serde_json::Value, String> {
push_log("info", "backend", format!("Fetching streak data from API"));
let url = format!("{}/api/v1/authenticated/streak", base_url);
push_log("info", "backend", format!("API REQUEST: GET {}", url));
push_log("debug", "backend", format!("Authorization: Bearer {}...", &access_token[..20.min(access_token.len())]));
let response = client
.get(&format!("{}/api/v1/authenticated/streak", base_url))
.get(&url)
.bearer_auth(access_token)
.send()
.await
.map_err(|e| format!("Failed to fetch streak: {}", e))?;
.map_err(|e| {
let error_msg = format!("Failed to fetch streak: {}", e);
push_log("error", "backend", format!(" API REQUEST FAILED: {}", error_msg));
error_msg
})?;
if !response.status().is_success() {
return Err(format!("Streak API request failed with status: {}", response.status()));
let status = response.status();
push_log("info", "backend", format!(" API RESPONSE: Status {}", status));
if !status.is_success() {
let error_msg = format!("Streak API request failed with status: {}", status);
push_log("error", "backend", format!(" {}", error_msg));
return Err(error_msg);
}
let data: serde_json::Value = response
.json()
.await
.map_err(|e| format!("Failed to parse streak response: {}", e))?;
.map_err(|e| {
let error_msg = format!("Failed to parse streak response: {}", e);
push_log("error", "backend", format!(" {}", error_msg));
error_msg
})?;
push_log("info", "backend", format!(" STREAK API RESPONSE DATA: {}", serde_json::to_string_pretty(&data).unwrap_or_else(|_| "Failed to serialize".to_string())));
Ok(data)
}
@@ -214,6 +267,31 @@ pub async fn get_statistics_data(
Err(_) => 0
};
let prev_week_end = end_date - chrono::Duration::days(7);
let prev_week_start = prev_week_end - chrono::Duration::days(7);
push_log("info", "backend", format!("[get_statistics_data] Fetching previous week: {} to {}",
prev_week_start.format("%Y-%m-%d"), prev_week_end.format("%Y-%m-%d")));
let prev_week_seconds = match fetch_hours_with_cache(
&client,
base_url,
access_token,
&prev_week_start.format("%Y-%m-%d").to_string(),
&prev_week_end.format("%Y-%m-%d").to_string()
).await {
Ok(data) => {
let seconds = data["total_seconds"].as_u64().unwrap_or(0);
push_log("info", "backend", format!("[get_statistics_data] Previous week: {} seconds ({:.2}h)",
seconds, seconds as f64 / 3600.0));
seconds
},
Err(e) => {
push_log("error", "backend", format!("[get_statistics_data] Failed to fetch previous week: {}", e));
0
}
};
let hours_data = serde_json::json!({
"weekly_stats": {
"time_coded_seconds": total_seconds,
@@ -221,6 +299,10 @@ pub async fn get_statistics_data(
},
"all_time_stats": {
"time_coded_seconds": all_time_seconds
},
"calculated_metrics": {
"prev_week_hours": (prev_week_seconds as f64 / 3600.0 * 10.0).round() / 10.0,
"prev_week_seconds": prev_week_seconds
}
});
@@ -229,7 +311,8 @@ pub async fn get_statistics_data(
.map_err(|e| format!("Failed to fetch streak data: {}", e))?;
let mut dashboard_stats = hours_data;
if let Some(streak) = streak_data.get("current_streak") {
if let Some(streak) = streak_data.get("streak_days") {
dashboard_stats["current_streak"] = streak.clone();
}
if let Some(longest) = streak_data.get("longest_streak") {
@@ -246,9 +329,12 @@ pub async fn get_dashboard_stats(
api_config: ApiConfig,
state: State<'_, Arc<tauri::async_runtime::Mutex<AuthState>>>,
) -> Result<serde_json::Value, String> {
push_log("info", "backend", "get_dashboard_stats called".to_string());
let auth_state = state.lock().await;
if !auth_state.is_authenticated {
push_log("error", "backend", "Not authenticated".to_string());
return Err("Not authenticated".to_string());
}
@@ -257,42 +343,59 @@ pub async fn get_dashboard_stats(
} else {
&api_config.base_url
};
push_log("info", "backend", format!("Using base URL: {}", base_url));
let access_token = auth_state
.access_token
.as_ref()
.ok_or("No access token available")?;
push_log("debug", "backend", format!("Access token present: {}...", &access_token[..20.min(access_token.len())]));
let client = reqwest::Client::new();
let end_date = chrono::Utc::now().date_naive();
let start_date = end_date - chrono::Duration::days(7);
push_log("info", "backend", format!("Date range: {} to {}", start_date.format("%Y-%m-%d"), end_date.format("%Y-%m-%d")));
push_log("info", "backend", "Fetching current week hours...".to_string());
let _current_week_data = fetch_hours_with_cache(
&client,
base_url,
access_token,
&start_date.format("%Y-%m-%d").to_string(),
&end_date.format("%Y-%m-%d").to_string()
).await.map_err(|e| format!("Failed to fetch current week hours: {}", e))?;
).await.map_err(|e| {
push_log("error", "backend", format!("Failed to fetch current week hours: {}", e));
format!("Failed to fetch current week hours: {}", e)
})?;
let prev_week_end = start_date;
let prev_week_start = prev_week_end - chrono::Duration::days(7);
push_log("info", "backend", format!("Fetching previous week hours: {} to {}", prev_week_start.format("%Y-%m-%d"), prev_week_end.format("%Y-%m-%d")));
let prev_week_data = fetch_hours_with_cache(
&client,
base_url,
access_token,
&prev_week_start.format("%Y-%m-%d").to_string(),
&prev_week_end.format("%Y-%m-%d").to_string()
).await.unwrap_or_else(|_| serde_json::json!({"total_seconds": 0}));
).await.unwrap_or_else(|e| {
push_log("warn", "backend", format!("Failed to fetch previous week hours: {}", e));
serde_json::json!({"total_seconds": 0})
});
let mut daily_hours = serde_json::Map::new();
let mut total_seconds = 0u64;
push_log("info", "backend", "Fetching daily hours for last 7 days...".to_string());
for days_ago in 0..7 {
let date = end_date - chrono::Duration::days(days_ago);
let date_str = date.format("%Y-%m-%d").to_string();
@@ -303,6 +406,9 @@ pub async fn get_dashboard_stats(
let seconds = day_data["total_seconds"].as_u64().unwrap_or(0);
total_seconds += seconds;
push_log("info", "backend", format!(" {} ({} days ago): {} seconds ({:.2} hours)",
date_str, days_ago, seconds, seconds as f64 / 3600.0));
let day_name = match date.weekday() {
chrono::Weekday::Mon => "Mon",
chrono::Weekday::Tue => "Tue",
@@ -320,7 +426,9 @@ pub async fn get_dashboard_stats(
"seconds": seconds
}));
}
Err(_) => {
Err(e) => {
push_log("warn", "backend", format!("Failed to fetch hours for {}: {}", date_str, e));
let day_name = match date.weekday() {
chrono::Weekday::Mon => "Mon",
chrono::Weekday::Tue => "Tue",
@@ -341,15 +449,23 @@ pub async fn get_dashboard_stats(
}
}
push_log("info", "backend", format!("Total seconds for last 7 days: {} ({:.2} hours)", total_seconds, total_seconds as f64 / 3600.0));
push_log("info", "backend", "Fetching streak data...".to_string());
let streak_data = fetch_streak_with_cache(&client, base_url, access_token)
.await
.unwrap_or_else(|_| serde_json::json!({"current_streak": 0, "longest_streak": 0}));
.unwrap_or_else(|e| {
push_log("error", "backend", format!("Failed to fetch streak data: {}", e));
serde_json::json!({"current_streak": 0, "longest_streak": 0})
});
let current_week_seconds = total_seconds as f64;
let prev_week_seconds = prev_week_data["total_seconds"].as_f64().unwrap_or(0.0);
push_log("info", "backend", format!("Week comparison: Current week = {:.2}h, Previous week = {:.2}h",
current_week_seconds / 3600.0, prev_week_seconds / 3600.0));
let daily_average_hours = current_week_seconds / 3600.0 / 7.0;
@@ -364,10 +480,18 @@ pub async fn get_dashboard_stats(
} else {
0.0
};
let current_streak = streak_data["streak_days"].as_u64().unwrap_or(0);
let longest_streak = streak_data["longest_streak"].as_u64().unwrap_or(0);
push_log("info", "backend", format!("Streak: Current = {} days (from API field 'streak_days'), Longest = {} days", current_streak, longest_streak));
push_log("info", "backend", format!("Metrics: Daily average = {:.2}h, Weekly hours = {:.2}h, Weekly change = {:.1}%",
daily_average_hours, weekly_hours, weekly_change_percent));
Ok(serde_json::json!({
"current_streak": streak_data["current_streak"].as_u64().unwrap_or(0),
"longest_streak": streak_data["longest_streak"].as_u64().unwrap_or(0),
let result = serde_json::json!({
"current_streak": current_streak,
"longest_streak": longest_streak,
"weekly_stats": {
"time_coded_seconds": total_seconds,
"daily_hours": daily_hours
@@ -376,9 +500,14 @@ pub async fn get_dashboard_stats(
"daily_average_hours": (daily_average_hours * 10.0).round() / 10.0,
"weekly_hours": (weekly_hours * 10.0).round() / 10.0,
"weekly_change_percent": weekly_change_percent,
"prev_week_hours": (prev_week_seconds / 3600.0 * 10.0).round() / 10.0
"prev_week_hours": (prev_week_seconds / 3600.0 * 10.0).round() / 10.0,
"prev_week_seconds": prev_week_seconds
}
}))
});
push_log("info", "backend", format!("FINAL DASHBOARD STATS: {}", serde_json::to_string_pretty(&result).unwrap_or_else(|_| "Failed to serialize".to_string())));
Ok(result)
}
async fn process_statistics_data(
@@ -392,9 +521,22 @@ async fn process_statistics_data(
let all_time_time = dashboard_stats["all_time_stats"]["time_coded_seconds"]
.as_u64()
.unwrap_or(0) as f64;
let prev_week_time = dashboard_stats["calculated_metrics"]["prev_week_seconds"]
.as_f64()
.unwrap_or_else(|| {
dashboard_stats["calculated_metrics"]["prev_week_hours"]
.as_f64()
.unwrap_or(0.0) * 3600.0
});
push_log("info", "backend", format!("[process_statistics_data] Using previous week: {} seconds ({:.2}h)",
prev_week_time, prev_week_time / 3600.0));
push_log("info", "backend", format!("[process_statistics_data] Current week: {} seconds ({:.2}h)",
weekly_time, weekly_time / 3600.0));
let trends = calculate_trends(weekly_time, current_streak).await;
let trends = calculate_trends(weekly_time, prev_week_time, current_streak).await;
let charts = generate_chart_data(&dashboard_stats).await?;
@@ -413,11 +555,12 @@ async fn process_statistics_data(
})
}
async fn calculate_trends(weekly_time: f64, current_streak: u64) -> Vec<TrendStatistic> {
async fn calculate_trends(weekly_time: f64, prev_week_time: f64, current_streak: u64) -> Vec<TrendStatistic> {
let mut trends = Vec::new();
let last_week_time = weekly_time * 0.85;
let last_week_time = prev_week_time;
let last_week_streak = if current_streak > 0 {
current_streak - 1
} else {
@@ -425,7 +568,18 @@ async fn calculate_trends(weekly_time: f64, current_streak: u64) -> Vec<TrendSta
};
let time_change = ((weekly_time - last_week_time) / last_week_time * 100.0).round() as i32;
let time_change = if last_week_time > 0.0 {
let change = ((weekly_time - last_week_time) / last_week_time * 100.0).round() as i32;
push_log("info", "backend", format!("[calculate_trends] Weekly change: {:.2}h -> {:.2}h = {}%",
last_week_time / 3600.0, weekly_time / 3600.0, change));
change
} else if weekly_time > 0.0 {
push_log("info", "backend", "[calculate_trends] No previous week data, defaulting to +100%".to_string());
100
} else {
push_log("info", "backend", "[calculate_trends] No data for either week, 0%".to_string());
0
};
let time_trend = if time_change > 0 {
TrendStatistic {
title: "Weekly Coding Time".to_string(),
@@ -496,8 +650,15 @@ async fn calculate_trends(weekly_time: f64, current_streak: u64) -> Vec<TrendSta
let daily_average = weekly_time / 3600.0 / 7.0;
let last_week_daily = daily_average * 0.9;
let focus_change = ((daily_average - last_week_daily) / last_week_daily * 100.0).round() as i32;
let last_week_daily = last_week_time / 3600.0 / 7.0;
let focus_change = if last_week_daily > 0.0 {
((daily_average - last_week_daily) / last_week_daily * 100.0).round() as i32
} else if daily_average > 0.0 {
100
} else {
0
};
let focus_trend = if focus_change > 0 {
TrendStatistic {
@@ -754,7 +915,7 @@ async fn generate_insights(
description: "You're building your coding skills! Every hour counts.".to_string(),
value: format!("{:.0}h total", total_hours),
trend: "Growing skills".to_string(),
icon: "📚".to_string(),
icon: "".to_string(),
color: "#2196F3".to_string(),
}
} else {
@@ -763,7 +924,7 @@ async fn generate_insights(
description: "Every expert was once a beginner. Keep coding!".to_string(),
value: format!("{:.0}h total", total_hours),
trend: "Beginning journey".to_string(),
icon: "🌱".to_string(),
icon: "".to_string(),
color: "#9C27B0".to_string(),
}
};
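Taken together, the core of this commit is that calculate_trends now receives the real previous-week total (fetched via fetch_hours_with_cache and carried in calculated_metrics.prev_week_seconds) instead of assuming last week was 85% of the current week, and the percentage change is guarded against an empty previous week. A minimal standalone sketch of that calculation, with the function name weekly_change_percent chosen for illustration:

// Same shape as the new time_change logic: a real baseline gives a rounded percentage,
// no baseline with some activity defaults to +100%, and no data at all gives 0.
fn weekly_change_percent(current_secs: f64, prev_secs: f64) -> i32 {
    if prev_secs > 0.0 {
        ((current_secs - prev_secs) / prev_secs * 100.0).round() as i32
    } else if current_secs > 0.0 {
        100
    } else {
        0
    }
}

fn main() {
    // 10h this week vs 8h last week: (36000 - 28800) / 28800 * 100 = +25%
    assert_eq!(weekly_change_percent(36_000.0, 28_800.0), 25);
    assert_eq!(weekly_change_percent(3_600.0, 0.0), 100);
}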