diff options
| author | Dawid Rycerz <dawid@rycerz.xyz> | 2026-02-15 21:27:00 +0100 |
|---|---|---|
| committer | Dawid Rycerz <dawid@rycerz.xyz> | 2026-02-15 21:27:00 +0100 |
| commit | ce0dbf6b249956700c6a1705bf4ad85a09d53e8c (patch) | |
| tree | d7c3236807cfbf75d7f3a355eb5df5a5e2cc4ad7 /src/cleanup.rs | |
| parent | 064a1d01c5c14f5ecc032fa9b8346a4a88b893f6 (diff) | |
Switch, cleanup, and status CLI commands. Persistent build state via
state.json. Post-deploy hooks on success and failure with
WITRYNA_BUILD_STATUS. Dependency diet (axum→tiny_http, clap→argh,
tracing→log). Drop built-in rate limiting. Nix flake with NixOS module.
Arch Linux PKGBUILD. Centralized version management.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'src/cleanup.rs')
| -rw-r--r-- | src/cleanup.rs | 92 |
1 file changed, 74 insertions, 18 deletions
diff --git a/src/cleanup.rs b/src/cleanup.rs index ced8320..b2b068b 100644 --- a/src/cleanup.rs +++ b/src/cleanup.rs @@ -1,6 +1,7 @@ +use crate::state; use anyhow::{Context as _, Result}; +use log::{debug, info, warn}; use std::path::Path; -use tracing::{debug, info, warn}; /// Result of a cleanup operation. #[derive(Debug, Default)] @@ -35,7 +36,7 @@ pub async fn cleanup_old_builds( ) -> Result<CleanupResult> { // If max_to_keep is 0, keep all builds if max_to_keep == 0 { - debug!(%site_name, "max_builds_to_keep is 0, skipping cleanup"); + debug!("[{site_name}] max_builds_to_keep is 0, skipping cleanup"); return Ok(CleanupResult::default()); } @@ -44,7 +45,7 @@ pub async fn cleanup_old_builds( // Check if builds directory exists if !builds_dir.exists() { - debug!(%site_name, "builds directory does not exist, skipping cleanup"); + debug!("[{site_name}] builds directory does not exist, skipping cleanup"); return Ok(CleanupResult::default()); } @@ -59,10 +60,14 @@ pub async fn cleanup_old_builds( // Calculate how many to remove let to_remove = build_timestamps.len().saturating_sub(max_to_keep as usize); if to_remove == 0 { - debug!(%site_name, count = build_timestamps.len(), max = max_to_keep, "no builds to remove"); + debug!( + "[{site_name}] no builds to remove: count={} max={max_to_keep}", + build_timestamps.len() + ); } // Remove oldest builds (they're at the end after reverse sort) + let mut removed_timestamps = Vec::new(); for timestamp in build_timestamps.iter().skip(max_to_keep as usize) { let build_path = builds_dir.join(timestamp); let log_path = site_log_dir.join(format!("{timestamp}.log")); @@ -70,11 +75,15 @@ pub async fn cleanup_old_builds( // Remove build directory match tokio::fs::remove_dir_all(&build_path).await { Ok(()) => { - debug!(path = %build_path.display(), "removed old build"); + debug!("removed old build: {}", build_path.display()); result.builds_removed += 1; + removed_timestamps.push(timestamp.clone()); } Err(e) => { - warn!(path = 
%build_path.display(), error = %e, "failed to remove old build"); + warn!( + "failed to remove old build: path={} error={e}", + build_path.display() + ); } } @@ -82,11 +91,14 @@ pub async fn cleanup_old_builds( if log_path.exists() { match tokio::fs::remove_file(&log_path).await { Ok(()) => { - debug!(path = %log_path.display(), "removed old log"); + debug!("removed old log: {}", log_path.display()); result.logs_removed += 1; } Err(e) => { - warn!(path = %log_path.display(), error = %e, "failed to remove old log"); + warn!( + "failed to remove old log: path={} error={e}", + log_path.display() + ); } } } @@ -95,18 +107,24 @@ pub async fn cleanup_old_builds( let hook_log_path = site_log_dir.join(format!("{timestamp}-hook.log")); match tokio::fs::remove_file(&hook_log_path).await { Ok(()) => { - debug!(path = %hook_log_path.display(), "removed old hook log"); + debug!("removed old hook log: {}", hook_log_path.display()); result.logs_removed += 1; } Err(e) if e.kind() == std::io::ErrorKind::NotFound => { // Not every build has a hook — silently skip } Err(e) => { - warn!(path = %hook_log_path.display(), error = %e, "failed to remove old hook log"); + warn!( + "failed to remove old hook log: path={} error={e}", + hook_log_path.display() + ); } } } + // Prune removed builds from state.json + state::remove_builds(base_dir, site_name, &removed_timestamps).await; + // Remove orphaned temp files (crash recovery) if site_log_dir.exists() && let Ok(mut entries) = tokio::fs::read_dir(&site_log_dir).await @@ -117,10 +135,13 @@ pub async fn cleanup_old_builds( let path = entry.path(); match tokio::fs::remove_file(&path).await { Ok(()) => { - debug!(path = %path.display(), "removed orphaned temp file"); + debug!("removed orphaned temp file: {}", path.display()); } Err(e) => { - warn!(path = %path.display(), error = %e, "failed to remove orphaned temp file"); + warn!( + "failed to remove orphaned temp file: path={} error={e}", + path.display() + ); } } } @@ -129,10 +150,8 @@ pub 
async fn cleanup_old_builds( if result.builds_removed > 0 || result.logs_removed > 0 { info!( - %site_name, - builds_removed = result.builds_removed, - logs_removed = result.logs_removed, - "cleanup completed" + "[{site_name}] cleanup completed: builds_removed={} logs_removed={}", + result.builds_removed, result.logs_removed ); } @@ -142,7 +161,11 @@ pub async fn cleanup_old_builds( /// List all build timestamps in a builds directory. /// /// Returns directory names that look like timestamps, excluding 'current' symlink. -async fn list_build_timestamps(builds_dir: &Path) -> Result<Vec<String>> { +/// +/// # Errors +/// +/// Returns an error if the builds directory cannot be read or entries cannot be inspected. +pub async fn list_build_timestamps(builds_dir: &Path) -> Result<Vec<String>> { let mut timestamps = Vec::new(); let mut entries = tokio::fs::read_dir(builds_dir) @@ -176,7 +199,8 @@ async fn list_build_timestamps(builds_dir: &Path) -> Result<Vec<String>> { /// Check if a string looks like a valid timestamp format. /// /// Expected format: YYYYMMDD-HHMMSS-microseconds (e.g., 20260126-143000-123456) -fn looks_like_timestamp(s: &str) -> bool { +#[must_use] +pub fn looks_like_timestamp(s: &str) -> bool { let parts: Vec<&str> = s.split('-').collect(); let [date, time, micros, ..] 
= parts.as_slice() else { return false; @@ -410,6 +434,38 @@ mod tests { } #[tokio::test] + async fn cleanup_does_not_delete_state_json() { + let base_dir = temp_dir("cleanup-test").await; + let log_dir = base_dir.join("logs"); + let site = "test-site"; + + // Create 3 builds (keep 1 → remove 2) + for ts in &[ + "20260126-100000-000001", + "20260126-100000-000002", + "20260126-100000-000003", + ] { + create_build_and_log(&base_dir, &log_dir, site, ts).await; + } + + // Write a state.json in the builds dir + let state_path = base_dir.join("builds").join(site).join("state.json"); + fs::write(&state_path, r#"{"status":"success"}"#) + .await + .unwrap(); + + let result = cleanup_old_builds(&base_dir, &log_dir, site, 1).await; + assert!(result.is_ok()); + let result = result.unwrap(); + assert_eq!(result.builds_removed, 2); + + // state.json must still exist + assert!(state_path.exists(), "state.json must not be deleted"); + + cleanup(&base_dir).await; + } + + #[tokio::test] async fn cleanup_removes_orphaned_tmp_files() { let base_dir = temp_dir("cleanup-test").await; let log_dir = base_dir.join("logs"); |
