Skip to content

Commit 956caa5

Browse files
committed
feat: implement cleanup endpoint for old sourcemap builds with payload handling
1 parent cb63731 commit 956caa5

File tree

2 files changed

+178
-1
lines changed

2 files changed

+178
-1
lines changed

apps/backend/src/routes.rs

Lines changed: 145 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ use axum::extract::State;
55
use axum::http::StatusCode;
66
use axum::routing::{delete, get, post};
77
use serde::{Deserialize, Serialize};
8+
use std::collections::{BTreeSet, HashSet};
89
use tower_http::trace::TraceLayer;
910
use tracing::info;
1011
use uuid::Uuid;
@@ -67,6 +68,13 @@ pub struct ApplyPayload {
6768
pub column: u32,
6869
}
6970

71+
/// Request body for the sourcemap-cleanup endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CleanupPayload {
    /// Build ids the caller wants to keep even when they are not the
    /// latest build. `#[serde(default)]` lets the field be omitted from
    /// the JSON body entirely (it then defaults to an empty list).
    #[serde(default)]
    pub excluded_build_ids: Vec<String>,
}
77+
7078
#[derive(Serialize)]
7179
#[serde(rename_all = "camelCase")]
7280
pub struct OriginalPosition {
@@ -83,6 +91,16 @@ pub struct ApplyResponse {
8391
pub original: OriginalPosition,
8492
}
8593

94+
/// Response body for the sourcemap-cleanup endpoint, summarizing which
/// builds were kept and which were deleted.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CleanupResponse {
    /// Always `true` on success; failures surface as `AppError` instead.
    pub ok: bool,
    /// Highest build id found for the project (builds are ordered via a
    /// `BTreeSet`, i.e. lexicographically); `None` when no builds exist.
    pub latest_build_id: Option<String>,
    /// The normalized (trimmed, deduplicated) exclusion list that was applied.
    pub excluded_build_ids: Vec<String>,
    /// Build ids whose sourcemap files were removed from storage.
    pub deleted_build_ids: Vec<String>,
    /// Number of object keys deleted from storage.
    pub deleted_files: u64,
}
103+
86104
pub fn public_router(state: SharedState) -> Router {
87105
Router::new()
88106
.route("/health", get(health))
@@ -98,6 +116,7 @@ pub fn internal_router(state: SharedState) -> Router {
98116
Router::new()
99117
.route("/health", get(health))
100118
.route("/internal/sourcemaps", delete(wipe))
119+
.route("/internal/sourcemaps/cleanup", delete(cleanup_old_builds))
101120
.route("/internal/sourcemaps", get(list_sourcemaps))
102121
.route("/internal/sourcemaps/apply", post(apply_sourcemap))
103122
.layer(TraceLayer::new_for_http())
@@ -239,6 +258,50 @@ pub async fn apply_sourcemap(
239258
}))
240259
}
241260

261+
pub async fn cleanup_old_builds(
262+
auth: AdminAuthenticatedProject,
263+
State(state): State<SharedState>,
264+
Json(payload): Json<CleanupPayload>,
265+
) -> Result<Json<CleanupResponse>, AppError> {
266+
let project_id = auth.project_id;
267+
let prefix = format!("{project_id}/");
268+
let keys = state.storage.list_prefix_keys(&prefix).await?;
269+
270+
let excluded_build_ids = normalized_build_ids(&payload.excluded_build_ids);
271+
let (latest_build_id, deleted_build_ids) =
272+
select_builds_for_cleanup(&keys, &excluded_build_ids);
273+
let deleted_build_ids_set: HashSet<&str> =
274+
deleted_build_ids.iter().map(String::as_str).collect();
275+
276+
let keys_to_delete: Vec<String> = keys
277+
.into_iter()
278+
.filter(|key| {
279+
parse_sourcemap_key(key)
280+
.map(|(build_id, _)| deleted_build_ids_set.contains(build_id.as_str()))
281+
.unwrap_or(false)
282+
})
283+
.collect();
284+
285+
let deleted_files = state.storage.delete_keys(&keys_to_delete).await?;
286+
287+
info!(
288+
%project_id,
289+
latest_build_id = ?latest_build_id,
290+
excluded_build_ids = ?excluded_build_ids,
291+
deleted_build_ids = ?deleted_build_ids,
292+
deleted_files,
293+
"cleaned up old sourcemap builds"
294+
);
295+
296+
Ok(Json(CleanupResponse {
297+
ok: true,
298+
latest_build_id,
299+
excluded_build_ids,
300+
deleted_build_ids,
301+
deleted_files,
302+
}))
303+
}
304+
242305
fn map_file_name(file_name: &str) -> String {
243306
if file_name.ends_with(".map") {
244307
file_name.to_string()
@@ -276,9 +339,50 @@ fn parse_sourcemap_key(key: &str) -> Option<(String, String)> {
276339
Some((build_id, file_name))
277340
}
278341

342+
/// Normalizes caller-supplied build ids: trims surrounding whitespace,
/// drops entries that are empty after trimming, and removes duplicates
/// while preserving first-seen order.
fn normalized_build_ids(input: &[String]) -> Vec<String> {
    // Track seen ids as &str borrows into `input`, so duplicates and
    // blank entries cost no allocation (the original allocated a String
    // per element just to probe the set).
    let mut seen: HashSet<&str> = HashSet::new();
    let mut out = Vec::new();
    for value in input {
        let trimmed = value.trim();
        if !trimmed.is_empty() && seen.insert(trimmed) {
            out.push(trimmed.to_string());
        }
    }
    out
}
356+
357+
fn select_builds_for_cleanup(
358+
keys: &[String],
359+
excluded_build_ids: &[String],
360+
) -> (Option<String>, Vec<String>) {
361+
let mut builds = BTreeSet::new();
362+
for key in keys {
363+
if let Some((build_id, _)) = parse_sourcemap_key(key) {
364+
builds.insert(build_id);
365+
}
366+
}
367+
368+
let latest_build_id = builds.last().cloned();
369+
let excluded_set: HashSet<&str> = excluded_build_ids.iter().map(String::as_str).collect();
370+
let deleted_build_ids = builds
371+
.into_iter()
372+
.filter(|build_id| {
373+
Some(build_id) != latest_build_id.as_ref() && !excluded_set.contains(build_id.as_str())
374+
})
375+
.collect();
376+
377+
(latest_build_id, deleted_build_ids)
378+
}
379+
279380
#[cfg(test)]
280381
mod tests {
281-
use super::{map_file_name, parse_sourcemap_key, require_non_empty};
382+
use super::{
383+
map_file_name, normalized_build_ids, parse_sourcemap_key, require_non_empty,
384+
select_builds_for_cleanup,
385+
};
282386

283387
#[test]
284388
fn map_file_name_adds_map_suffix_when_missing() {
@@ -300,4 +404,44 @@ mod tests {
300404
let err = require_non_empty("build_id", " ").expect_err("value should be invalid");
301405
assert!(format!("{err}").contains("build_id is required"));
302406
}
407+
408+
#[test]
fn normalized_build_ids_deduplicates_and_trims() {
    // Duplicates (after trimming) and blank entries are dropped, and the
    // surviving ids keep their first-seen order.
    let input = vec![
        " build-1 ".to_string(),
        "build-1".to_string(),
        "".to_string(),
        " ".to_string(),
        "build-2".to_string(),
    ];
    let expected = vec!["build-1".to_string(), "build-2".to_string()];
    assert_eq!(normalized_build_ids(&input), expected);
}
422+
423+
#[test]
fn select_builds_for_cleanup_keeps_latest_and_excluded() {
    // Four builds: the newest (build-004) and the explicitly excluded
    // build-002 survive; the other two are marked for deletion.
    let keys: Vec<String> = [
        "proj/build-001/app.js.map",
        "proj/build-002/app.js.map",
        "proj/build-003/app.js.map",
        "proj/build-004/app.js.map",
    ]
    .iter()
    .map(|key| key.to_string())
    .collect();
    let excluded = vec!["build-002".to_string()];

    let (latest, deleted) = select_builds_for_cleanup(&keys, &excluded);

    assert_eq!(latest.as_deref(), Some("build-004"));
    assert_eq!(
        deleted,
        vec!["build-001".to_string(), "build-003".to_string()]
    );
}
440+
441+
#[test]
fn select_builds_for_cleanup_handles_no_builds() {
    // With no keys at all there is no latest build and nothing to delete.
    let (latest, deleted) = select_builds_for_cleanup(&[], &[]);
    assert!(latest.is_none());
    assert!(deleted.is_empty());
}
303447
}

apps/backend/src/storage.rs

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,39 @@ impl Storage {
107107
Ok(deleted)
108108
}
109109

110+
/// Deletes the given object keys from the bucket in batches and returns
/// the number of keys submitted for deletion.
///
/// NOTE(review): the returned count reflects objects *requested* for
/// deletion, not confirmed deletions — per-object failures reported in
/// the DeleteObjects response are not inspected here. Confirm whether
/// partial failures should surface to the caller.
pub async fn delete_keys(&self, keys: &[String]) -> Result<u64, AppError> {
    let mut deleted: u64 = 0;

    // The S3 DeleteObjects API accepts at most 1000 keys per request,
    // hence the chunking.
    for chunk in keys.chunks(1000) {
        // NOTE(review): keys that fail ObjectIdentifier validation are
        // silently skipped (and excluded from the count) by `.ok()`.
        let objects: Vec<ObjectIdentifier> = chunk
            .iter()
            .filter_map(|key| ObjectIdentifier::builder().key(key).build().ok())
            .collect();

        if objects.is_empty() {
            continue;
        }

        // Count before `objects` is moved into the Delete payload.
        let count = objects.len() as u64;
        let delete = Delete::builder()
            .set_objects(Some(objects))
            .build()
            .map_err(s3_error)?;

        self.client
            .delete_objects()
            .bucket(&self.bucket)
            .delete(delete)
            .send()
            .await
            .map_err(s3_error)?;

        deleted += count;
    }

    Ok(deleted)
}
142+
110143
pub async fn list_prefix_keys(&self, prefix: &str) -> Result<Vec<String>, AppError> {
111144
let mut keys = Vec::new();
112145
let mut continuation_token: Option<String> = None;

0 commit comments

Comments
 (0)