diff --git a/apps/api/src/error.rs b/apps/api/src/error.rs index 72c80f2..89b029d 100644 --- a/apps/api/src/error.rs +++ b/apps/api/src/error.rs @@ -74,7 +74,25 @@ impl IntoResponse for ApiError { impl From<sqlx::Error> for ApiError { fn from(err: sqlx::Error) -> Self { - Self::internal(format!("database error: {err}")) + match &err { + sqlx::Error::RowNotFound => Self::not_found("resource not found"), + sqlx::Error::PoolTimedOut => Self { + status: StatusCode::SERVICE_UNAVAILABLE, + message: "database pool timed out".to_string(), + }, + sqlx::Error::Database(db_err) => { + // PostgreSQL unique_violation = 23505, foreign_key_violation = 23503 + let code = db_err.code().unwrap_or_default(); + if code == "23505" { + Self::bad_request(format!("duplicate entry: {}", db_err.message())) + } else if code == "23503" { + Self::bad_request(format!("foreign key violation: {}", db_err.message())) + } else { + Self::internal(format!("database error: {err}")) + } + } + _ => Self::internal(format!("database error: {err}")), + } } } @@ -89,3 +107,36 @@ impl From<reqwest::Error> for ApiError { Self::internal(format!("HTTP client error: {err}")) } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn sqlx_row_not_found_maps_to_404() { + let err: ApiError = sqlx::Error::RowNotFound.into(); + assert_eq!(err.status, StatusCode::NOT_FOUND); + } + + #[test] + fn sqlx_pool_timeout_maps_to_503() { + let err: ApiError = sqlx::Error::PoolTimedOut.into(); + assert_eq!(err.status, StatusCode::SERVICE_UNAVAILABLE); + } + + #[test] + fn sqlx_other_error_maps_to_500() { + let err: ApiError = sqlx::Error::PoolClosed.into(); + assert_eq!(err.status, StatusCode::INTERNAL_SERVER_ERROR); + } + + #[test] + fn api_error_constructors() { + assert_eq!(ApiError::bad_request("x").status, StatusCode::BAD_REQUEST); + assert_eq!(ApiError::not_found("x").status, StatusCode::NOT_FOUND); + assert_eq!(ApiError::unauthorized("x").status, StatusCode::UNAUTHORIZED); + assert_eq!(ApiError::forbidden("x").status, StatusCode::FORBIDDEN); 
+ assert_eq!(ApiError::internal("x").status, StatusCode::INTERNAL_SERVER_ERROR); + assert_eq!(ApiError::unprocessable_entity("x").status, StatusCode::UNPROCESSABLE_ENTITY); + } +} diff --git a/apps/indexer/src/analyzer.rs b/apps/indexer/src/analyzer.rs index 8082d38..139045e 100644 --- a/apps/indexer/src/analyzer.rs +++ b/apps/indexer/src/analyzer.rs @@ -396,13 +396,15 @@ pub async fn analyze_library_books( const BATCH_SIZE: usize = 200; let phase_a_start = std::time::Instant::now(); - let _ = sqlx::query( + if let Err(e) = sqlx::query( "UPDATE index_jobs SET status = 'extracting_pages', total_files = $2, processed_files = 0, current_file = NULL WHERE id = $1", ) .bind(job_id) .bind(total) .execute(&state.pool) - .await; + .await { + warn!("[ANALYZER] Failed to update job status to extracting_pages: {e}"); + } let extracted_count = Arc::new(AtomicI32::new(0)); let mut all_extracted: Vec<(Uuid, String, i32)> = Vec::new(); @@ -538,14 +540,16 @@ pub async fn analyze_library_books( } let processed = extracted_count.fetch_add(1, Ordering::Relaxed) + 1; let percent = (processed as f64 / total as f64 * 50.0) as i32; - let _ = sqlx::query( + if let Err(e) = sqlx::query( "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1", ) .bind(job_id) .bind(processed) .bind(percent) .execute(&pool) - .await; + .await { + warn!("[ANALYZER] Failed to update job progress: {e}"); + } if processed % 25 == 0 || processed == total { info!( @@ -588,14 +592,16 @@ pub async fn analyze_library_books( let processed = extracted_count.fetch_add(1, Ordering::Relaxed) + 1; let percent = (processed as f64 / total as f64 * 50.0) as i32; // first 50% - let _ = sqlx::query( + if let Err(e) = sqlx::query( "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1", ) .bind(job_id) .bind(processed) .bind(percent) .execute(&pool) - .await; + .await { + warn!("[ANALYZER] Failed to update job progress: {e}"); + } if processed % 25 == 0 || processed == total { 
info!( @@ -649,13 +655,15 @@ pub async fn analyze_library_books( // CPU bound — can run at higher concurrency than I/O phase // ------------------------------------------------------------------------- let phase_b_start = std::time::Instant::now(); - let _ = sqlx::query( + if let Err(e) = sqlx::query( "UPDATE index_jobs SET status = 'generating_thumbnails', generating_thumbnails_started_at = NOW(), total_files = $2, processed_files = 0, current_file = NULL WHERE id = $1", ) .bind(job_id) .bind(extracted_total) .execute(&state.pool) - .await; + .await { + warn!("[ANALYZER] Failed to update job status to generating_thumbnails: {e}"); + } let resize_count = Arc::new(AtomicI32::new(0)); @@ -706,14 +714,16 @@ pub async fn analyze_library_books( let processed = resize_count.fetch_add(1, Ordering::Relaxed) + 1; let percent = 50 + (processed as f64 / extracted_total as f64 * 50.0) as i32; // last 50% - let _ = sqlx::query( + if let Err(e) = sqlx::query( "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1", ) .bind(job_id) .bind(processed) .bind(percent) .execute(&pool) - .await; + .await { + warn!("[ANALYZER] Failed to update job progress: {e}"); + } if processed % 25 == 0 || processed == extracted_total { info!( diff --git a/crates/parsers/src/lib.rs b/crates/parsers/src/lib.rs index 0fac5c5..6808641 100644 --- a/crates/parsers/src/lib.rs +++ b/crates/parsers/src/lib.rs @@ -338,18 +338,27 @@ fn analyze_cbz_streaming(path: &Path) -> Result<(i32, Vec)> { Ok((count, first_bytes)) } +/// Returns true if the error indicates the file is not a RAR archive (likely a ZIP with .cbr extension). +fn is_not_rar_error(err_str: &str) -> bool { + err_str.contains("Not a RAR archive") || err_str.contains("bad archive") +} + +/// Try to open a CBR file for listing. Returns the archive or an error string. +/// If the error indicates the file is not actually a RAR archive, the caller can fall back to CBZ. 
+fn open_cbr_listing(path: &Path) -> std::result::Result<unrar::OpenArchive<unrar::List, unrar::CursorBeforeHeader>, String> { + unrar::Archive::new(path) + .open_for_listing() + .map_err(|e| format!("unrar listing failed for {}: {}", path.display(), e)) +} + fn analyze_cbr(path: &Path, allow_fallback: bool) -> Result<(i32, Vec<u8>)> { // Pass 1: list all image names via unrar (in-process, no subprocess) let mut image_names: Vec<String> = { - let archive = unrar::Archive::new(path) - .open_for_listing() - .map_err(|e| anyhow::anyhow!("unrar listing failed for {}: {}", path.display(), e)); // Some .cbr files are actually ZIP archives with wrong extension — fallback to CBZ parser - let archive = match archive { + let archive = match open_cbr_listing(path) { Ok(a) => a, - Err(e) => { - let e_str = e.to_string(); - if allow_fallback && (e_str.contains("Not a RAR archive") || e_str.contains("bad archive")) { + Err(e_str) => { + if allow_fallback && is_not_rar_error(&e_str) { return analyze_cbz(path, false).map_err(|zip_err| { anyhow::anyhow!( "not a RAR archive and ZIP fallback also failed for {}: RAR={}, ZIP={}", @@ -359,7 +368,7 @@ fn analyze_cbr(path: &Path, allow_fallback: bool) -> Result<(i32, Vec<u8>)> { ) }); } - return Err(e); + return Err(anyhow::anyhow!("{}", e_str)); } }; let mut names = Vec::new(); @@ -482,18 +491,13 @@ fn parse_cbz_page_count_streaming(path: &Path) -> Result<i32> { } fn parse_cbr_page_count(path: &Path) -> Result<i32> { - let archive = unrar::Archive::new(path) - .open_for_listing() - .map_err(|e| anyhow::anyhow!("unrar listing failed for {}: {}", path.display(), e)); - // Some .cbr files are actually ZIP archives with wrong extension — fallback to CBZ parser - let archive = match archive { + let archive = match open_cbr_listing(path) { Ok(a) => a, - Err(e) => { - let e_str = e.to_string(); - if e_str.contains("Not a RAR archive") || e_str.contains("bad archive") { + Err(e_str) => { + if is_not_rar_error(&e_str) { return parse_cbz_page_count(path); } - return Err(e); + return Err(anyhow::anyhow!("{}", e_str)); } }; let 
count = archive @@ -603,17 +607,13 @@ fn list_cbz_images_streaming(path: &Path) -> Result<Vec<String>> { } fn list_cbr_images(path: &Path) -> Result<Vec<String>> { - let archive = unrar::Archive::new(path) - .open_for_listing() - .map_err(|e| anyhow::anyhow!("unrar listing failed for {}: {}", path.display(), e)); - let archive = match archive { + let archive = match open_cbr_listing(path) { Ok(a) => a, - Err(e) => { - let e_str = e.to_string(); - if e_str.contains("Not a RAR archive") || e_str.contains("bad archive") { + Err(e_str) => { + if is_not_rar_error(&e_str) { return list_cbz_images(path); } - return Err(e); + return Err(anyhow::anyhow!("{}", e_str)); } }; let mut names: Vec<String> = Vec::new(); @@ -819,13 +819,13 @@ fn extract_cbr_page(path: &Path, page_number: u32, allow_fallback: bool) -> Resu let index = page_number as usize - 1; let mut image_names: Vec<String> = { - let archive = match unrar::Archive::new(path).open_for_listing() { + let archive = match open_cbr_listing(path) { Ok(a) => a, - Err(e) => { - if allow_fallback { + Err(e_str) => { + if allow_fallback && is_not_rar_error(&e_str) { return extract_cbz_page(path, page_number, false); } - return Err(anyhow::anyhow!("unrar listing failed for {}: {}", path.display(), e)); + return Err(anyhow::anyhow!("{}", e_str)); } }; let mut names = Vec::new(); @@ -1386,3 +1386,72 @@ fn clean_title(filename: &str) -> String { cleaned.split_whitespace().collect::<Vec<_>>().join(" ") } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn is_not_rar_detects_not_rar_archive() { + assert!(is_not_rar_error("unrar listing failed for /path/file.cbr: Not a RAR archive")); + } + + #[test] + fn is_not_rar_detects_bad_archive() { + assert!(is_not_rar_error("unrar listing failed for /path/file.cbr: bad archive")); + } + + #[test] + fn is_not_rar_ignores_other_errors() { + assert!(!is_not_rar_error("unrar listing failed for /path/file.cbr: file not found")); + assert!(!is_not_rar_error("some other error")); + assert!(!is_not_rar_error("")); + } + + #[test] + 
fn open_cbr_listing_nonexistent_file() { + let result = open_cbr_listing(Path::new("/nonexistent/file.cbr")); + assert!(result.is_err()); + } + + #[test] + fn detect_format_cbz() { + assert_eq!(detect_format(Path::new("test.cbz")), Some(BookFormat::Cbz)); + assert_eq!(detect_format(Path::new("test.CBZ")), Some(BookFormat::Cbz)); + } + + #[test] + fn detect_format_cbr() { + assert_eq!(detect_format(Path::new("test.cbr")), Some(BookFormat::Cbr)); + } + + #[test] + fn detect_format_pdf() { + assert_eq!(detect_format(Path::new("test.pdf")), Some(BookFormat::Pdf)); + } + + #[test] + fn detect_format_epub() { + assert_eq!(detect_format(Path::new("test.epub")), Some(BookFormat::Epub)); + } + + #[test] + fn detect_format_unknown() { + assert_eq!(detect_format(Path::new("test.txt")), None); + assert_eq!(detect_format(Path::new("no_extension")), None); + } + + #[test] + fn is_image_name_accepts_valid() { + assert!(is_image_name("page01.jpg")); + assert!(is_image_name("cover.png")); + assert!(is_image_name("image.webp")); + } + + #[test] + fn is_image_name_rejects_non_images() { + assert!(!is_image_name("readme.txt")); + assert!(!is_image_name("metadata.xml")); + assert!(!is_image_name("")); + } +}