diff --git a/apps/api/src/download_detection.rs b/apps/api/src/download_detection.rs index 01f7509..8d21c83 100644 --- a/apps/api/src/download_detection.rs +++ b/apps/api/src/download_detection.rs @@ -49,6 +49,8 @@ pub struct AvailableReleaseDto { pub indexer: Option, pub seeders: Option, pub matched_missing_volumes: Vec, + #[serde(default)] + pub all_volumes: Vec, } // --------------------------------------------------------------------------- @@ -714,7 +716,8 @@ async fn search_prowlarr_for_series( .filter_map(|r| { let title_volumes = prowlarr::extract_volumes_from_title_pub(&r.title); let matched_vols: Vec = title_volumes - .into_iter() + .iter() + .copied() .filter(|v| missing_volumes.contains(v)) .collect(); if matched_vols.is_empty() { @@ -727,6 +730,7 @@ async fn search_prowlarr_for_series( indexer: r.indexer, seeders: r.seeders, matched_missing_volumes: matched_vols, + all_volumes: title_volumes, }) } }) diff --git a/apps/api/src/prowlarr.rs b/apps/api/src/prowlarr.rs index db1832a..87bb0a5 100644 --- a/apps/api/src/prowlarr.rs +++ b/apps/api/src/prowlarr.rs @@ -175,26 +175,36 @@ fn extract_volumes_from_title(title: &str) -> Vec { } // Pass 2 — individual volumes not already captured by range expansion - let prefixes = ["tome", "vol.", "vol ", "t", "v", "#"]; + // Note: work entirely with char indices (not byte offsets) to avoid + // mismatches when the title contains multi-byte UTF-8 characters. 
+    let prefixes: &[(&[char], bool)] = &[
+        (&['t', 'o', 'm', 'e'], false),
+        (&['v', 'o', 'l', '.'], false),
+        (&['v', 'o', 'l', ' '], false),
+        (&['t'], true),
+        (&['v'], true),
+        (&['#'], false),
+    ];
     let len = chars.len();
-    for prefix in &prefixes {
-        let mut start = 0;
-        while let Some(pos) = lower[start..].find(prefix) {
-            let abs_pos = start + pos;
-            let after = abs_pos + prefix.len();
-
-            // For single-char prefixes (t, v), ensure it's at a word boundary
-            if prefix.len() == 1 && *prefix != "#" {
-                if abs_pos > 0 && chars[abs_pos - 1].is_alphanumeric() {
-                    start = after;
-                    continue;
-                }
+    for &(prefix, needs_boundary) in prefixes {
+        let plen = prefix.len();
+        let mut ci = 0usize;
+        while ci + plen <= len {
+            if chars[ci..ci + plen] != *prefix {
+                ci += 1;
+                continue;
             }
-            // Skip optional spaces or dots after prefix
-            let mut i = after;
-            while i < len && (chars[i] == ' ' || chars[i] == '.') {
+            // For single-char prefixes (t, v), ensure it's at a word boundary
+            if needs_boundary && ci > 0 && chars[ci - 1].is_alphanumeric() {
+                ci += plen;
+                continue;
+            }
+
+            // Skip optional spaces, dots, or '#' after prefix
+            let mut i = ci + plen;
+            while i < len && (chars[i] == ' ' || chars[i] == '.' || chars[i] == '#') {
                 i += 1;
             }
@@ -205,14 +215,15 @@ fn extract_volumes_from_title(title: &str) -> Vec<i32> {
             }
 
             if i > digit_start {
-                if let Ok(num) = lower[digit_start..i].parse::<i32>() {
+                let num_str: String = chars[digit_start..i].iter().collect();
+                if let Ok(num) = num_str.parse::<i32>() {
                     if !volumes.contains(&num) {
                         volumes.push(num);
                     }
                 }
             }
 
-            start = after;
+            ci += plen;
         }
     }
@@ -535,4 +546,22 @@ mod tests {
         let v = extract_volumes_from_title("tool v2.0 release");
         assert!(!v.contains(&0) || v.len() == 1); // only v2 at most
     }
+
+    #[test]
+    fn tome_hash_with_accented_chars() {
+        // Tome #097 with accented characters earlier in the string — the é in
+        // "Compressé" shifts byte offsets vs char offsets; this must not break parsing.
+ let v = sorted(extract_volumes_from_title( + "[Compressé] One Piece [Team Chromatique] - Tome #097 - [V2].cbz", + )); + assert!(v.contains(&97), "expected 97 in {:?}", v); + } + + #[test] + fn tome_hash_single_digit() { + let v = sorted(extract_volumes_from_title( + "[Compressé] One Piece [Team Chromatique] - Tome #003 (Perfect Edition).cbz", + )); + assert!(v.contains(&3), "expected 3 in {:?}", v); + } } diff --git a/apps/api/src/qbittorrent.rs b/apps/api/src/qbittorrent.rs index b721d53..5696168 100644 --- a/apps/api/src/qbittorrent.rs +++ b/apps/api/src/qbittorrent.rs @@ -17,6 +17,9 @@ pub struct QBittorrentAddRequest { pub library_id: Option, pub series_name: Option, pub expected_volumes: Option>, + /// When true, overwrite existing files at destination during import. + #[serde(default)] + pub replace_existing: bool, } #[derive(Serialize, ToSchema)] @@ -203,14 +206,15 @@ pub async fn add_torrent( let id = download_id.unwrap(); sqlx::query( - "INSERT INTO torrent_downloads (id, library_id, series_name, expected_volumes, qb_hash) \ - VALUES ($1, $2, $3, $4, $5)", + "INSERT INTO torrent_downloads (id, library_id, series_name, expected_volumes, qb_hash, replace_existing) \ + VALUES ($1, $2, $3, $4, $5, $6)", ) .bind(id) .bind(library_id) .bind(series_name) .bind(expected_volumes) .bind(qb_hash.as_deref()) + .bind(body.replace_existing) .execute(&state.pool) .await?; diff --git a/apps/api/src/torrent_import.rs b/apps/api/src/torrent_import.rs index 8babe21..f107687 100644 --- a/apps/api/src/torrent_import.rs +++ b/apps/api/src/torrent_import.rs @@ -406,7 +406,7 @@ async fn is_torrent_import_enabled(pool: &PgPool) -> bool { async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Result<()> { let row = sqlx::query( - "SELECT library_id, series_name, expected_volumes, content_path, qb_hash \ + "SELECT library_id, series_name, expected_volumes, content_path, qb_hash, replace_existing \ FROM torrent_downloads WHERE id = $1", ) .bind(torrent_id) @@ 
-418,6 +418,7 @@ async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Resul
     let expected_volumes: Vec<i32> = row.get("expected_volumes");
     let content_path: Option<String> = row.get("content_path");
     let qb_hash: Option<String> = row.get("qb_hash");
+    let replace_existing: bool = row.get("replace_existing");
 
     let content_path = content_path.ok_or_else(|| anyhow::anyhow!("content_path not set on torrent_download"))?;
@@ -428,7 +429,7 @@ async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Resul
         .execute(&pool)
         .await?;
 
-    match do_import(&pool, library_id, &series_name, &expected_volumes, &content_path).await {
+    match do_import(&pool, library_id, &series_name, &expected_volumes, &content_path, replace_existing).await {
         Ok(imported) => {
             let json = serde_json::to_value(&imported).unwrap_or(serde_json::json!([]));
             sqlx::query(
@@ -526,19 +527,19 @@ async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Resul
         }
     }
 
-    // Clean up: remove source directory if it's a subdirectory of /downloads
-    let physical_content = remap_downloads_path(&content_path);
+    // Clean up: remove the sl-{id} category directory and all its contents
     let downloads_root = remap_downloads_path("/downloads");
-    let content_p = std::path::Path::new(&physical_content);
+    let category_dir = remap_downloads_path(&format!("/downloads/sl-{torrent_id}"));
+    let category_p = std::path::Path::new(&category_dir);
     let downloads_p = std::path::Path::new(&downloads_root);
-    if content_p.is_dir() && content_p != downloads_p && content_p.starts_with(downloads_p) {
-        match std::fs::remove_dir_all(content_p) {
-            Ok(()) => info!("[IMPORT] Cleaned up source directory: {}", physical_content),
-            Err(e) => warn!("[IMPORT] Failed to clean up {}: {}", physical_content, e),
+    if category_p.is_dir() && category_p != downloads_p && category_p.starts_with(downloads_p) {
+        match std::fs::remove_dir_all(category_p) {
+            Ok(()) => info!("[IMPORT] Cleaned up category directory: {}",
 category_dir),
+            Err(e) => warn!("[IMPORT] Failed to clean up {}: {}", category_dir, e),
         }
     }
 
-    // Remove torrent from qBittorrent
+    // Remove torrent and category from qBittorrent
     if let Some(ref hash) = qb_hash {
         if let Ok((base_url, username, password)) = load_qbittorrent_config(&pool).await {
             if let Ok(client) = reqwest::Client::builder().timeout(Duration::from_secs(10)).build() {
@@ -550,6 +551,15 @@ async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Resul
                     .send()
                     .await;
                 info!("[IMPORT] Removed torrent {} from qBittorrent", hash);
+
+                // Remove the sl-{id} category
+                let cat = format!("sl-{torrent_id}");
+                let _ = client
+                    .post(format!("{base_url}/api/v2/torrents/removeCategories"))
+                    .header("Cookie", format!("SID={sid}"))
+                    .form(&[("categories", cat.as_str())])
+                    .send()
+                    .await;
             }
         }
     }
@@ -584,6 +594,7 @@ async fn do_import(
     series_name: &str,
     expected_volumes: &[i32],
     content_path: &str,
+    replace_existing: bool,
 ) -> anyhow::Result<Vec<ImportedFile>> {
     let physical_content = remap_downloads_path(content_path);
@@ -645,6 +656,7 @@ async fn do_import(
     info!("[IMPORT] Final reference: {:?}", reference);
 
     let mut imported = Vec::new();
+    let mut used_destinations: std::collections::HashSet<String> = std::collections::HashSet::new();
 
     for source_path in collect_book_files(&physical_content)?
{ let filename = std::path::Path::new(&source_path) @@ -656,26 +668,37 @@ async fn do_import( .and_then(|e| e.to_str()) .unwrap_or(""); - let matched: Vec = extract_volumes_from_title_pub(filename) - .into_iter() + let all_extracted = extract_volumes_from_title_pub(filename); + let matched: Vec = all_extracted + .iter() + .copied() .filter(|v| expected_set.contains(v)) .collect(); if matched.is_empty() { + info!("[IMPORT] Skipping '{}' (extracted volumes {:?}, none in expected set)", filename, all_extracted); continue; } let target_filename = if matched.len() == 1 { // Single volume: apply naming pattern from reference let vol = matched[0]; - if let Some((ref ref_path, ref_vol)) = reference { + let generated = if let Some((ref ref_path, ref_vol)) = reference { let built = build_target_filename(ref_path, ref_vol, vol, ext); - info!("[IMPORT] build_target_filename(ref={}, ref_vol={}, new_vol={}, ext={}) => {:?}", - ref_path, ref_vol, vol, ext, built); + info!("[IMPORT] build_target_filename(ref={}, ref_vol={}, new_vol={}, ext={}) => {:?} (source='{}')", + ref_path, ref_vol, vol, ext, built, filename); built.unwrap_or_else(|| default_filename(series_name, vol, ext)) } else { - info!("[IMPORT] No reference, using default_filename for vol {}", vol); + info!("[IMPORT] No reference, using default_filename for vol {} (source='{}')", vol, filename); default_filename(series_name, vol, ext) + }; + + // If this destination was already used in this batch, keep original filename + if used_destinations.contains(&generated) { + info!("[IMPORT] Destination '{}' already used in this batch, keeping original filename '{}'", generated, filename); + filename.to_string() + } else { + generated } } else { // Multi-volume pack: keep original filename (scanner handles ranges) @@ -684,13 +707,14 @@ async fn do_import( let dest = format!("{}/{}", target_dir, target_filename); - if std::path::Path::new(&dest).exists() { - info!("Skipping {} (already exists at destination)", dest); + if 
std::path::Path::new(&dest).exists() && !replace_existing { + info!("[IMPORT] Skipping '{}' → '{}' (already exists at destination)", filename, dest); continue; } move_file(&source_path, &dest)?; - info!("Imported {:?} → {}", matched, dest); + used_destinations.insert(target_filename); + info!("[IMPORT] Imported '{}' [{:?}] → {}", filename, matched, dest); imported.push(ImportedFile { volume: *matched.iter().min().unwrap(), @@ -699,6 +723,24 @@ async fn do_import( }); } + // Sanity check: warn if many source files collapsed into few volumes + // (symptom of a volume extraction bug) + let source_count = collect_book_files(&physical_content).map(|f| f.len()).unwrap_or(0); + let unique_volumes: std::collections::HashSet = imported.iter().map(|f| f.volume).collect(); + if source_count > 5 && unique_volumes.len() > 0 && source_count > unique_volumes.len() * 3 { + warn!( + "[IMPORT] Suspicious: {} source files mapped to only {} unique volumes ({:?}). \ + Possible volume extraction issue for series '{}'", + source_count, unique_volumes.len(), + { + let mut v: Vec = unique_volumes.into_iter().collect(); + v.sort(); + v + }, + series_name, + ); + } + Ok(imported) } diff --git a/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx b/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx index bf9deb7..4b840ea 100644 --- a/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx +++ b/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx @@ -383,7 +383,7 @@ function AvailableLibraryCard({ lib }: { lib: LatestFoundPerLibraryDto }) { {release.seeders}S )} {(release.size / 1024 / 1024).toFixed(0)} MB -
+
{release.matched_missing_volumes.map(vol => ( T{vol} ))} @@ -398,6 +398,7 @@ function AvailableLibraryCard({ lib }: { lib: LatestFoundPerLibraryDto }) { libraryId={lib.library_id} seriesName={r.series_name} expectedVolumes={release.matched_missing_volumes} + allVolumes={release.all_volumes} />
)} diff --git a/apps/backoffice/app/(app)/jobs/[id]/components/DownloadDetectionCards.tsx b/apps/backoffice/app/(app)/jobs/[id]/components/DownloadDetectionCards.tsx index d62e6ea..a8f717b 100644 --- a/apps/backoffice/app/(app)/jobs/[id]/components/DownloadDetectionCards.tsx +++ b/apps/backoffice/app/(app)/jobs/[id]/components/DownloadDetectionCards.tsx @@ -113,6 +113,7 @@ export function DownloadDetectionResultsCard({ results, libraryId, qbConfigured, libraryId={libraryId ?? undefined} seriesName={r.series_name} expectedVolumes={release.matched_missing_volumes} + allVolumes={release.all_volumes} /> )}
diff --git a/apps/backoffice/app/components/QbittorrentDownloadButton.tsx b/apps/backoffice/app/components/QbittorrentDownloadButton.tsx index 0786809..2553ac0 100644 --- a/apps/backoffice/app/components/QbittorrentDownloadButton.tsx +++ b/apps/backoffice/app/components/QbittorrentDownloadButton.tsx @@ -1,7 +1,8 @@ "use client"; import { useState, useEffect, createContext, useContext, type ReactNode } from "react"; -import { Icon } from "./ui"; +import { createPortal } from "react-dom"; +import { Icon, Button } from "./ui"; import { useTranslation } from "@/lib/i18n/context"; interface QbContextValue { @@ -34,22 +35,28 @@ export function QbittorrentDownloadButton({ libraryId, seriesName, expectedVolumes, + allVolumes, }: { downloadUrl: string; releaseId: string; libraryId?: string; seriesName?: string; expectedVolumes?: number[]; + allVolumes?: number[]; }) { const { t } = useTranslation(); const { configured, onDownloadStarted } = useContext(QbConfigContext); const [sending, setSending] = useState(false); const [sent, setSent] = useState(false); const [error, setError] = useState(null); + const [showConfirm, setShowConfirm] = useState(false); if (!configured) return null; - async function handleSend() { + const hasExistingVolumes = allVolumes && expectedVolumes + && allVolumes.length > expectedVolumes.length; + + async function handleSend(volumes?: number[], replaceExisting = false) { setSending(true); setError(null); try { @@ -60,7 +67,8 @@ export function QbittorrentDownloadButton({ url: downloadUrl, ...(libraryId && { library_id: libraryId }), ...(seriesName && { series_name: seriesName }), - ...(expectedVolumes && { expected_volumes: expectedVolumes }), + ...((volumes || expectedVolumes) && { expected_volumes: volumes || expectedVolumes }), + ...(replaceExisting && { replace_existing: true }), }), }); const data = await resp.json(); @@ -81,28 +89,71 @@ export function QbittorrentDownloadButton({ } return ( - + + {hasExistingVolumes && ( + + )} + + + 
{showConfirm && createPortal( + <> +
setShowConfirm(false)} /> +
+
+
+

+ {t("prowlarr.replaceAndDownload")} +

+

+ {t("prowlarr.confirmReplace")} +

+
+
+ + +
+
+
+ , + document.body )} - + ); } diff --git a/apps/backoffice/lib/api.ts b/apps/backoffice/lib/api.ts index db87944..007472c 100644 --- a/apps/backoffice/lib/api.ts +++ b/apps/backoffice/lib/api.ts @@ -1162,6 +1162,7 @@ export type AvailableReleaseDto = { indexer: string | null; seeders: number | null; matched_missing_volumes: number[]; + all_volumes: number[]; }; export type DownloadDetectionReportDto = { diff --git a/apps/backoffice/lib/i18n/en.ts b/apps/backoffice/lib/i18n/en.ts index 732c05f..9a95837 100644 --- a/apps/backoffice/lib/i18n/en.ts +++ b/apps/backoffice/lib/i18n/en.ts @@ -645,6 +645,8 @@ const en: Record = { "prowlarr.sending": "Sending...", "prowlarr.sentSuccess": "Sent to qBittorrent", "prowlarr.sentError": "Failed to send to qBittorrent", + "prowlarr.replaceAndDownload": "Download and replace existing", + "prowlarr.confirmReplace": "This will re-download all volumes in the pack, including those already present. Continue?", "prowlarr.missingVol": "Vol. {{vol}} missing", // Settings - qBittorrent diff --git a/apps/backoffice/lib/i18n/fr.ts b/apps/backoffice/lib/i18n/fr.ts index 47bf8e2..a817cd6 100644 --- a/apps/backoffice/lib/i18n/fr.ts +++ b/apps/backoffice/lib/i18n/fr.ts @@ -643,6 +643,8 @@ const fr = { "prowlarr.sending": "Envoi...", "prowlarr.sentSuccess": "Envoyé à qBittorrent", "prowlarr.sentError": "Échec de l'envoi à qBittorrent", + "prowlarr.replaceAndDownload": "Télécharger et remplacer les existants", + "prowlarr.confirmReplace": "Cela va retélécharger tous les volumes du pack, y compris ceux déjà présents. Continuer ?", "prowlarr.missingVol": "T{{vol}} manquant", // Settings - qBittorrent diff --git a/infra/migrations/0068_add_torrent_replace_existing.sql b/infra/migrations/0068_add_torrent_replace_existing.sql new file mode 100644 index 0000000..ff1a02f --- /dev/null +++ b/infra/migrations/0068_add_torrent_replace_existing.sql @@ -0,0 +1,2 @@ +ALTER TABLE torrent_downloads + ADD COLUMN replace_existing BOOLEAN NOT NULL DEFAULT FALSE;