diff --git a/apps/api/src/download_detection.rs b/apps/api/src/download_detection.rs index 106eb98..d988612 100644 --- a/apps/api/src/download_detection.rs +++ b/apps/api/src/download_detection.rs @@ -343,6 +343,97 @@ pub async fn get_detection_results( Ok(Json(results)) } +// --------------------------------------------------------------------------- +// GET /download-detection/latest-found +// --------------------------------------------------------------------------- + +#[derive(Serialize, ToSchema)] +pub struct LatestFoundPerLibraryDto { + #[schema(value_type = String)] + pub library_id: Uuid, + pub library_name: String, + #[schema(value_type = String)] + pub job_id: Uuid, + pub job_date: String, + pub results: Vec<DownloadDetectionResultDto>, +} + +/// Returns "found" results from the latest detection job per library. +#[utoipa::path( + get, + path = "/download-detection/latest-found", + tag = "download_detection", + responses( + (status = 200, body = Vec<LatestFoundPerLibraryDto>), + ), + security(("Bearer" = [])) +)] +pub async fn get_latest_found( + State(state): State<AppState>, +) -> Result<Json<Vec<LatestFoundPerLibraryDto>>, ApiError> { + // Get latest completed detection job per library + let jobs = sqlx::query( + "SELECT DISTINCT ON (j.library_id) j.id, j.library_id, j.created_at, l.name as library_name \ + FROM index_jobs j \ + JOIN libraries l ON l.id = j.library_id \ + WHERE j.type = 'download_detection' AND j.status = 'success' \ + ORDER BY j.library_id, j.created_at DESC", + ) + .fetch_all(&state.pool) + .await?; + + let mut output = Vec::new(); + + for job in &jobs { + let job_id: Uuid = job.get("id"); + let library_id: Uuid = job.get("library_id"); + let library_name: String = job.get("library_name"); + let created_at: chrono::DateTime<chrono::Utc> = job.get("created_at"); + + let rows = sqlx::query( + "SELECT id, series_name, status, missing_count, available_releases, error_message \ + FROM download_detection_results \ + WHERE job_id = $1 AND status = 'found' \ + ORDER BY series_name", + ) + .bind(job_id) + .fetch_all(&state.pool) + .await?; + + if 
rows.is_empty() { + continue; + } + + let results = rows + .iter() + .map(|row| { + let releases_json: Option<serde_json::Value> = row.get("available_releases"); + let available_releases = releases_json.and_then(|v| { + serde_json::from_value(v).ok() + }); + DownloadDetectionResultDto { + id: row.get("id"), + series_name: row.get("series_name"), + status: row.get("status"), + missing_count: row.get("missing_count"), + available_releases, + error_message: row.get("error_message"), + } + }) + .collect(); + + output.push(LatestFoundPerLibraryDto { + library_id, + library_name, + job_id, + job_date: created_at.to_rfc3339(), + results, + }); + } + + Ok(Json(output)) +} + // --------------------------------------------------------------------------- // Background processing // --------------------------------------------------------------------------- diff --git a/apps/api/src/main.rs b/apps/api/src/main.rs index 90b7bb7..44b952d 100644 --- a/apps/api/src/main.rs +++ b/apps/api/src/main.rs @@ -158,6 +158,7 @@ async fn main() -> anyhow::Result<()> { .route("/reading-status/push/:id/report", get(reading_status_push::get_push_report)) .route("/reading-status/push/:id/results", get(reading_status_push::get_push_results)) .route("/download-detection/start", axum::routing::post(download_detection::start_detection)) + .route("/download-detection/latest-found", get(download_detection::get_latest_found)) .route("/download-detection/:id/report", get(download_detection::get_detection_report)) .route("/download-detection/:id/results", get(download_detection::get_detection_results)) .merge(settings::settings_routes()) diff --git a/apps/api/src/prowlarr.rs b/apps/api/src/prowlarr.rs index 4daac24..db1832a 100644 --- a/apps/api/src/prowlarr.rs +++ b/apps/api/src/prowlarr.rs @@ -192,9 +192,9 @@ fn extract_volumes_from_title(title: &str) -> Vec<i32> { } } - // Skip optional spaces after prefix + // Skip optional spaces or dots after prefix let mut i = after; - while i < len && chars[i] == ' ' { + while i < 
len && (chars[i] == ' ' || chars[i] == '.') { i += 1; } @@ -256,7 +256,7 @@ fn read_vol_prefix_number(chars: &[char], pos: usize) -> Option<(i32, usize)> { } let mut i = pos + prefix_char_count; - while i < chars.len() && chars[i] == ' ' { + while i < chars.len() && (chars[i] == ' ' || chars[i] == '.') { i += 1; } diff --git a/apps/api/src/torrent_import.rs b/apps/api/src/torrent_import.rs index 2d49bbe..a47d64a 100644 --- a/apps/api/src/torrent_import.rs +++ b/apps/api/src/torrent_import.rs @@ -453,16 +453,19 @@ async fn do_import( ) -> anyhow::Result> { let physical_content = remap_downloads_path(content_path); - // Find the target directory and reference file (latest volume) from existing book_files. + // Find the target directory and reference file from existing book_files. + // Exclude volumes we're about to import so we get a different file as naming reference. let ref_row = sqlx::query( "SELECT bf.abs_path, b.volume \ FROM book_files bf \ JOIN books b ON b.id = bf.book_id \ - WHERE b.library_id = $1 AND b.series = $2 AND b.volume IS NOT NULL \ + WHERE b.library_id = $1 AND LOWER(b.series) = LOWER($2) AND b.volume IS NOT NULL \ + AND b.volume != ALL($3) \ ORDER BY b.volume DESC LIMIT 1", ) .bind(library_id) .bind(series_name) + .bind(expected_volumes) .fetch_optional(pool) .await?; @@ -474,9 +477,11 @@ async fn do_import( .parent() .map(|p| p.to_string_lossy().into_owned()) .unwrap_or(physical); + info!("[IMPORT] DB reference found: {} (volume {}), target_dir={}", abs_path, volume, parent); (parent, Some((abs_path, volume))) } else { - // No existing files: create series directory inside library root + // No existing files in DB: create series directory inside library root + info!("[IMPORT] No DB reference for series '{}' in library {}", series_name, library_id); let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1") .bind(library_id) .fetch_one(pool) @@ -490,6 +495,21 @@ async fn do_import( std::fs::create_dir_all(&target_dir)?; let 
expected_set: std::collections::HashSet<i32> = expected_volumes.iter().copied().collect(); + + // If DB didn't give us a reference, try to find one from existing files on disk + let reference = if reference.is_some() { + reference + } else { + info!("[IMPORT] Trying disk fallback in {}", target_dir); + let disk_ref = find_reference_from_disk(&target_dir, &expected_set); + if disk_ref.is_none() { + info!("[IMPORT] No disk reference found either, using default naming"); + } + disk_ref + }; + + info!("[IMPORT] Final reference: {:?}", reference); + let mut imported = Vec::new(); for source_path in collect_book_files(&physical_content)? { @@ -515,9 +535,12 @@ async fn do_import( // Single volume: apply naming pattern from reference let vol = matched[0]; if let Some((ref ref_path, ref_vol)) = reference { - build_target_filename(ref_path, ref_vol, vol, ext) - .unwrap_or_else(|| default_filename(series_name, vol, ext)) + let built = build_target_filename(ref_path, ref_vol, vol, ext); + info!("[IMPORT] build_target_filename(ref={}, ref_vol={}, new_vol={}, ext={}) => {:?}", + ref_path, ref_vol, vol, ext, built); + built.unwrap_or_else(|| default_filename(series_name, vol, ext)) } else { + info!("[IMPORT] No reference, using default_filename for vol {}", vol); default_filename(series_name, vol, ext) } } else { @@ -545,6 +568,42 @@ async fn do_import( Ok(imported) } +// ─── Reference from disk ────────────────────────────────────────────────────── + +/// Scan a directory for book files and pick the one with the highest extracted volume +/// as a naming reference, excluding certain volumes. Returns (abs_path, volume). 
+fn find_reference_from_disk(dir: &str, exclude_volumes: &std::collections::HashSet<i32>) -> Option<(String, i32)> { + let extensions = ["cbz", "cbr", "pdf", "epub"]; + let entries = std::fs::read_dir(dir).ok()?; + let mut best: Option<(String, i32)> = None; + + for entry in entries.flatten() { + let path = entry.path(); + if !path.is_file() { + continue; + } + let ext = path.extension().and_then(|e| e.to_str()).unwrap_or(""); + if !extensions.iter().any(|&e| e.eq_ignore_ascii_case(ext)) { + continue; + } + let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); + let volumes = extract_volumes_from_title_pub(filename); + if let Some(&vol) = volumes.iter().max() { + if exclude_volumes.contains(&vol) { + continue; + } + if best.as_ref().map_or(true, |(_, v)| vol > *v) { + best = Some((path.to_string_lossy().into_owned(), vol)); + } + } + } + + if let Some((ref path, vol)) = best { + info!("[IMPORT] Found disk reference: {} (volume {})", path, vol); + } + best +} + // ─── Filesystem helpers ─────────────────────────────────────────────────────── fn collect_book_files(root: &str) -> anyhow::Result<Vec<String>> { @@ -670,7 +729,8 @@ fn build_target_filename( let (start, end) = last_match?; let digit_width = end - start; let new_digits = format!("{:0>width$}", new_volume, width = digit_width); - let new_stem = format!("{}{}{}", &stem[..start], new_digits, &stem[end..]); + // Truncate after the volume number (remove suffixes like ".FR-NoFace696") + let new_stem = format!("{}{}", &stem[..start], new_digits); Some(format!("{}.{}", new_stem, target_ext)) } @@ -758,4 +818,15 @@ mod tests { ); assert_eq!(result, Some("Code 451 - T05.cbz".to_string())); } + + #[test] + fn truncates_suffix_after_volume() { + let result = build_target_filename( + "/libraries/manga/Goblin slayer/Goblin.Slayer.Tome.007.FR-NoFace696.cbr", + 7, + 8, + "cbz", + ); + assert_eq!(result, Some("Goblin.Slayer.Tome.008.cbz".to_string())); + } } diff --git 
a/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx b/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx index f0ba25f..50abc6f 100644 --- a/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx +++ b/apps/backoffice/app/(app)/downloads/DownloadsPage.tsx @@ -2,8 +2,9 @@ import { useState, useEffect, useCallback } from "react"; import { createPortal } from "react-dom"; -import { TorrentDownloadDto } from "@/lib/api"; -import { Card, CardContent, Button, Icon } from "@/app/components/ui"; +import { TorrentDownloadDto, LatestFoundPerLibraryDto } from "@/lib/api"; +import { Card, CardContent, CardHeader, CardTitle, Button, Icon } from "@/app/components/ui"; +import { QbittorrentProvider, QbittorrentDownloadButton } from "@/app/components/QbittorrentDownloadButton"; import { useTranslation } from "@/lib/i18n/context"; import type { TranslationKey } from "@/lib/i18n/fr"; @@ -62,9 +63,10 @@ function formatEta(seconds: number): string { interface DownloadsPageProps { initialDownloads: TorrentDownloadDto[]; + initialLatestFound: LatestFoundPerLibraryDto[]; } -export function DownloadsPage({ initialDownloads }: DownloadsPageProps) { +export function DownloadsPage({ initialDownloads, initialLatestFound }: DownloadsPageProps) { const { t } = useTranslation(); const [downloads, setDownloads] = useState(initialDownloads); const [filter, setFilter] = useState("all"); @@ -154,6 +156,23 @@ export function DownloadsPage({ initialDownloads }: DownloadsPageProps) { ))} )} + + {/* Available downloads from latest detection */} + {initialLatestFound.length > 0 && ( + +
+

+ + {t("downloads.availableTitle")} +

+
+ {initialLatestFound.map(lib => ( + + ))} +
+
+
+ )} ); } @@ -314,3 +333,77 @@ function DownloadCard({ dl, onDeleted }: { dl: TorrentDownloadDto; onDeleted: () ); } + +function AvailableLibraryCard({ lib }: { lib: LatestFoundPerLibraryDto }) { + const { t } = useTranslation(); + const [collapsed, setCollapsed] = useState(true); + const displayResults = collapsed ? lib.results.slice(0, 5) : lib.results; + + return ( + + +
+ {lib.library_name} + + {t("downloads.detectedSeries", { count: lib.results.length })} — {formatDate(lib.job_date)} + +
+
+ + {displayResults.map(r => ( +
+
+ {r.series_name} + + {r.missing_count} {t("downloads.missing")} + +
+ {r.available_releases && r.available_releases.length > 0 && ( +
+ {r.available_releases.map((release, idx) => ( +
+
+

{release.title}

+
+ {release.indexer && {release.indexer}} + {release.seeders != null && ( + {release.seeders} seeders + )} + {(release.size / 1024 / 1024).toFixed(0)} MB +
+ {release.matched_missing_volumes.map(vol => ( + T.{vol} + ))} +
+
+
+ {release.download_url && ( + + )} +
+ ))} +
+ )} +
+ ))} + {lib.results.length > 5 && ( + + )} +
+
+ ); +} diff --git a/apps/backoffice/app/(app)/downloads/page.tsx b/apps/backoffice/app/(app)/downloads/page.tsx index 2b66800..f96bb49 100644 --- a/apps/backoffice/app/(app)/downloads/page.tsx +++ b/apps/backoffice/app/(app)/downloads/page.tsx @@ -1,9 +1,12 @@ -import { fetchTorrentDownloads, TorrentDownloadDto } from "@/lib/api"; +import { fetchTorrentDownloads, TorrentDownloadDto, LatestFoundPerLibraryDto, apiFetch } from "@/lib/api"; import { DownloadsPage } from "./DownloadsPage"; export const dynamic = "force-dynamic"; export default async function Page() { - const downloads = await fetchTorrentDownloads().catch(() => [] as TorrentDownloadDto[]); - return ; + const [downloads, latestFound] = await Promise.all([ + fetchTorrentDownloads().catch(() => [] as TorrentDownloadDto[]), + apiFetch("/download-detection/latest-found").catch(() => [] as LatestFoundPerLibraryDto[]), + ]); + return ; } diff --git a/apps/backoffice/app/api/download-detection/latest-found/route.ts b/apps/backoffice/app/api/download-detection/latest-found/route.ts new file mode 100644 index 0000000..4da643f --- /dev/null +++ b/apps/backoffice/app/api/download-detection/latest-found/route.ts @@ -0,0 +1,11 @@ +import { NextResponse } from "next/server"; +import { apiFetch } from "@/lib/api"; + +export async function GET() { + try { + const data = await apiFetch("/download-detection/latest-found"); + return NextResponse.json(data); + } catch { + return NextResponse.json({ error: "Failed to fetch latest detection results" }, { status: 500 }); + } +} diff --git a/apps/backoffice/lib/api.ts b/apps/backoffice/lib/api.ts index 2ef279b..156db7d 100644 --- a/apps/backoffice/lib/api.ts +++ b/apps/backoffice/lib/api.ts @@ -1184,6 +1184,14 @@ export type DownloadDetectionResultDto = { error_message: string | null; }; +export type LatestFoundPerLibraryDto = { + library_id: string; + library_name: string; + job_id: string; + job_date: string; + results: DownloadDetectionResultDto[]; +}; + export async 
function getDownloadDetectionReport(jobId: string) { return apiFetch(`/download-detection/${jobId}/report`); } diff --git a/apps/backoffice/lib/i18n/en.ts b/apps/backoffice/lib/i18n/en.ts index b79d46f..bb7c201 100644 --- a/apps/backoffice/lib/i18n/en.ts +++ b/apps/backoffice/lib/i18n/en.ts @@ -902,6 +902,11 @@ const en: Record = { "downloads.cancel": "Cancel download", "downloads.confirmDelete": "Delete this download?", "downloads.confirmCancel": "Cancel this download? The torrent will also be removed from qBittorrent.", + "downloads.availableTitle": "Available for download", + "downloads.detectedSeries": "{{count}} series detected", + "downloads.missing": "missing", + "downloads.showMore": "Show {{count}} more…", + "downloads.showLess": "Show less", // Settings - Torrent Import "settings.torrentImport": "Auto import", diff --git a/apps/backoffice/lib/i18n/fr.ts b/apps/backoffice/lib/i18n/fr.ts index 0b2ad52..8de682d 100644 --- a/apps/backoffice/lib/i18n/fr.ts +++ b/apps/backoffice/lib/i18n/fr.ts @@ -900,6 +900,11 @@ const fr = { "downloads.cancel": "Annuler le téléchargement", "downloads.confirmDelete": "Supprimer ce téléchargement ?", "downloads.confirmCancel": "Annuler ce téléchargement ? Le torrent sera aussi supprimé de qBittorrent.", + "downloads.availableTitle": "Disponibles au téléchargement", + "downloads.detectedSeries": "{{count}} séries détectées", + "downloads.missing": "manquant(s)", + "downloads.showMore": "Voir {{count}} de plus…", + "downloads.showLess": "Réduire", // Settings - Torrent Import "settings.torrentImport": "Import automatique",