feat: section disponibles au téléchargement + fix nommage import
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 43s

- Endpoint GET /download-detection/latest-found : résultats "found" du
  dernier job de détection par bibliothèque
- Section dans la page Téléchargements avec les releases disponibles
  groupées par bibliothèque, bouton qBittorrent intégré
- Fix nommage import : exclut les volumes importés de la recherche de
  référence (évite le cercle vicieux vol 8 → ref vol 8 → même nom)
- Fix extraction volumes : gère "Tome.007" (point après préfixe) en
  plus de "Tome 007" dans extract_volumes_from_title
- Fallback disque pour la référence de nommage quand la DB ne matche pas
- Logging détaillé du processus d'import pour debug

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-26 22:38:31 +01:00
parent 888db484fb
commit 32078c715a
10 changed files with 303 additions and 15 deletions

View File

@@ -343,6 +343,97 @@ pub async fn get_detection_results(
Ok(Json(results)) Ok(Json(results))
} }
// ---------------------------------------------------------------------------
// GET /download-detection/latest-found
// ---------------------------------------------------------------------------
/// "Found" detection results for a single library, taken from that library's
/// most recent successful download-detection job.
#[derive(Serialize, ToSchema)]
pub struct LatestFoundPerLibraryDto {
    /// Library the results belong to.
    #[schema(value_type = String)]
    pub library_id: Uuid,
    /// Human-readable library name (joined from the `libraries` table).
    pub library_name: String,
    /// Detection job the results were read from.
    #[schema(value_type = String)]
    pub job_id: Uuid,
    /// Job creation timestamp, RFC 3339 formatted.
    pub job_date: String,
    /// Results with status `found` for this job, ordered by series name.
    pub results: Vec<DownloadDetectionResultDto>,
}
/// Returns "found" results from the latest detection job per library.
#[utoipa::path(
    get,
    path = "/download-detection/latest-found",
    tag = "download_detection",
    responses(
        (status = 200, body = Vec<LatestFoundPerLibraryDto>),
    ),
    security(("Bearer" = []))
)]
pub async fn get_latest_found(
    State(state): State<AppState>,
) -> Result<Json<Vec<LatestFoundPerLibraryDto>>, ApiError> {
    // Latest successful detection job for every library. DISTINCT ON keeps
    // exactly one row per library — the newest, thanks to the
    // `created_at DESC` ordering within each `library_id` group.
    let latest_jobs = sqlx::query(
        "SELECT DISTINCT ON (j.library_id) j.id, j.library_id, j.created_at, l.name as library_name \
         FROM index_jobs j \
         JOIN libraries l ON l.id = j.library_id \
         WHERE j.type = 'download_detection' AND j.status = 'success' \
         ORDER BY j.library_id, j.created_at DESC",
    )
    .fetch_all(&state.pool)
    .await?;

    let mut per_library = Vec::new();
    for job_row in &latest_jobs {
        let job_id: Uuid = job_row.get("id");
        let library_id: Uuid = job_row.get("library_id");
        let library_name: String = job_row.get("library_name");
        let created_at: chrono::DateTime<chrono::Utc> = job_row.get("created_at");

        // Only results that actually matched something ("found") are exposed.
        let result_rows = sqlx::query(
            "SELECT id, series_name, status, missing_count, available_releases, error_message \
             FROM download_detection_results \
             WHERE job_id = $1 AND status = 'found' \
             ORDER BY series_name",
        )
        .bind(job_id)
        .fetch_all(&state.pool)
        .await?;

        // Libraries whose latest job found nothing are omitted entirely.
        if result_rows.is_empty() {
            continue;
        }

        let mut results = Vec::with_capacity(result_rows.len());
        for row in &result_rows {
            // `available_releases` is stored as JSON; a NULL column or a
            // payload that no longer matches the DTO shape both map to None.
            let raw_releases: Option<serde_json::Value> = row.get("available_releases");
            let available_releases = match raw_releases {
                Some(value) => serde_json::from_value::<Vec<AvailableReleaseDto>>(value).ok(),
                None => None,
            };
            results.push(DownloadDetectionResultDto {
                id: row.get("id"),
                series_name: row.get("series_name"),
                status: row.get("status"),
                missing_count: row.get("missing_count"),
                available_releases,
                error_message: row.get("error_message"),
            });
        }

        per_library.push(LatestFoundPerLibraryDto {
            library_id,
            library_name,
            job_id,
            job_date: created_at.to_rfc3339(),
            results,
        });
    }
    Ok(Json(per_library))
}
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Background processing // Background processing
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------

View File

@@ -158,6 +158,7 @@ async fn main() -> anyhow::Result<()> {
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report)) .route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results)) .route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
.route("/download-detection/start", axum::routing::post(download_detection::start_detection)) .route("/download-detection/start", axum::routing::post(download_detection::start_detection))
.route("/download-detection/latest-found", get(download_detection::get_latest_found))
.route("/download-detection/:id/report", get(download_detection::get_detection_report)) .route("/download-detection/:id/report", get(download_detection::get_detection_report))
.route("/download-detection/:id/results", get(download_detection::get_detection_results)) .route("/download-detection/:id/results", get(download_detection::get_detection_results))
.merge(settings::settings_routes()) .merge(settings::settings_routes())

View File

@@ -192,9 +192,9 @@ fn extract_volumes_from_title(title: &str) -> Vec<i32> {
} }
} }
// Skip optional spaces after prefix // Skip optional spaces or dots after prefix
let mut i = after; let mut i = after;
while i < len && chars[i] == ' ' { while i < len && (chars[i] == ' ' || chars[i] == '.') {
i += 1; i += 1;
} }
@@ -256,7 +256,7 @@ fn read_vol_prefix_number(chars: &[char], pos: usize) -> Option<(i32, usize)> {
} }
let mut i = pos + prefix_char_count; let mut i = pos + prefix_char_count;
while i < chars.len() && chars[i] == ' ' { while i < chars.len() && (chars[i] == ' ' || chars[i] == '.') {
i += 1; i += 1;
} }

View File

@@ -453,16 +453,19 @@ async fn do_import(
) -> anyhow::Result<Vec<ImportedFile>> { ) -> anyhow::Result<Vec<ImportedFile>> {
let physical_content = remap_downloads_path(content_path); let physical_content = remap_downloads_path(content_path);
// Find the target directory and reference file (latest volume) from existing book_files. // Find the target directory and reference file from existing book_files.
// Exclude volumes we're about to import so we get a different file as naming reference.
let ref_row = sqlx::query( let ref_row = sqlx::query(
"SELECT bf.abs_path, b.volume \ "SELECT bf.abs_path, b.volume \
FROM book_files bf \ FROM book_files bf \
JOIN books b ON b.id = bf.book_id \ JOIN books b ON b.id = bf.book_id \
WHERE b.library_id = $1 AND b.series = $2 AND b.volume IS NOT NULL \ WHERE b.library_id = $1 AND LOWER(b.series) = LOWER($2) AND b.volume IS NOT NULL \
AND b.volume != ALL($3) \
ORDER BY b.volume DESC LIMIT 1", ORDER BY b.volume DESC LIMIT 1",
) )
.bind(library_id) .bind(library_id)
.bind(series_name) .bind(series_name)
.bind(expected_volumes)
.fetch_optional(pool) .fetch_optional(pool)
.await?; .await?;
@@ -474,9 +477,11 @@ async fn do_import(
.parent() .parent()
.map(|p| p.to_string_lossy().into_owned()) .map(|p| p.to_string_lossy().into_owned())
.unwrap_or(physical); .unwrap_or(physical);
info!("[IMPORT] DB reference found: {} (volume {}), target_dir={}", abs_path, volume, parent);
(parent, Some((abs_path, volume))) (parent, Some((abs_path, volume)))
} else { } else {
// No existing files: create series directory inside library root // No existing files in DB: create series directory inside library root
info!("[IMPORT] No DB reference for series '{}' in library {}", series_name, library_id);
let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1") let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1")
.bind(library_id) .bind(library_id)
.fetch_one(pool) .fetch_one(pool)
@@ -490,6 +495,21 @@ async fn do_import(
std::fs::create_dir_all(&target_dir)?; std::fs::create_dir_all(&target_dir)?;
let expected_set: std::collections::HashSet<i32> = expected_volumes.iter().copied().collect(); let expected_set: std::collections::HashSet<i32> = expected_volumes.iter().copied().collect();
// If DB didn't give us a reference, try to find one from existing files on disk
let reference = if reference.is_some() {
reference
} else {
info!("[IMPORT] Trying disk fallback in {}", target_dir);
let disk_ref = find_reference_from_disk(&target_dir, &expected_set);
if disk_ref.is_none() {
info!("[IMPORT] No disk reference found either, using default naming");
}
disk_ref
};
info!("[IMPORT] Final reference: {:?}", reference);
let mut imported = Vec::new(); let mut imported = Vec::new();
for source_path in collect_book_files(&physical_content)? { for source_path in collect_book_files(&physical_content)? {
@@ -515,9 +535,12 @@ async fn do_import(
// Single volume: apply naming pattern from reference // Single volume: apply naming pattern from reference
let vol = matched[0]; let vol = matched[0];
if let Some((ref ref_path, ref_vol)) = reference { if let Some((ref ref_path, ref_vol)) = reference {
build_target_filename(ref_path, ref_vol, vol, ext) let built = build_target_filename(ref_path, ref_vol, vol, ext);
.unwrap_or_else(|| default_filename(series_name, vol, ext)) info!("[IMPORT] build_target_filename(ref={}, ref_vol={}, new_vol={}, ext={}) => {:?}",
ref_path, ref_vol, vol, ext, built);
built.unwrap_or_else(|| default_filename(series_name, vol, ext))
} else { } else {
info!("[IMPORT] No reference, using default_filename for vol {}", vol);
default_filename(series_name, vol, ext) default_filename(series_name, vol, ext)
} }
} else { } else {
@@ -545,6 +568,42 @@ async fn do_import(
Ok(imported) Ok(imported)
} }
// ─── Reference from disk ──────────────────────────────────────────────────────

/// Scan a directory for book files and pick the one with the highest extracted volume
/// as a naming reference, excluding certain volumes. Returns (abs_path, volume).
fn find_reference_from_disk(dir: &str, exclude_volumes: &std::collections::HashSet<i32>) -> Option<(String, i32)> {
    const EXTENSIONS: [&str; 4] = ["cbz", "cbr", "pdf", "epub"];
    let mut best: Option<(String, i32)> = None;
    // An unreadable directory simply yields no reference (None).
    for entry in std::fs::read_dir(dir).ok()?.flatten() {
        let path = entry.path();
        // Only regular files with a known book extension are candidates.
        if !path.is_file() {
            continue;
        }
        let ext = path.extension().and_then(|e| e.to_str()).unwrap_or("");
        if !EXTENSIONS.iter().any(|&e| e.eq_ignore_ascii_case(ext)) {
            continue;
        }
        let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        // A file may mention several volumes (packs); judge it by its highest
        // one, and skip the file entirely when that volume is excluded.
        let volumes = extract_volumes_from_title_pub(filename);
        let vol = match volumes.iter().max() {
            Some(&v) if !exclude_volumes.contains(&v) => v,
            _ => continue,
        };
        if best.as_ref().map_or(true, |(_, v)| vol > *v) {
            best = Some((path.to_string_lossy().into_owned(), vol));
        }
    }
    if let Some((ref path, vol)) = best {
        info!("[IMPORT] Found disk reference: {} (volume {})", path, vol);
    }
    best
}
// ─── Filesystem helpers ─────────────────────────────────────────────────────── // ─── Filesystem helpers ───────────────────────────────────────────────────────
fn collect_book_files(root: &str) -> anyhow::Result<Vec<String>> { fn collect_book_files(root: &str) -> anyhow::Result<Vec<String>> {
@@ -670,7 +729,8 @@ fn build_target_filename(
let (start, end) = last_match?; let (start, end) = last_match?;
let digit_width = end - start; let digit_width = end - start;
let new_digits = format!("{:0>width$}", new_volume, width = digit_width); let new_digits = format!("{:0>width$}", new_volume, width = digit_width);
let new_stem = format!("{}{}{}", &stem[..start], new_digits, &stem[end..]); // Truncate after the volume number (remove suffixes like ".FR-NoFace696")
let new_stem = format!("{}{}", &stem[..start], new_digits);
Some(format!("{}.{}", new_stem, target_ext)) Some(format!("{}.{}", new_stem, target_ext))
} }
@@ -758,4 +818,15 @@ mod tests {
); );
assert_eq!(result, Some("Code 451 - T05.cbz".to_string())); assert_eq!(result, Some("Code 451 - T05.cbz".to_string()));
} }
#[test]
fn truncates_suffix_after_volume() {
    // Release-group suffixes after the volume number (".FR-NoFace696") must
    // be dropped when deriving the new filename from the reference file.
    let result = build_target_filename(
        "/libraries/manga/Goblin slayer/Goblin.Slayer.Tome.007.FR-NoFace696.cbr",
        7,
        8,
        "cbz",
    );
    assert_eq!(result, Some("Goblin.Slayer.Tome.008.cbz".to_string()));
}
} }

View File

@@ -2,8 +2,9 @@
import { useState, useEffect, useCallback } from "react"; import { useState, useEffect, useCallback } from "react";
import { createPortal } from "react-dom"; import { createPortal } from "react-dom";
import { TorrentDownloadDto } from "@/lib/api"; import { TorrentDownloadDto, LatestFoundPerLibraryDto } from "@/lib/api";
import { Card, CardContent, Button, Icon } from "@/app/components/ui"; import { Card, CardContent, CardHeader, CardTitle, Button, Icon } from "@/app/components/ui";
import { QbittorrentProvider, QbittorrentDownloadButton } from "@/app/components/QbittorrentDownloadButton";
import { useTranslation } from "@/lib/i18n/context"; import { useTranslation } from "@/lib/i18n/context";
import type { TranslationKey } from "@/lib/i18n/fr"; import type { TranslationKey } from "@/lib/i18n/fr";
@@ -62,9 +63,10 @@ function formatEta(seconds: number): string {
interface DownloadsPageProps { interface DownloadsPageProps {
initialDownloads: TorrentDownloadDto[]; initialDownloads: TorrentDownloadDto[];
initialLatestFound: LatestFoundPerLibraryDto[];
} }
export function DownloadsPage({ initialDownloads }: DownloadsPageProps) { export function DownloadsPage({ initialDownloads, initialLatestFound }: DownloadsPageProps) {
const { t } = useTranslation(); const { t } = useTranslation();
const [downloads, setDownloads] = useState<TorrentDownloadDto[]>(initialDownloads); const [downloads, setDownloads] = useState<TorrentDownloadDto[]>(initialDownloads);
const [filter, setFilter] = useState<string>("all"); const [filter, setFilter] = useState<string>("all");
@@ -154,6 +156,23 @@ export function DownloadsPage({ initialDownloads }: DownloadsPageProps) {
))} ))}
</div> </div>
)} )}
{/* Available downloads from latest detection */}
{initialLatestFound.length > 0 && (
<QbittorrentProvider>
<div className="mt-10">
<h2 className="text-xl font-bold text-foreground mb-4 flex items-center gap-2">
<Icon name="search" size="lg" />
{t("downloads.availableTitle")}
</h2>
<div className="space-y-6">
{initialLatestFound.map(lib => (
<AvailableLibraryCard key={lib.library_id} lib={lib} />
))}
</div>
</div>
</QbittorrentProvider>
)}
</> </>
); );
} }
@@ -314,3 +333,77 @@ function DownloadCard({ dl, onDeleted }: { dl: TorrentDownloadDto; onDeleted: ()
</Card> </Card>
); );
} }
// Card listing the "found" detection results for one library: each series with
// its missing-volume count, the available releases, and a qBittorrent download
// button per release (when a download URL is present).
function AvailableLibraryCard({ lib }: { lib: LatestFoundPerLibraryDto }) {
  const { t } = useTranslation();
  // Collapsed by default: only the first 5 series are shown until expanded.
  const [collapsed, setCollapsed] = useState(true);
  const displayResults = collapsed ? lib.results.slice(0, 5) : lib.results;
  return (
    <Card>
      <CardHeader className="pb-3">
        <div className="flex items-center justify-between">
          <CardTitle className="text-base">{lib.library_name}</CardTitle>
          <span className="text-xs text-muted-foreground">
            {t("downloads.detectedSeries", { count: lib.results.length })} {formatDate(lib.job_date)}
          </span>
        </div>
      </CardHeader>
      <CardContent className="space-y-2">
        {displayResults.map(r => (
          <div key={r.id} className="rounded-lg border border-border/40 bg-background/60 p-3">
            <div className="flex items-center justify-between gap-2 mb-1.5">
              <span className="font-semibold text-sm text-foreground truncate">{r.series_name}</span>
              <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap bg-warning/20 text-warning shrink-0">
                {r.missing_count} {t("downloads.missing")}
              </span>
            </div>
            {r.available_releases && r.available_releases.length > 0 && (
              <div className="space-y-1">
                {r.available_releases.map((release, idx) => (
                  <div key={idx} className="flex items-center gap-2 py-1 pl-2 rounded bg-muted/30">
                    <div className="flex-1 min-w-0">
                      <p className="text-xs font-mono text-foreground truncate" title={release.title}>{release.title}</p>
                      <div className="flex items-center gap-3 mt-0.5 flex-wrap">
                        {release.indexer && <span className="text-[10px] text-muted-foreground">{release.indexer}</span>}
                        {release.seeders != null && (
                          <span className="text-[10px] text-success font-medium">{release.seeders} seeders</span>
                        )}
                        {/* release.size is in bytes; rendered as whole MB */}
                        <span className="text-[10px] text-muted-foreground">{(release.size / 1024 / 1024).toFixed(0)} MB</span>
                        <div className="flex items-center gap-1">
                          {release.matched_missing_volumes.map(vol => (
                            <span key={vol} className="text-[10px] px-1.5 py-0.5 rounded-full bg-success/20 text-success font-medium">T.{vol}</span>
                          ))}
                        </div>
                      </div>
                    </div>
                    {release.download_url && (
                      <QbittorrentDownloadButton
                        downloadUrl={release.download_url}
                        releaseId={`${r.id}-${idx}`}
                        libraryId={lib.library_id}
                        seriesName={r.series_name}
                        expectedVolumes={release.matched_missing_volumes}
                      />
                    )}
                  </div>
                ))}
              </div>
            )}
          </div>
        ))}
        {/* Toggle only rendered when there is something hidden to reveal */}
        {lib.results.length > 5 && (
          <button
            type="button"
            onClick={() => setCollapsed(c => !c)}
            className="text-xs text-primary hover:underline w-full text-center py-1"
          >
            {collapsed
              ? t("downloads.showMore", { count: lib.results.length - 5 })
              : t("downloads.showLess")}
          </button>
        )}
      </CardContent>
    </Card>
  );
}

View File

@@ -1,9 +1,12 @@
import { fetchTorrentDownloads, TorrentDownloadDto } from "@/lib/api"; import { fetchTorrentDownloads, TorrentDownloadDto, LatestFoundPerLibraryDto, apiFetch } from "@/lib/api";
import { DownloadsPage } from "./DownloadsPage"; import { DownloadsPage } from "./DownloadsPage";
export const dynamic = "force-dynamic"; export const dynamic = "force-dynamic";
export default async function Page() { export default async function Page() {
const downloads = await fetchTorrentDownloads().catch(() => [] as TorrentDownloadDto[]); const [downloads, latestFound] = await Promise.all([
return <DownloadsPage initialDownloads={downloads} />; fetchTorrentDownloads().catch(() => [] as TorrentDownloadDto[]),
apiFetch<LatestFoundPerLibraryDto[]>("/download-detection/latest-found").catch(() => [] as LatestFoundPerLibraryDto[]),
]);
return <DownloadsPage initialDownloads={downloads} initialLatestFound={latestFound} />;
} }

View File

@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
// Proxies the backend's latest-found detection results to the client.
// Any fetch failure collapses to a generic 500 (best-effort endpoint).
export async function GET() {
  let data: unknown;
  try {
    data = await apiFetch("/download-detection/latest-found");
  } catch {
    return NextResponse.json({ error: "Failed to fetch latest detection results" }, { status: 500 });
  }
  return NextResponse.json(data);
}

View File

@@ -1184,6 +1184,14 @@ export type DownloadDetectionResultDto = {
error_message: string | null; error_message: string | null;
}; };
/** "Found" detection results for one library's latest detection job. */
export type LatestFoundPerLibraryDto = {
  /** Library UUID. */
  library_id: string;
  /** Display name of the library. */
  library_name: string;
  /** UUID of the detection job the results come from. */
  job_id: string;
  /** Job creation date as an RFC 3339 string. */
  job_date: string;
  /** Results with status "found" for this job. */
  results: DownloadDetectionResultDto[];
};
export async function getDownloadDetectionReport(jobId: string) { export async function getDownloadDetectionReport(jobId: string) {
return apiFetch<DownloadDetectionReportDto>(`/download-detection/${jobId}/report`); return apiFetch<DownloadDetectionReportDto>(`/download-detection/${jobId}/report`);
} }

View File

@@ -902,6 +902,11 @@ const en: Record<TranslationKey, string> = {
"downloads.cancel": "Cancel download", "downloads.cancel": "Cancel download",
"downloads.confirmDelete": "Delete this download?", "downloads.confirmDelete": "Delete this download?",
"downloads.confirmCancel": "Cancel this download? The torrent will also be removed from qBittorrent.", "downloads.confirmCancel": "Cancel this download? The torrent will also be removed from qBittorrent.",
"downloads.availableTitle": "Available for download",
"downloads.detectedSeries": "{{count}} series detected",
"downloads.missing": "missing",
"downloads.showMore": "Show {{count}} more…",
"downloads.showLess": "Show less",
// Settings - Torrent Import // Settings - Torrent Import
"settings.torrentImport": "Auto import", "settings.torrentImport": "Auto import",

View File

@@ -900,6 +900,11 @@ const fr = {
"downloads.cancel": "Annuler le téléchargement", "downloads.cancel": "Annuler le téléchargement",
"downloads.confirmDelete": "Supprimer ce téléchargement ?", "downloads.confirmDelete": "Supprimer ce téléchargement ?",
"downloads.confirmCancel": "Annuler ce téléchargement ? Le torrent sera aussi supprimé de qBittorrent.", "downloads.confirmCancel": "Annuler ce téléchargement ? Le torrent sera aussi supprimé de qBittorrent.",
"downloads.availableTitle": "Disponibles au téléchargement",
"downloads.detectedSeries": "{{count}} séries détectées",
"downloads.missing": "manquant(s)",
"downloads.showMore": "Voir {{count}} de plus…",
"downloads.showLess": "Réduire",
// Settings - Torrent Import // Settings - Torrent Import
"settings.torrentImport": "Import automatique", "settings.torrentImport": "Import automatique",