feat: section disponibles au téléchargement + fix nommage import
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 43s

- Endpoint GET /download-detection/latest-found : résultats "found" du
  dernier job de détection par bibliothèque
- Section dans la page Téléchargements avec les releases disponibles
  groupées par bibliothèque, bouton qBittorrent intégré
- Fix nommage import : exclut les volumes importés de la recherche de
  référence (évite le cercle vicieux vol 8 → ref vol 8 → même nom)
- Fix extraction volumes : gère "Tome.007" (point après préfixe) en
  plus de "Tome 007" dans extract_volumes_from_title
- Fallback disque pour la référence de nommage quand la DB ne matche pas
- Logging détaillé du processus d'import pour debug

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-26 22:38:31 +01:00
parent 888db484fb
commit 32078c715a
10 changed files with 303 additions and 15 deletions

View File

@@ -343,6 +343,97 @@ pub async fn get_detection_results(
Ok(Json(results))
}
// ---------------------------------------------------------------------------
// GET /download-detection/latest-found
// ---------------------------------------------------------------------------
/// Per-library payload for `GET /download-detection/latest-found`:
/// the "found" results of that library's most recent successful detection job.
#[derive(Serialize, ToSchema)]
pub struct LatestFoundPerLibraryDto {
    // Library these results belong to.
    #[schema(value_type = String)]
    pub library_id: Uuid,
    // Display name of the library (joined from the `libraries` table).
    pub library_name: String,
    // Detection job that produced these results.
    #[schema(value_type = String)]
    pub job_id: Uuid,
    // Job creation timestamp, RFC 3339 formatted.
    pub job_date: String,
    // Results with status 'found', ordered by series name.
    pub results: Vec<DownloadDetectionResultDto>,
}
/// Returns "found" results from the latest detection job per library.
///
/// For each library, picks the most recent successful `download_detection`
/// job and returns only its results whose status is `found`. Libraries whose
/// latest job produced no such results are omitted from the response.
#[utoipa::path(
    get,
    path = "/download-detection/latest-found",
    tag = "download_detection",
    responses(
        (status = 200, body = Vec<LatestFoundPerLibraryDto>),
    ),
    security(("Bearer" = []))
)]
pub async fn get_latest_found(
    State(state): State<AppState>,
) -> Result<Json<Vec<LatestFoundPerLibraryDto>>, ApiError> {
    // One row per library: its most recent successful detection job
    // (DISTINCT ON keeps the first row of each library_id group, which the
    // ORDER BY makes the newest).
    let latest_jobs = sqlx::query(
        "SELECT DISTINCT ON (j.library_id) j.id, j.library_id, j.created_at, l.name as library_name \
         FROM index_jobs j \
         JOIN libraries l ON l.id = j.library_id \
         WHERE j.type = 'download_detection' AND j.status = 'success' \
         ORDER BY j.library_id, j.created_at DESC",
    )
    .fetch_all(&state.pool)
    .await?;

    let mut per_library = Vec::with_capacity(latest_jobs.len());
    for job_row in &latest_jobs {
        let job_id: Uuid = job_row.get("id");
        let library_id: Uuid = job_row.get("library_id");
        let library_name: String = job_row.get("library_name");
        let created_at: chrono::DateTime<chrono::Utc> = job_row.get("created_at");

        // Fetch only the 'found' results of this job.
        let found_rows = sqlx::query(
            "SELECT id, series_name, status, missing_count, available_releases, error_message \
             FROM download_detection_results \
             WHERE job_id = $1 AND status = 'found' \
             ORDER BY series_name",
        )
        .bind(job_id)
        .fetch_all(&state.pool)
        .await?;

        // Skip libraries with nothing to report.
        if found_rows.is_empty() {
            continue;
        }

        let mut results = Vec::with_capacity(found_rows.len());
        for row in &found_rows {
            // available_releases is stored as JSON; a missing column value or a
            // shape that no longer matches AvailableReleaseDto degrades to None.
            let releases_json: Option<serde_json::Value> = row.get("available_releases");
            let available_releases = match releases_json {
                Some(value) => serde_json::from_value::<Vec<AvailableReleaseDto>>(value).ok(),
                None => None,
            };
            results.push(DownloadDetectionResultDto {
                id: row.get("id"),
                series_name: row.get("series_name"),
                status: row.get("status"),
                missing_count: row.get("missing_count"),
                available_releases,
                error_message: row.get("error_message"),
            });
        }

        per_library.push(LatestFoundPerLibraryDto {
            library_id,
            library_name,
            job_id,
            job_date: created_at.to_rfc3339(),
            results,
        });
    }
    Ok(Json(per_library))
}
// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------

View File

@@ -158,6 +158,7 @@ async fn main() -> anyhow::Result<()> {
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
.route("/download-detection/start", axum::routing::post(download_detection::start_detection))
.route("/download-detection/latest-found", get(download_detection::get_latest_found))
.route("/download-detection/:id/report", get(download_detection::get_detection_report))
.route("/download-detection/:id/results", get(download_detection::get_detection_results))
.merge(settings::settings_routes())

View File

@@ -192,9 +192,9 @@ fn extract_volumes_from_title(title: &str) -> Vec<i32> {
}
}
// Skip optional spaces after prefix
// Skip optional spaces or dots after prefix
let mut i = after;
while i < len && chars[i] == ' ' {
while i < len && (chars[i] == ' ' || chars[i] == '.') {
i += 1;
}
@@ -256,7 +256,7 @@ fn read_vol_prefix_number(chars: &[char], pos: usize) -> Option<(i32, usize)> {
}
let mut i = pos + prefix_char_count;
while i < chars.len() && chars[i] == ' ' {
while i < chars.len() && (chars[i] == ' ' || chars[i] == '.') {
i += 1;
}

View File

@@ -453,16 +453,19 @@ async fn do_import(
) -> anyhow::Result<Vec<ImportedFile>> {
let physical_content = remap_downloads_path(content_path);
// Find the target directory and reference file (latest volume) from existing book_files.
// Find the target directory and reference file from existing book_files.
// Exclude volumes we're about to import so we get a different file as naming reference.
let ref_row = sqlx::query(
"SELECT bf.abs_path, b.volume \
FROM book_files bf \
JOIN books b ON b.id = bf.book_id \
WHERE b.library_id = $1 AND b.series = $2 AND b.volume IS NOT NULL \
WHERE b.library_id = $1 AND LOWER(b.series) = LOWER($2) AND b.volume IS NOT NULL \
AND b.volume != ALL($3) \
ORDER BY b.volume DESC LIMIT 1",
)
.bind(library_id)
.bind(series_name)
.bind(expected_volumes)
.fetch_optional(pool)
.await?;
@@ -474,9 +477,11 @@ async fn do_import(
.parent()
.map(|p| p.to_string_lossy().into_owned())
.unwrap_or(physical);
info!("[IMPORT] DB reference found: {} (volume {}), target_dir={}", abs_path, volume, parent);
(parent, Some((abs_path, volume)))
} else {
// No existing files: create series directory inside library root
// No existing files in DB: create series directory inside library root
info!("[IMPORT] No DB reference for series '{}' in library {}", series_name, library_id);
let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_one(pool)
@@ -490,6 +495,21 @@ async fn do_import(
std::fs::create_dir_all(&target_dir)?;
let expected_set: std::collections::HashSet<i32> = expected_volumes.iter().copied().collect();
// If DB didn't give us a reference, try to find one from existing files on disk
let reference = if reference.is_some() {
reference
} else {
info!("[IMPORT] Trying disk fallback in {}", target_dir);
let disk_ref = find_reference_from_disk(&target_dir, &expected_set);
if disk_ref.is_none() {
info!("[IMPORT] No disk reference found either, using default naming");
}
disk_ref
};
info!("[IMPORT] Final reference: {:?}", reference);
let mut imported = Vec::new();
for source_path in collect_book_files(&physical_content)? {
@@ -515,9 +535,12 @@ async fn do_import(
// Single volume: apply naming pattern from reference
let vol = matched[0];
if let Some((ref ref_path, ref_vol)) = reference {
build_target_filename(ref_path, ref_vol, vol, ext)
.unwrap_or_else(|| default_filename(series_name, vol, ext))
let built = build_target_filename(ref_path, ref_vol, vol, ext);
info!("[IMPORT] build_target_filename(ref={}, ref_vol={}, new_vol={}, ext={}) => {:?}",
ref_path, ref_vol, vol, ext, built);
built.unwrap_or_else(|| default_filename(series_name, vol, ext))
} else {
info!("[IMPORT] No reference, using default_filename for vol {}", vol);
default_filename(series_name, vol, ext)
}
} else {
@@ -545,6 +568,42 @@ async fn do_import(
Ok(imported)
}
// ─── Reference from disk ──────────────────────────────────────────────────────

/// Scan a directory for book files and pick the one with the highest extracted volume
/// as a naming reference, excluding certain volumes. Returns (abs_path, volume).
///
/// Returns `None` when the directory cannot be read or no eligible file is found.
fn find_reference_from_disk(dir: &str, exclude_volumes: &std::collections::HashSet<i32>) -> Option<(String, i32)> {
    const EXTENSIONS: [&str; 4] = ["cbz", "cbr", "pdf", "epub"];
    let mut best: Option<(String, i32)> = None;
    for entry in std::fs::read_dir(dir).ok()?.flatten() {
        let path = entry.path();
        // Only regular files with a known book extension are candidates.
        if !path.is_file() {
            continue;
        }
        let has_book_ext = path
            .extension()
            .and_then(|e| e.to_str())
            .map_or(false, |ext| EXTENSIONS.iter().any(|&e| e.eq_ignore_ascii_case(ext)));
        if !has_book_ext {
            continue;
        }
        let filename = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
        // Highest volume number mentioned in the filename, if any.
        let vol = match extract_volumes_from_title_pub(filename).iter().max() {
            Some(&v) => v,
            None => continue,
        };
        if exclude_volumes.contains(&vol) {
            continue;
        }
        // Strictly greater: on equal volumes the first file seen wins.
        if best.as_ref().map_or(true, |&(_, v)| vol > v) {
            best = Some((path.to_string_lossy().into_owned(), vol));
        }
    }
    if let Some((ref path, vol)) = best {
        info!("[IMPORT] Found disk reference: {} (volume {})", path, vol);
    }
    best
}
// ─── Filesystem helpers ───────────────────────────────────────────────────────
fn collect_book_files(root: &str) -> anyhow::Result<Vec<String>> {
@@ -670,7 +729,8 @@ fn build_target_filename(
let (start, end) = last_match?;
let digit_width = end - start;
let new_digits = format!("{:0>width$}", new_volume, width = digit_width);
let new_stem = format!("{}{}{}", &stem[..start], new_digits, &stem[end..]);
// Truncate after the volume number (remove suffixes like ".FR-NoFace696")
let new_stem = format!("{}{}", &stem[..start], new_digits);
Some(format!("{}.{}", new_stem, target_ext))
}
@@ -758,4 +818,15 @@ mod tests {
);
assert_eq!(result, Some("Code 451 - T05.cbz".to_string()));
}
#[test]
fn truncates_suffix_after_volume() {
    // A release-group suffix after the volume number (".FR-NoFace696") must
    // not survive renaming to the new volume.
    let renamed = build_target_filename(
        "/libraries/manga/Goblin slayer/Goblin.Slayer.Tome.007.FR-NoFace696.cbr",
        7,
        8,
        "cbz",
    );
    assert_eq!(renamed.as_deref(), Some("Goblin.Slayer.Tome.008.cbz"));
}
}