fix: import torrent — reconnaître les volumes nus (- 07 -, 06.) et gérer les torrents stalledDL

- Ajout Pass 3 dans extract_volumes_from_title pour les patterns "Nom - 07 - Titre.cbz"
  et "06. nom.cbz" (nombre nu entre tirets ou en début de nom)
- Gestion des états qBittorrent stalledDL/pausedDL/error/missingFiles → marqués en erreur
  en DB et supprimés de qBittorrent
- Ajout de logs de debug pour l'import (fichiers trouvés, volumes extraits, dedup)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-29 11:33:03 +02:00
parent b8ed77f3f2
commit 776ef679c2
2 changed files with 143 additions and 0 deletions

View File

@@ -239,6 +239,69 @@ fn extract_volumes_from_title(title: &str) -> Vec<i32> {
} }
} }
// Pass 3 — bare number patterns (only if passes 1 & 2 found nothing)
// Handles:
// "Les Géants - 07 - Moon.cbz" → 7
// "06. yatho.cbz" → 6
if volumes.is_empty() {
// Pattern A: " - NN - " or " - NN." (number between dash separators)
let dash_num_re = |chars: &[char]| -> Vec<i32> {
let mut found = Vec::new();
let mut i = 0;
while i + 4 < chars.len() {
// Look for " - "
if chars[i] == ' ' && chars[i + 1] == '-' && chars[i + 2] == ' ' {
let mut j = i + 3;
// Skip leading spaces
while j < chars.len() && chars[j] == ' ' {
j += 1;
}
let digit_start = j;
while j < chars.len() && chars[j].is_ascii_digit() {
j += 1;
}
if j > digit_start {
// Ensure followed by " - ", ".", or end-ish (space + non-digit)
let valid_end = j >= chars.len()
|| (j + 2 < chars.len() && chars[j] == ' ' && chars[j + 1] == '-' && chars[j + 2] == ' ')
|| chars[j] == '.'
|| (chars[j] == ' ' && (j + 1 >= chars.len() || !chars[j + 1].is_ascii_digit()));
if valid_end {
let num_str: String = chars[digit_start..j].iter().collect();
if let Ok(num) = num_str.parse::<i32>() {
if !found.contains(&num) {
found.push(num);
}
}
}
}
}
i += 1;
}
found
};
volumes.extend(dash_num_re(&chars));
// Pattern B: "NN. " or "NN - " at the very start of the string
if volumes.is_empty() {
let mut j = 0;
while j < chars.len() && chars[j].is_ascii_digit() {
j += 1;
}
if j > 0 && j < chars.len() {
let valid_sep = chars[j] == '.'
|| chars[j] == ' '
|| (j + 2 < chars.len() && chars[j] == ' ' && chars[j + 1] == '-');
if valid_sep {
let num_str: String = chars[..j].iter().collect();
if let Ok(num) = num_str.parse::<i32>() {
volumes.push(num);
}
}
}
}
}
volumes volumes
} }
@@ -615,4 +678,39 @@ mod tests {
)); ));
assert!(v.contains(&3), "expected 3 in {:?}", v); assert!(v.contains(&3), "expected 3 in {:?}", v);
} }
#[test]
fn bare_number_between_dashes() {
    // A bare number framed by " - " separators is taken as the volume number.
    assert_eq!(
        extract_volumes_from_title("Les Géants - 07 - Moon.cbz"),
        vec![7]
    );
}
#[test]
fn bare_number_dash_then_dot() {
    // A dash-separated number terminated by the extension dot still parses.
    assert_eq!(extract_volumes_from_title("Les Géants - 07.cbz"), vec![7]);
}
#[test]
fn bare_number_at_start_dot() {
    // A leading number followed by a dot ("NN. name") is a volume number.
    assert_eq!(extract_volumes_from_title("06. yatho.cbz"), vec![6]);
}
#[test]
fn bare_number_at_start_dash() {
    // A leading number followed by " - " ("NN - name") is a volume number.
    assert_eq!(extract_volumes_from_title("07 - Moon.cbz"), vec![7]);
}
#[test]
fn bare_number_no_false_positive_with_prefix() {
    // Once a prefixed match (e.g. "T05") exists, the bare-number pass must
    // stay disabled so stray numbers in the title are not picked up.
    assert_eq!(
        extract_volumes_from_title("Naruto T05 - some 99 extra.cbz"),
        vec![5],
        "should only find T05, not bare 99"
    );
}
} }

View File

@@ -215,6 +215,11 @@ const QB_COMPLETED_STATES: &[&str] = &[
"uploading", "stalledUP", "pausedUP", "queuedUP", "checkingUP", "forcedUP", "uploading", "stalledUP", "pausedUP", "queuedUP", "checkingUP", "forcedUP",
]; ];
/// Failed/stalled states: torrent cannot make progress (no seeds, stalled download).
/// A torrent reported in one of these states is marked as errored in the DB and
/// then removed from qBittorrent by the poller.
const QB_FAILED_STATES: &[&str] = &["stalledDL", "pausedDL", "error", "missingFiles"];
pub async fn run_torrent_poller(pool: PgPool, interval_seconds: u64) { pub async fn run_torrent_poller(pool: PgPool, interval_seconds: u64) {
let idle_wait = Duration::from_secs(interval_seconds.max(5)); let idle_wait = Duration::from_secs(interval_seconds.max(5));
let active_wait = Duration::from_secs(2); let active_wait = Duration::from_secs(2);
@@ -316,6 +321,8 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<bool> {
let infos: Vec<QbTorrentInfo> = resp.json().await?; let infos: Vec<QbTorrentInfo> = resp.json().await?;
for info in &infos { for info in &infos {
info!("[TORRENT_POLLER] Torrent {} state='{}' progress={:.2} name={:?}", info.hash, info.state, info.progress, info.name);
// Update progress for all active torrents // Update progress for all active torrents
let row = rows.iter().find(|r| { let row = rows.iter().find(|r| {
let h: String = r.get("qb_hash"); let h: String = r.get("qb_hash");
@@ -336,6 +343,35 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<bool> {
.await; .await;
} }
if QB_FAILED_STATES.contains(&info.state.as_str()) {
let Some(row) = rows.iter().find(|r| {
let h: String = r.get("qb_hash");
h == info.hash
}) else { continue; };
let tid: Uuid = row.get("id");
let msg = format!("Torrent stalled in qBittorrent (state: {})", info.state);
warn!("[TORRENT_POLLER] Torrent {} failed: {}", info.hash, msg);
let _ = sqlx::query(
"UPDATE torrent_downloads SET status = 'error', error_message = $1, \
download_speed = 0, eta = 0, updated_at = NOW() \
WHERE id = $2 AND status = 'downloading'",
)
.bind(&msg)
.bind(tid)
.execute(pool)
.await;
// Remove torrent from qBittorrent
let _ = client
.post(format!("{base_url}/api/v2/torrents/delete"))
.header("Cookie", format!("SID={sid}"))
.form(&[("hashes", info.hash.as_str()), ("deleteFiles", "true")])
.send()
.await;
info!("[TORRENT_POLLER] Removed failed torrent {} from qBittorrent", info.hash);
continue;
}
if !QB_COMPLETED_STATES.contains(&info.state.as_str()) { if !QB_COMPLETED_STATES.contains(&info.state.as_str()) {
continue; continue;
} }
@@ -681,11 +717,20 @@ async fn do_import(
}; };
info!("[IMPORT] Final reference: {:?}", reference); info!("[IMPORT] Final reference: {:?}", reference);
info!("[IMPORT] Expected volumes: {:?}", expected_set);
info!("[IMPORT] Physical content path: {}", physical_content);
// Collect all candidate files, then deduplicate by volume keeping the best format. // Collect all candidate files, then deduplicate by volume keeping the best format.
// Priority: cbz > cbr > pdf > epub // Priority: cbz > cbr > pdf > epub
let all_source_files = collect_book_files(&physical_content)?; let all_source_files = collect_book_files(&physical_content)?;
info!("[IMPORT] Found {} source files: {:?}", all_source_files.len(), all_source_files);
for f in &all_source_files {
let fname = std::path::Path::new(f).file_name().and_then(|n| n.to_str()).unwrap_or("");
let extracted = extract_volumes_from_title_pub(fname);
info!("[IMPORT] '{}' => extracted volumes: {:?}", fname, extracted);
}
let source_files = deduplicate_by_format(&all_source_files, &expected_set); let source_files = deduplicate_by_format(&all_source_files, &expected_set);
info!("[IMPORT] After dedup: {} files kept", source_files.len());
let mut imported = Vec::new(); let mut imported = Vec::new();
let mut used_destinations: std::collections::HashSet<String> = std::collections::HashSet::new(); let mut used_destinations: std::collections::HashSet<String> = std::collections::HashSet::new();