feat: suppression de livres + import insensible aux accents
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 40s
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 40s
- Ajout DELETE /books/:id : supprime le fichier physique, la thumbnail, le book en DB et queue un scan de la lib. Bouton avec confirmation sur la page de détail du livre. - L'import torrent utilise unaccent() en SQL pour matcher les séries indépendamment des accents (ex: "les géants" = "les geants"). - Fallback filesystem avec strip_accents pour les séries sans livre en DB. - Migration 0069: activation de l'extension PostgreSQL unaccent. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -666,3 +666,81 @@ pub async fn get_thumbnail(
|
||||
|
||||
Ok((StatusCode::OK, headers, Body::from(data)))
|
||||
}
|
||||
|
||||
// ─── Delete book ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Delete a book: removes the physical file, the DB record, and queues a library scan.
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/books/{id}",
|
||||
tag = "books",
|
||||
params(("id" = String, Path, description = "Book UUID")),
|
||||
responses(
|
||||
(status = 200, description = "Book deleted"),
|
||||
(status = 404, description = "Book not found"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn delete_book(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
// Fetch the book and its file path
|
||||
let row = sqlx::query(
|
||||
"SELECT b.library_id, b.thumbnail_path, bf.abs_path \
|
||||
FROM books b \
|
||||
LEFT JOIN book_files bf ON bf.book_id = b.id \
|
||||
WHERE b.id = $1",
|
||||
)
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let library_id: Uuid = row.get("library_id");
|
||||
let abs_path: Option<String> = row.get("abs_path");
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
|
||||
// Delete the physical file
|
||||
if let Some(ref path) = abs_path {
|
||||
let physical = remap_libraries_path(path);
|
||||
match std::fs::remove_file(&physical) {
|
||||
Ok(()) => tracing::info!("[BOOKS] Deleted file: {}", physical),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
tracing::warn!("[BOOKS] File already missing: {}", physical);
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("[BOOKS] Failed to delete file {}: {}", physical, e);
|
||||
return Err(ApiError::internal(format!("failed to delete file: {e}")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the thumbnail file
|
||||
if let Some(ref path) = thumbnail_path {
|
||||
let _ = std::fs::remove_file(path);
|
||||
}
|
||||
|
||||
// Delete from DB (book_files cascade via ON DELETE CASCADE)
|
||||
sqlx::query("DELETE FROM books WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Queue a scan job for the library so the index stays consistent
|
||||
let scan_job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'scan', 'pending')",
|
||||
)
|
||||
.bind(scan_job_id)
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
tracing::info!(
|
||||
"[BOOKS] Deleted book {}, scan job {} queued for library {}",
|
||||
id, scan_job_id, library_id
|
||||
);
|
||||
|
||||
Ok(Json(serde_json::json!({ "ok": true })))
|
||||
}
|
||||
|
||||
@@ -100,7 +100,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
|
||||
.route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider))
|
||||
.route("/books/:id", axum::routing::patch(books::update_book))
|
||||
.route("/books/:id", axum::routing::patch(books::update_book).delete(books::delete_book))
|
||||
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
||||
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
|
||||
@@ -628,7 +628,9 @@ async fn do_import(
|
||||
"SELECT bf.abs_path, b.volume \
|
||||
FROM book_files bf \
|
||||
JOIN books b ON b.id = bf.book_id \
|
||||
WHERE b.library_id = $1 AND LOWER(b.series) = LOWER($2) AND b.volume IS NOT NULL \
|
||||
WHERE b.library_id = $1 \
|
||||
AND LOWER(unaccent(b.series)) = LOWER(unaccent($2)) \
|
||||
AND b.volume IS NOT NULL \
|
||||
ORDER BY b.volume DESC LIMIT 1",
|
||||
)
|
||||
.bind(library_id)
|
||||
@@ -647,7 +649,8 @@ async fn do_import(
|
||||
info!("[IMPORT] DB reference found: {} (volume {}), target_dir={}", abs_path, volume, parent);
|
||||
(parent, Some((abs_path, volume)))
|
||||
} else {
|
||||
// No existing files in DB: create series directory inside library root
|
||||
// No existing files in DB: look for an existing directory (case-insensitive)
|
||||
// inside the library root, then fall back to creating one.
|
||||
info!("[IMPORT] No DB reference for series '{}' in library {}", series_name, library_id);
|
||||
let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
@@ -655,7 +658,9 @@ async fn do_import(
|
||||
.await?;
|
||||
let root_path: String = lib_row.get("root_path");
|
||||
let physical_root = remap_libraries_path(&root_path);
|
||||
let dir = format!("{}/{}", physical_root.trim_end_matches('/'), series_name);
|
||||
let dir = find_existing_series_dir(&physical_root, series_name)
|
||||
.unwrap_or_else(|| format!("{}/{}", physical_root.trim_end_matches('/'), series_name));
|
||||
info!("[IMPORT] Target directory: {}", dir);
|
||||
(dir, None)
|
||||
};
|
||||
|
||||
@@ -771,6 +776,59 @@ async fn do_import(
|
||||
Ok(imported)
|
||||
}
|
||||
|
||||
// ─── Directory matching ───────────────────────────────────────────────────────
|
||||
|
||||
/// Find an existing directory in `root` whose name matches `series_name`
|
||||
/// case-insensitively and accent-insensitively (e.g. "les géants" matches "les geants").
|
||||
fn find_existing_series_dir(root: &str, series_name: &str) -> Option<String> {
|
||||
let target_norm = strip_accents(&series_name.to_lowercase());
|
||||
let entries = std::fs::read_dir(root).ok()?;
|
||||
let mut best: Option<(String, bool)> = None; // (path, is_exact_case_match)
|
||||
for entry in entries.flatten() {
|
||||
if !entry.file_type().ok().map_or(false, |t| t.is_dir()) {
|
||||
continue;
|
||||
}
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy();
|
||||
let name_lower = name_str.to_lowercase();
|
||||
let name_norm = strip_accents(&name_lower);
|
||||
if name_norm == target_norm {
|
||||
let path = entry.path().to_string_lossy().into_owned();
|
||||
let exact = name_lower == series_name.to_lowercase();
|
||||
info!("[IMPORT] Found existing directory (normalized match): {} (exact={})", path, exact);
|
||||
// Prefer exact case match over accent-stripped match
|
||||
if exact || best.is_none() {
|
||||
best = Some((path, exact));
|
||||
}
|
||||
}
|
||||
}
|
||||
best.map(|(p, _)| p)
|
||||
}
|
||||
|
||||
/// Remove diacritical marks from a string (é→e, à→a, ü→u, É→E, etc.).
///
/// Uses an explicit mapping table for the Latin accented letters that occur in
/// series names; any character not in the table passes through unchanged.
/// Ligatures æ/œ (and Æ/Œ) expand to "ae"/"oe" ("AE"/"OE").
fn strip_accents(s: &str) -> String {
    // The output is usually the same length as the input (ligatures may add
    // one byte), so pre-sizing avoids reallocation in the common case.
    let mut result = String::with_capacity(s.len());
    for c in s.chars() {
        // String::push/push_str are infallible, so no need for the
        // fmt::Write + discarded-Result dance.
        match c {
            'à' | 'á' | 'â' | 'ã' | 'ä' | 'å' => result.push('a'),
            'è' | 'é' | 'ê' | 'ë' => result.push('e'),
            'ì' | 'í' | 'î' | 'ï' => result.push('i'),
            'ò' | 'ó' | 'ô' | 'õ' | 'ö' => result.push('o'),
            'ù' | 'ú' | 'û' | 'ü' => result.push('u'),
            'ý' | 'ÿ' => result.push('y'),
            'ñ' => result.push('n'),
            'ç' => result.push('c'),
            'æ' => result.push_str("ae"),
            'œ' => result.push_str("oe"),
            // Uppercase variants: current callers lowercase first, but handle
            // these too so the function honours its documented contract.
            'À' | 'Á' | 'Â' | 'Ã' | 'Ä' | 'Å' => result.push('A'),
            'È' | 'É' | 'Ê' | 'Ë' => result.push('E'),
            'Ì' | 'Í' | 'Î' | 'Ï' => result.push('I'),
            'Ò' | 'Ó' | 'Ô' | 'Õ' | 'Ö' => result.push('O'),
            'Ù' | 'Ú' | 'Û' | 'Ü' => result.push('U'),
            'Ý' => result.push('Y'),
            'Ñ' => result.push('N'),
            'Ç' => result.push('C'),
            'Æ' => result.push_str("AE"),
            'Œ' => result.push_str("OE"),
            _ => result.push(c),
        }
    }
    result
}
|
||||
|
||||
// ─── Format deduplication ─────────────────────────────────────────────────────
|
||||
|
||||
/// When a download contains the same volume in multiple formats (e.g. T01.cbz and T01.pdf),
|
||||
|
||||
Reference in New Issue
Block a user