feat: suppression de livres + import insensible aux accents
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 40s
- Ajout DELETE /books/:id : supprime le fichier physique, la thumbnail, le book en DB et queue un scan de la lib. Bouton avec confirmation sur la page de détail du livre.
- L'import torrent utilise unaccent() en SQL pour matcher les séries indépendamment des accents (ex : "les géants" = "les geants").
- Fallback filesystem avec strip_accents pour les séries sans livre en DB.
- Migration 0069 : activation de l'extension PostgreSQL unaccent.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -666,3 +666,81 @@ pub async fn get_thumbnail(
|
||||
|
||||
Ok((StatusCode::OK, headers, Body::from(data)))
|
||||
}
|
||||
|
||||
// ─── Delete book ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Delete a book: removes the physical file, the DB record, and queues a library scan.
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/books/{id}",
|
||||
tag = "books",
|
||||
params(("id" = String, Path, description = "Book UUID")),
|
||||
responses(
|
||||
(status = 200, description = "Book deleted"),
|
||||
(status = 404, description = "Book not found"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn delete_book(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
// Fetch the book and its file path
|
||||
let row = sqlx::query(
|
||||
"SELECT b.library_id, b.thumbnail_path, bf.abs_path \
|
||||
FROM books b \
|
||||
LEFT JOIN book_files bf ON bf.book_id = b.id \
|
||||
WHERE b.id = $1",
|
||||
)
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let library_id: Uuid = row.get("library_id");
|
||||
let abs_path: Option<String> = row.get("abs_path");
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
|
||||
// Delete the physical file
|
||||
if let Some(ref path) = abs_path {
|
||||
let physical = remap_libraries_path(path);
|
||||
match std::fs::remove_file(&physical) {
|
||||
Ok(()) => tracing::info!("[BOOKS] Deleted file: {}", physical),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
tracing::warn!("[BOOKS] File already missing: {}", physical);
|
||||
}
|
||||
Err(e) => {
|
||||
tracing::error!("[BOOKS] Failed to delete file {}: {}", physical, e);
|
||||
return Err(ApiError::internal(format!("failed to delete file: {e}")));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the thumbnail file
|
||||
if let Some(ref path) = thumbnail_path {
|
||||
let _ = std::fs::remove_file(path);
|
||||
}
|
||||
|
||||
// Delete from DB (book_files cascade via ON DELETE CASCADE)
|
||||
sqlx::query("DELETE FROM books WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Queue a scan job for the library so the index stays consistent
|
||||
let scan_job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'scan', 'pending')",
|
||||
)
|
||||
.bind(scan_job_id)
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
tracing::info!(
|
||||
"[BOOKS] Deleted book {}, scan job {} queued for library {}",
|
||||
id, scan_job_id, library_id
|
||||
);
|
||||
|
||||
Ok(Json(serde_json::json!({ "ok": true })))
|
||||
}
|
||||
|
||||
@@ -100,7 +100,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
|
||||
.route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider))
|
||||
.route("/books/:id", axum::routing::patch(books::update_book))
|
||||
.route("/books/:id", axum::routing::patch(books::update_book).delete(books::delete_book))
|
||||
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
||||
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
|
||||
@@ -628,7 +628,9 @@ async fn do_import(
|
||||
"SELECT bf.abs_path, b.volume \
|
||||
FROM book_files bf \
|
||||
JOIN books b ON b.id = bf.book_id \
|
||||
WHERE b.library_id = $1 AND LOWER(b.series) = LOWER($2) AND b.volume IS NOT NULL \
|
||||
WHERE b.library_id = $1 \
|
||||
AND LOWER(unaccent(b.series)) = LOWER(unaccent($2)) \
|
||||
AND b.volume IS NOT NULL \
|
||||
ORDER BY b.volume DESC LIMIT 1",
|
||||
)
|
||||
.bind(library_id)
|
||||
@@ -647,7 +649,8 @@ async fn do_import(
|
||||
info!("[IMPORT] DB reference found: {} (volume {}), target_dir={}", abs_path, volume, parent);
|
||||
(parent, Some((abs_path, volume)))
|
||||
} else {
|
||||
// No existing files in DB: create series directory inside library root
|
||||
// No existing files in DB: look for an existing directory (case-insensitive)
|
||||
// inside the library root, then fall back to creating one.
|
||||
info!("[IMPORT] No DB reference for series '{}' in library {}", series_name, library_id);
|
||||
let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
@@ -655,7 +658,9 @@ async fn do_import(
|
||||
.await?;
|
||||
let root_path: String = lib_row.get("root_path");
|
||||
let physical_root = remap_libraries_path(&root_path);
|
||||
let dir = format!("{}/{}", physical_root.trim_end_matches('/'), series_name);
|
||||
let dir = find_existing_series_dir(&physical_root, series_name)
|
||||
.unwrap_or_else(|| format!("{}/{}", physical_root.trim_end_matches('/'), series_name));
|
||||
info!("[IMPORT] Target directory: {}", dir);
|
||||
(dir, None)
|
||||
};
|
||||
|
||||
@@ -771,6 +776,59 @@ async fn do_import(
|
||||
Ok(imported)
|
||||
}
|
||||
|
||||
// ─── Directory matching ───────────────────────────────────────────────────────
|
||||
|
||||
/// Find an existing directory in `root` whose name matches `series_name`
|
||||
/// case-insensitively and accent-insensitively (e.g. "les géants" matches "les geants").
|
||||
fn find_existing_series_dir(root: &str, series_name: &str) -> Option<String> {
|
||||
let target_norm = strip_accents(&series_name.to_lowercase());
|
||||
let entries = std::fs::read_dir(root).ok()?;
|
||||
let mut best: Option<(String, bool)> = None; // (path, is_exact_case_match)
|
||||
for entry in entries.flatten() {
|
||||
if !entry.file_type().ok().map_or(false, |t| t.is_dir()) {
|
||||
continue;
|
||||
}
|
||||
let name = entry.file_name();
|
||||
let name_str = name.to_string_lossy();
|
||||
let name_lower = name_str.to_lowercase();
|
||||
let name_norm = strip_accents(&name_lower);
|
||||
if name_norm == target_norm {
|
||||
let path = entry.path().to_string_lossy().into_owned();
|
||||
let exact = name_lower == series_name.to_lowercase();
|
||||
info!("[IMPORT] Found existing directory (normalized match): {} (exact={})", path, exact);
|
||||
// Prefer exact case match over accent-stripped match
|
||||
if exact || best.is_none() {
|
||||
best = Some((path, exact));
|
||||
}
|
||||
}
|
||||
}
|
||||
best.map(|(p, _)| p)
|
||||
}
|
||||
|
||||
/// Remove diacritical marks from a string (é→e, à→a, ü→u, etc.).
///
/// This is a small explicit lookup of the accented characters expected in
/// series names — no Unicode decomposition is performed; any character not in
/// the table passes through unchanged. Uppercase variants are folded too, so
/// callers do not need to lowercase first (callers that already lowercase are
/// unaffected).
fn strip_accents(s: &str) -> String {
    // 'æ'/'œ' expand to two letters, so s.len() is only a lower bound.
    let mut result = String::with_capacity(s.len());
    for c in s.chars() {
        match c {
            'à' | 'á' | 'â' | 'ã' | 'ä' | 'å' => result.push('a'),
            'À' | 'Á' | 'Â' | 'Ã' | 'Ä' | 'Å' => result.push('A'),
            'è' | 'é' | 'ê' | 'ë' => result.push('e'),
            'È' | 'É' | 'Ê' | 'Ë' => result.push('E'),
            'ì' | 'í' | 'î' | 'ï' => result.push('i'),
            'Ì' | 'Í' | 'Î' | 'Ï' => result.push('I'),
            'ò' | 'ó' | 'ô' | 'õ' | 'ö' => result.push('o'),
            'Ò' | 'Ó' | 'Ô' | 'Õ' | 'Ö' => result.push('O'),
            'ù' | 'ú' | 'û' | 'ü' => result.push('u'),
            'Ù' | 'Ú' | 'Û' | 'Ü' => result.push('U'),
            'ý' | 'ÿ' => result.push('y'),
            'Ý' => result.push('Y'),
            'ñ' => result.push('n'),
            'Ñ' => result.push('N'),
            'ç' => result.push('c'),
            'Ç' => result.push('C'),
            'æ' => result.push_str("ae"),
            'Æ' => result.push_str("AE"),
            'œ' => result.push_str("oe"),
            'Œ' => result.push_str("OE"),
            _ => result.push(c),
        }
    }
    result
}
|
||||
|
||||
// ─── Format deduplication ─────────────────────────────────────────────────────
|
||||
|
||||
/// When a download contains the same volume in multiple formats (e.g. T01.cbz and T01.pdf),
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "@/lib/api";
|
||||
import { BookPreview } from "@/app/components/BookPreview";
|
||||
import { ConvertButton } from "@/app/components/ConvertButton";
|
||||
import { DeleteBookButton } from "@/app/components/DeleteBookButton";
|
||||
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
|
||||
import nextDynamic from "next/dynamic";
|
||||
import { SafeHtml } from "@/app/components/SafeHtml";
|
||||
@@ -147,6 +148,7 @@ export default async function BookDetailPage({
|
||||
)}
|
||||
<MarkBookReadButton bookId={book.id} currentStatus={book.reading_status} />
|
||||
{book.file_format === "cbr" && <ConvertButton bookId={book.id} />}
|
||||
<DeleteBookButton bookId={book.id} libraryId={book.library_id} />
|
||||
</div>
|
||||
|
||||
{/* Metadata pills */}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { updateBook } from "@/lib/api";
|
||||
import { updateBook, apiFetch } from "@/lib/api";
|
||||
|
||||
export async function PATCH(
|
||||
request: NextRequest,
|
||||
@@ -15,3 +15,17 @@ export async function PATCH(
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
export async function DELETE(
|
||||
_request: NextRequest,
|
||||
{ params }: { params: Promise<{ bookId: string }> }
|
||||
) {
|
||||
const { bookId } = await params;
|
||||
try {
|
||||
const data = await apiFetch(`/books/${bookId}`, { method: "DELETE" });
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to delete book";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
68
apps/backoffice/app/components/DeleteBookButton.tsx
Normal file
68
apps/backoffice/app/components/DeleteBookButton.tsx
Normal file
@@ -0,0 +1,68 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { createPortal } from "react-dom";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { Button, Icon } from "./ui";
|
||||
import { useTranslation } from "@/lib/i18n/context";
|
||||
|
||||
export function DeleteBookButton({ bookId, libraryId }: { bookId: string; libraryId: string }) {
|
||||
const { t } = useTranslation();
|
||||
const router = useRouter();
|
||||
const [showConfirm, setShowConfirm] = useState(false);
|
||||
const [deleting, setDeleting] = useState(false);
|
||||
|
||||
async function handleDelete() {
|
||||
setDeleting(true);
|
||||
setShowConfirm(false);
|
||||
try {
|
||||
const resp = await fetch(`/api/books/${bookId}`, { method: "DELETE" });
|
||||
if (resp.ok) {
|
||||
router.push(`/libraries/${libraryId}/series`);
|
||||
}
|
||||
} finally {
|
||||
setDeleting(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<Button
|
||||
variant="destructive"
|
||||
size="sm"
|
||||
onClick={() => setShowConfirm(true)}
|
||||
disabled={deleting}
|
||||
>
|
||||
{deleting ? <Icon name="spinner" size="sm" className="animate-spin" /> : <Icon name="trash" size="sm" />}
|
||||
<span className="ml-1.5">{t("bookDetail.delete")}</span>
|
||||
</Button>
|
||||
|
||||
{showConfirm && createPortal(
|
||||
<>
|
||||
<div className="fixed inset-0 bg-black/30 backdrop-blur-sm z-50" onClick={() => setShowConfirm(false)} />
|
||||
<div className="fixed inset-0 flex items-center justify-center z-50 p-4">
|
||||
<div className="bg-card border border-border/50 rounded-xl shadow-2xl w-full max-w-sm overflow-hidden animate-in fade-in zoom-in-95 duration-200">
|
||||
<div className="p-6">
|
||||
<h3 className="text-lg font-semibold text-foreground mb-2">
|
||||
{t("bookDetail.delete")}
|
||||
</h3>
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{t("bookDetail.confirmDelete")}
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex justify-end gap-2 px-6 pb-6">
|
||||
<Button variant="outline" size="sm" onClick={() => setShowConfirm(false)}>
|
||||
{t("common.cancel")}
|
||||
</Button>
|
||||
<Button variant="destructive" size="sm" onClick={handleDelete}>
|
||||
{t("bookDetail.delete")}
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>,
|
||||
document.body
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -762,6 +762,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"bookDetail.fileFormat": "File format",
|
||||
"bookDetail.parsing": "Parsing",
|
||||
"bookDetail.updatedAt": "Updated",
|
||||
"bookDetail.delete": "Delete",
|
||||
"bookDetail.confirmDelete": "The file will be permanently deleted from disk. This action cannot be undone.",
|
||||
|
||||
// Book preview
|
||||
"bookPreview.preview": "Preview",
|
||||
|
||||
@@ -760,6 +760,8 @@ const fr = {
|
||||
"bookDetail.fileFormat": "Format fichier",
|
||||
"bookDetail.parsing": "Parsing",
|
||||
"bookDetail.updatedAt": "Mis à jour",
|
||||
"bookDetail.delete": "Supprimer",
|
||||
"bookDetail.confirmDelete": "Le fichier sera définitivement supprimé du disque. Cette action est irréversible.",
|
||||
|
||||
// Book preview
|
||||
"bookPreview.preview": "Aperçu",
|
||||
|
||||
1
infra/migrations/0069_add_unaccent_extension.sql
Normal file
1
infra/migrations/0069_add_unaccent_extension.sql
Normal file
@@ -0,0 +1 @@
|
||||
-- Enable the PostgreSQL unaccent extension so SQL can compare text
-- independently of diacritics (e.g. unaccent('géants') = unaccent('geants')).
CREATE EXTENSION IF NOT EXISTS unaccent;
|
||||
Reference in New Issue
Block a user