feat: conversion CBR → CBZ via job asynchrone
Ajoute la possibilité de convertir un livre CBR en CBZ depuis le backoffice. La conversion est sécurisée : le CBR original n'est supprimé qu'après vérification du CBZ généré et mise à jour de la base de données. - parsers: nouvelle fn `convert_cbr_to_cbz` (unar extract → zip pack → vérification → rename atomique) - api: `POST /books/:id/convert` crée un job `cbr_to_cbz` (vérifie format CBR, détecte collision) - indexer: nouveau `converter.rs` dispatché depuis `job.rs` - backoffice: bouton "Convert to CBZ" sur la page détail (visible si CBR), label dans JobRow - migrations: colonne `book_id` sur `index_jobs` + type `cbr_to_cbz` dans le check constraint Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -5,7 +5,7 @@ use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
use crate::{error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
pub struct ListBooksQuery {
|
||||
@@ -341,6 +341,113 @@ pub async fn list_series(
|
||||
}))
|
||||
}
|
||||
|
||||
/// Translate a canonical `/libraries/...` path into its physical on-disk location.
///
/// When the `LIBRARIES_ROOT_PATH` environment variable is set, the leading
/// `/libraries` segment is substituted with that root. Paths that are not in
/// canonical form — or calls made without the variable set — come back unchanged.
fn remap_libraries_path(path: &str) -> String {
    match std::env::var("LIBRARIES_ROOT_PATH") {
        Ok(root) if path.starts_with("/libraries/") => path.replacen("/libraries", &root, 1),
        _ => path.to_string(),
    }
}
|
||||
|
||||
/// Inverse of `remap_libraries_path`: turn a physical on-disk path back into the
/// canonical `/libraries/...` form stored in the database.
///
/// Only paths that actually begin with the configured `LIBRARIES_ROOT_PATH`
/// are rewritten; everything else is returned as-is.
fn unmap_libraries_path(path: &str) -> String {
    match std::env::var("LIBRARIES_ROOT_PATH") {
        Ok(root) if path.starts_with(&root) => path.replacen(&root, "/libraries", 1),
        _ => path.to_string(),
    }
}
|
||||
|
||||
/// Enqueue a CBR → CBZ conversion job for a single book
///
/// Validates that the book's most recent file is in CBR format and that no
/// CBZ with the same file stem already exists on disk, then inserts a pending
/// `cbr_to_cbz` row into `index_jobs` and returns the created job. The actual
/// conversion is performed asynchronously by the indexer worker.
#[utoipa::path(
    post,
    path = "/books/{id}/convert",
    tag = "books",
    params(
        ("id" = String, Path, description = "Book UUID"),
    ),
    responses(
        (status = 200, body = IndexJobResponse),
        (status = 404, description = "Book not found"),
        (status = 409, description = "Book is not CBR, or target CBZ already exists"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn convert_book(
    State(state): State<AppState>,
    Path(book_id): Path<Uuid>,
) -> Result<Json<IndexJobResponse>, ApiError> {
    // Fetch book file info
    // The LATERAL subquery picks the single most-recently-updated file for the
    // book; `bf.abs_path` / `bf.format` are NULL when the book has no files.
    let row = sqlx::query(
        r#"
        SELECT b.id, bf.abs_path, bf.format
        FROM books b
        LEFT JOIN LATERAL (
            SELECT abs_path, format
            FROM book_files
            WHERE book_id = b.id
            ORDER BY updated_at DESC
            LIMIT 1
        ) bf ON TRUE
        WHERE b.id = $1
        "#,
    )
    .bind(book_id)
    .fetch_optional(&state.pool)
    .await?;

    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
    let abs_path: Option<String> = row.get("abs_path");
    let format: Option<String> = row.get("format");

    // Only CBR sources are convertible; anything else (including a book with
    // no file row at all) is rejected with 409 before a job is created.
    if format.as_deref() != Some("cbr") {
        return Err(ApiError {
            status: axum::http::StatusCode::CONFLICT,
            message: "book is not in CBR format".to_string(),
        });
    }

    let abs_path = abs_path.ok_or_else(|| ApiError::not_found("book file path not found"))?;

    // Check for existing CBZ with same stem
    // NOTE(review): this existence check races with the worker (TOCTOU) — a CBZ
    // created between enqueue and execution is not caught here; presumably the
    // conversion itself re-verifies before overwriting — confirm in parsers.
    let physical_path = remap_libraries_path(&abs_path);
    let cbr_path = std::path::Path::new(&physical_path);
    if let (Some(parent), Some(stem)) = (cbr_path.parent(), cbr_path.file_stem()) {
        let cbz_path = parent.join(format!("{}.cbz", stem.to_string_lossy()));
        if cbz_path.exists() {
            return Err(ApiError {
                status: axum::http::StatusCode::CONFLICT,
                message: format!(
                    "CBZ file already exists: {}",
                    unmap_libraries_path(&cbz_path.to_string_lossy())
                ),
            });
        }
    }

    // Create the conversion job
    // `library_id` is intentionally left NULL for per-book jobs; only
    // `book_id` ties the job to its target.
    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, book_id, type, status) VALUES ($1, $2, 'cbr_to_cbz', 'pending')",
    )
    .bind(job_id)
    .bind(book_id)
    .execute(&state.pool)
    .await?;

    // Re-read the freshly inserted row so the response carries the DB-assigned
    // defaults (created_at, progress fields) rather than client-side guesses.
    let job_row = sqlx::query(
        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
    )
    .bind(job_id)
    .fetch_one(&state.pool)
    .await?;

    Ok(Json(crate::index_jobs::map_row(job_row)))
}
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
|
||||
@@ -24,6 +24,8 @@ pub struct IndexJobResponse {
|
||||
pub id: Uuid,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub library_id: Option<Uuid>,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub book_id: Option<Uuid>,
|
||||
pub r#type: String,
|
||||
pub status: String,
|
||||
#[schema(value_type = Option<String>)]
|
||||
@@ -122,7 +124,7 @@ pub async fn enqueue_rebuild(
|
||||
.await?;
|
||||
|
||||
let row = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
|
||||
"SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
.bind(id)
|
||||
.fetch_one(&state.pool)
|
||||
@@ -145,7 +147,7 @@ pub async fn enqueue_rebuild(
|
||||
)]
|
||||
pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
|
||||
"SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
@@ -185,7 +187,7 @@ pub async fn cancel_job(
|
||||
}
|
||||
|
||||
let row = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
|
||||
"SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
.bind(id.0)
|
||||
.fetch_one(&state.pool)
|
||||
@@ -294,6 +296,7 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
|
||||
IndexJobResponse {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
book_id: row.try_get("book_id").ok().flatten(),
|
||||
r#type: row.get("type"),
|
||||
status: row.get("status"),
|
||||
started_at: row.get("started_at"),
|
||||
@@ -339,9 +342,9 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
|
||||
)]
|
||||
pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
|
||||
FROM index_jobs
|
||||
WHERE status IN ('pending', 'running', 'generating_thumbnails')
|
||||
"SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
|
||||
FROM index_jobs
|
||||
WHERE status IN ('pending', 'running', 'generating_thumbnails')
|
||||
ORDER BY created_at ASC"
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
|
||||
@@ -69,6 +69,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/libraries/:id", delete(libraries::delete_library))
|
||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
|
||||
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
|
||||
|
||||
@@ -8,6 +8,7 @@ use utoipa::OpenApi;
|
||||
crate::books::get_book,
|
||||
crate::books::get_thumbnail,
|
||||
crate::books::list_series,
|
||||
crate::books::convert_book,
|
||||
crate::pages::get_page,
|
||||
crate::search::search_books,
|
||||
crate::index_jobs::enqueue_rebuild,
|
||||
|
||||
17
apps/backoffice/app/api/books/[bookId]/convert/route.ts
Normal file
17
apps/backoffice/app/api/books/[bookId]/convert/route.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { convertBook } from "@/lib/api";
|
||||
|
||||
// POST /api/books/[bookId]/convert
// Thin proxy that forwards the conversion request to the backend API and
// normalizes failures into a JSON error payload for the client component.
export async function POST(
  _request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  // Route params are a Promise in recent Next.js App Router versions and must be awaited.
  const { bookId } = await params;
  try {
    const data = await convertBook(bookId);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to start conversion";
    // NOTE(review): sniffing "409" out of the message text is brittle — it assumes
    // convertBook embeds the upstream status code in the error string; verify
    // against the apiFetch implementation in lib/api.
    const status = message.includes("409") ? 409 : 500;
    return NextResponse.json({ error: message }, { status });
  }
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch } from "../../../lib/api";
|
||||
import { BookPreview } from "../../components/BookPreview";
|
||||
import { ConvertButton } from "../../components/ConvertButton";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
import { notFound } from "next/navigation";
|
||||
@@ -115,7 +116,10 @@ export default async function BookDetailPage({
|
||||
{book.file_format && (
|
||||
<div className="flex items-center justify-between py-2 border-b border-border">
|
||||
<span className="text-sm text-muted-foreground">File Format:</span>
|
||||
<span className="text-sm text-foreground">{book.file_format.toUpperCase()}</span>
|
||||
<div className="flex items-center gap-3">
|
||||
<span className="text-sm text-foreground">{book.file_format.toUpperCase()}</span>
|
||||
{book.file_format === "cbr" && <ConvertButton bookId={book.id} />}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
71
apps/backoffice/app/components/ConvertButton.tsx
Normal file
71
apps/backoffice/app/components/ConvertButton.tsx
Normal file
@@ -0,0 +1,71 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import Link from "next/link";
|
||||
import { Button } from "./ui";
|
||||
|
||||
interface ConvertButtonProps {
  // UUID of the book to convert (must be a CBR; the server re-validates).
  bookId: string;
}

// Finite-state model for the conversion request lifecycle; using a tagged
// union keeps impossible combinations (e.g. loading + error) unrepresentable.
type ConvertState =
  | { type: "idle" }
  | { type: "loading" }
  | { type: "success"; jobId: string }
  | { type: "error"; message: string };

// Button that enqueues a CBR → CBZ conversion job for a book.
// On success it renders a link to the created job's detail page; on error it
// shows the message with a dismiss action that resets back to idle.
export function ConvertButton({ bookId }: ConvertButtonProps) {
  const [state, setState] = useState<ConvertState>({ type: "idle" });

  // Fire the conversion request against the local Next.js proxy route.
  const handleConvert = async () => {
    setState({ type: "loading" });
    try {
      const res = await fetch(`/api/books/${bookId}/convert`, { method: "POST" });
      if (!res.ok) {
        // Error body may not be JSON (e.g. gateway errors) — fall back to statusText.
        const body = await res.json().catch(() => ({ error: res.statusText }));
        setState({ type: "error", message: body.error || "Conversion failed" });
        return;
      }
      const job = await res.json();
      setState({ type: "success", jobId: job.id });
    } catch (err) {
      setState({ type: "error", message: err instanceof Error ? err.message : "Unknown error" });
    }
  };

  // Terminal state: conversion job accepted — surface a link instead of the button.
  if (state.type === "success") {
    return (
      <div className="flex items-center gap-2 text-sm text-success">
        <span>Conversion started.</span>
        <Link href={`/jobs/${state.jobId}`} className="text-primary hover:underline font-medium">
          View job →
        </Link>
      </div>
    );
  }

  // Error state: show the failure and allow retrying via "Dismiss" → idle.
  if (state.type === "error") {
    return (
      <div className="flex flex-col gap-1">
        <span className="text-sm text-destructive">{state.message}</span>
        <button
          className="text-xs text-muted-foreground hover:underline text-left"
          onClick={() => setState({ type: "idle" })}
        >
          Dismiss
        </button>
      </div>
    );
  }

  // Idle / loading: the button itself, disabled while the request is in flight.
  return (
    <Button
      variant="secondary"
      size="sm"
      onClick={handleConvert}
      disabled={state.type === "loading"}
    >
      {state.type === "loading" ? "Converting…" : "Convert to CBZ"}
    </Button>
  );
}
|
||||
@@ -93,7 +93,9 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
<td className="px-4 py-3 text-sm text-foreground">
|
||||
{job.library_id ? libraryName || job.library_id.slice(0, 8) : "—"}
|
||||
</td>
|
||||
<td className="px-4 py-3 text-sm text-foreground">{job.type}</td>
|
||||
<td className="px-4 py-3 text-sm text-foreground">
|
||||
{job.type === "cbr_to_cbz" ? "CBR → CBZ" : job.type}
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex items-center gap-2 flex-wrap">
|
||||
<StatusBadge status={job.status} />
|
||||
|
||||
@@ -13,6 +13,7 @@ export type LibraryDto = {
|
||||
export type IndexJobDto = {
|
||||
id: string;
|
||||
library_id: string | null;
|
||||
book_id: string | null;
|
||||
type: string;
|
||||
status: string;
|
||||
started_at: string | null;
|
||||
@@ -348,3 +349,7 @@ export async function clearCache() {
|
||||
export async function getThumbnailStats() {
|
||||
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
|
||||
}
|
||||
|
||||
/** Enqueue a CBR → CBZ conversion job for the given book; resolves to the created job row. */
export async function convertBook(bookId: string) {
  return apiFetch<IndexJobDto>(`/books/${bookId}/convert`, { method: "POST" });
}
|
||||
|
||||
108
apps/indexer/src/converter.rs
Normal file
108
apps/indexer/src/converter.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
use anyhow::Result;
|
||||
use sqlx::Row;
|
||||
use tracing::{info, warn};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{utils, AppState};
|
||||
|
||||
/// Execute a `cbr_to_cbz` job for the given `book_id`.
///
/// Flow:
/// 1. Read book file info from DB
/// 2. Resolve physical path
/// 3. Convert CBR → CBZ via `parsers::convert_cbr_to_cbz`
/// 4. Update `book_files` and `books` in DB
/// 5. Delete the original CBR (failure here does not fail the job)
/// 6. Mark job as success
///
/// Errors returned here propagate to the caller; marking the job as failed is
/// presumably handled by the dispatcher in `job.rs` — TODO confirm.
pub async fn convert_book(state: &AppState, job_id: Uuid, book_id: Uuid) -> Result<()> {
    info!("[CONVERTER] Starting CBR→CBZ conversion for book {} (job {})", book_id, job_id);

    // Fetch current file info
    // Most-recently-updated file wins, mirroring the API-side selection logic.
    let row = sqlx::query(
        r#"
        SELECT bf.id as file_id, bf.abs_path, bf.format
        FROM book_files bf
        WHERE bf.book_id = $1
        ORDER BY bf.updated_at DESC
        LIMIT 1
        "#,
    )
    .bind(book_id)
    .fetch_optional(&state.pool)
    .await?;

    let row = row.ok_or_else(|| anyhow::anyhow!("no book file found for book {}", book_id))?;

    let file_id: Uuid = row.get("file_id");
    let abs_path: String = row.get("abs_path");
    let format: String = row.get("format");

    // Re-validate the format here: the file may have changed between enqueue
    // time (API check) and execution time (worker pickup).
    if format != "cbr" {
        return Err(anyhow::anyhow!(
            "book {} is not CBR (format={}), skipping conversion",
            book_id,
            format
        ));
    }

    // Translate the canonical `/libraries/...` path into the mounted filesystem path.
    let physical_path = utils::remap_libraries_path(&abs_path);
    let cbr_path = std::path::Path::new(&physical_path);

    info!("[CONVERTER] Converting {} → CBZ", cbr_path.display());

    // Update job status to running (already set by claim_next_job, this updates current_file)
    sqlx::query(
        "UPDATE index_jobs SET current_file = $2 WHERE id = $1",
    )
    .bind(job_id)
    .bind(&abs_path)
    .execute(&state.pool)
    .await?;

    // Do the conversion
    // NOTE(review): `parsers` does not appear in this file's `use` list — it is
    // assumed to be a workspace crate in scope; confirm against Cargo.toml.
    let cbz_path = parsers::convert_cbr_to_cbz(cbr_path)?;

    info!("[CONVERTER] CBZ created at {}", cbz_path.display());

    // Remap physical path back to /libraries/ canonical form
    let new_abs_path = utils::unmap_libraries_path(&cbz_path.to_string_lossy());

    // Update book_files: abs_path + format
    sqlx::query(
        "UPDATE book_files SET abs_path = $2, format = 'cbz', updated_at = NOW() WHERE id = $1",
    )
    .bind(file_id)
    .bind(&new_abs_path)
    .execute(&state.pool)
    .await?;

    // Update books: kind stays 'comic', updated_at refreshed
    sqlx::query("UPDATE books SET updated_at = NOW() WHERE id = $1")
        .bind(book_id)
        .execute(&state.pool)
        .await?;

    info!("[CONVERTER] DB updated for book {}", book_id);

    // Delete the original CBR file (best-effort): the DB already points at the
    // CBZ, so a leftover CBR is harmless orphaned data rather than corruption —
    // which is why a failed delete is logged but does not fail the job.
    if let Err(e) = std::fs::remove_file(cbr_path) {
        warn!(
            "[CONVERTER] Could not delete original CBR {}: {} (non-fatal)",
            cbr_path.display(),
            e
        );
    } else {
        info!("[CONVERTER] Deleted original CBR {}", cbr_path.display());
    }

    // Mark job success
    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .execute(&state.pool)
    .await?;

    info!("[CONVERTER] Job {} completed successfully", job_id);
    Ok(())
}
|
||||
@@ -4,7 +4,7 @@ use sqlx::{PgPool, Row};
|
||||
use tracing::{error, info};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{analyzer, meili, scanner, AppState};
|
||||
use crate::{analyzer, converter, meili, scanner, AppState};
|
||||
|
||||
pub async fn cleanup_stale_jobs(pool: &PgPool) -> Result<()> {
|
||||
let result = sqlx::query(
|
||||
@@ -137,10 +137,22 @@ pub async fn process_job(
|
||||
) -> Result<()> {
|
||||
info!("[JOB] Processing {} library={:?}", job_id, target_library_id);
|
||||
|
||||
let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
let (job_type, book_id): (String, Option<Uuid>) = {
|
||||
let row = sqlx::query("SELECT type, book_id FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
(row.get("type"), row.get("book_id"))
|
||||
};
|
||||
|
||||
// CBR to CBZ conversion
|
||||
if job_type == "cbr_to_cbz" {
|
||||
let book_id = book_id.ok_or_else(|| {
|
||||
anyhow::anyhow!("cbr_to_cbz job {} has no book_id", job_id)
|
||||
})?;
|
||||
converter::convert_book(state, job_id, book_id).await?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Thumbnail rebuild: generate thumbnails for books missing them
|
||||
if job_type == "thumbnail_rebuild" {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod analyzer;
|
||||
pub mod api;
|
||||
pub mod batch;
|
||||
pub mod converter;
|
||||
pub mod job;
|
||||
pub mod meili;
|
||||
pub mod scheduler;
|
||||
|
||||
Reference in New Issue
Block a user