feat: add metadata refresh job to re-download metadata for linked series
Adds a new job type that refreshes metadata from external providers for all series already linked via approved external_metadata_links. Tracks and displays per-field diffs (series and book level), respects locked fields, and provides a detailed change report in the job detail page. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -7,6 +7,7 @@ mod komga;
|
||||
mod libraries;
|
||||
mod metadata;
|
||||
mod metadata_batch;
|
||||
mod metadata_refresh;
|
||||
mod metadata_providers;
|
||||
mod api_middleware;
|
||||
mod openapi;
|
||||
@@ -116,6 +117,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/metadata/batch", axum::routing::post(metadata_batch::start_batch))
|
||||
.route("/metadata/batch/:id/report", get(metadata_batch::get_batch_report))
|
||||
.route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
|
||||
.route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
|
||||
.route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
|
||||
.merge(settings::settings_routes())
|
||||
.route_layer(middleware::from_fn_with_state(
|
||||
state.clone(),
|
||||
|
||||
@@ -1008,7 +1008,7 @@ async fn resolve_provider_name(pool: &PgPool, lib_provider: Option<&str>) -> Str
|
||||
"google_books".to_string()
|
||||
}
|
||||
|
||||
async fn load_provider_config_from_pool(
|
||||
pub(crate) async fn load_provider_config_from_pool(
|
||||
pool: &PgPool,
|
||||
provider_name: &str,
|
||||
) -> metadata_providers::ProviderConfig {
|
||||
@@ -1042,7 +1042,7 @@ async fn load_provider_config_from_pool(
|
||||
config
|
||||
}
|
||||
|
||||
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
pub(crate) async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
sqlx::query_scalar::<_, bool>(
|
||||
"SELECT status = 'cancelled' FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
@@ -1052,7 +1052,7 @@ async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32, total: i32, current: &str) {
|
||||
pub(crate) async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32, total: i32, current: &str) {
|
||||
let percent = if total > 0 {
|
||||
(processed as f64 / total as f64 * 100.0) as i32
|
||||
} else {
|
||||
|
||||
793
apps/api/src/metadata_refresh.rs
Normal file
793
apps/api/src/metadata_refresh.rs
Normal file
@@ -0,0 +1,793 @@
|
||||
use axum::{
|
||||
extract::{Path as AxumPath, State},
|
||||
Json,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{PgPool, Row};
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
use tracing::{info, warn};
|
||||
|
||||
use crate::{error::ApiError, metadata_providers, state::AppState};
|
||||
use crate::metadata_batch::{load_provider_config_from_pool, is_job_cancelled, update_progress};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// DTOs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Request body for `POST /metadata/refresh`.
#[derive(Deserialize, ToSchema)]
pub struct MetadataRefreshRequest {
    /// UUID (as a string) of the library whose approved external metadata
    /// links should be re-fetched. Parsed and validated in `start_refresh`.
    pub library_id: String,
}
|
||||
|
||||
/// A single field change: old → new.
///
/// `old` is omitted from the JSON when the field previously had no value.
/// The diff helpers in this module only emit a `FieldDiff` when a non-empty
/// incoming value actually differs from the stored one, so `new` is always
/// populated by those constructors.
#[derive(Serialize, Clone)]
struct FieldDiff {
    // Field name as stored in the DB column (e.g. "description", "authors").
    field: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    old: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    new: Option<serde_json::Value>,
}
|
||||
|
||||
/// Per-book changes collected while syncing one matched local book.
#[derive(Serialize, Clone)]
struct BookDiff {
    // Local book UUID, stringified for the JSON report.
    book_id: String,
    // Local (not provider) title, for display in the job report.
    title: String,
    // Volume number reported by the external provider, if any.
    volume: Option<i32>,
    // Only non-empty diffs are recorded (see refresh_link).
    changes: Vec<FieldDiff>,
}
|
||||
|
||||
/// Per-series change report produced by `refresh_link`.
#[derive(Serialize, Clone)]
struct SeriesRefreshResult {
    series_name: String,
    // Provider identifier the link was refreshed against.
    provider: String,
    // One of "updated", "unchanged", "error".
    status: String,
    // Series-level field diffs (empty when nothing changed).
    series_changes: Vec<FieldDiff>,
    // Per-book diffs for books matched to provider volumes.
    book_changes: Vec<BookDiff>,
    // Populated only for status == "error".
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}
|
||||
|
||||
/// Response DTO for the report endpoint (`GET /metadata/refresh/{id}/report`).
///
/// Counters are read back out of the job's `stats_json`; `changes` is the
/// filtered list of per-series results (unchanged series are excluded when
/// the job writes its stats — see `process_metadata_refresh`).
#[derive(Serialize, ToSchema)]
pub struct MetadataRefreshReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    // Raw job status string from index_jobs (e.g. "running", "success").
    pub status: String,
    // Mirrors index_jobs.total_files, which this job type reuses as the link count.
    pub total_links: i64,
    pub refreshed: i64,
    pub unchanged: i64,
    pub errors: i64,
    // JSON array of SeriesRefreshResult objects (possibly empty).
    pub changes: serde_json::Value,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// POST /metadata/refresh — Trigger a metadata refresh job
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
    post,
    path = "/metadata/refresh",
    tag = "metadata",
    request_body = MetadataRefreshRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
/// Trigger a metadata refresh job for a library.
///
/// Validates the library, deduplicates against an already pending/running
/// refresh job, requires at least one approved external metadata link, then
/// inserts a `metadata_refresh` row into `index_jobs` and spawns the
/// background worker. Returns `{id, status}` immediately; progress is
/// tracked via the jobs table.
pub async fn start_refresh(
    State(state): State<AppState>,
    Json(body): Json<MetadataRefreshRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let library_id: Uuid = body
        .library_id
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;

    // Verify library exists before creating any job rows.
    sqlx::query("SELECT 1 FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;

    // Check no existing running metadata_refresh job for this library.
    // NOTE(review): this check-then-insert is not atomic; two concurrent
    // requests could both pass it. Presumably acceptable here — confirm
    // whether a unique partial index on (library_id, type, status) exists.
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_refresh' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;

    if let Some(existing_id) = existing {
        // Idempotent response: hand back the in-flight job instead of erroring.
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }

    // Refuse to create a no-op job: there must be approved links to refresh.
    let link_count: i64 = sqlx::query_scalar(
        "SELECT COUNT(*) FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
    )
    .bind(library_id)
    .fetch_one(&state.pool)
    .await?;

    if link_count == 0 {
        return Err(ApiError::bad_request("No approved metadata links to refresh for this library"));
    }

    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_refresh', 'pending')",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;

    // Spawn the background processing task; failures are recorded on the
    // job row rather than surfaced to this (already-returned) request.
    let pool = state.pool.clone();
    tokio::spawn(async move {
        if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
            warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
            // Best-effort status update; nothing useful to do if this fails too.
            let _ = sqlx::query(
                "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .bind(e.to_string())
            .execute(&pool)
            .await;
        }
    });

    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "pending",
    })))
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// GET /metadata/refresh/:id/report — Refresh report from stats_json
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
    get,
    path = "/metadata/refresh/{id}/report",
    tag = "metadata",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = MetadataRefreshReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
/// Return the refresh report for one job, read from `index_jobs.stats_json`.
///
/// Works for jobs in any state: while the job is still running (stats_json
/// not yet written) all counters are zero and `changes` is an empty array.
pub async fn get_refresh_report(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<Json<MetadataRefreshReportDto>, ApiError> {
    // Type filter ensures we never serve a report for a different job kind.
    let row = sqlx::query(
        "SELECT status, stats_json, total_files FROM index_jobs WHERE id = $1 AND type = 'metadata_refresh'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;

    let job_status: String = row.get("status");
    let stats: Option<serde_json::Value> = row.get("stats_json");
    let total_files: Option<i32> = row.get("total_files");

    // Defensive extraction: every counter defaults to 0 and changes to []
    // if stats_json is missing or malformed.
    let (refreshed, unchanged, errors, changes) = if let Some(ref s) = stats {
        (
            s.get("refreshed").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("unchanged").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("errors").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("changes").cloned().unwrap_or(serde_json::json!([])),
        )
    } else {
        (0, 0, 0, serde_json::json!([]))
    };

    Ok(Json(MetadataRefreshReportDto {
        job_id,
        status: job_status,
        // total_files doubles as "total links" for this job type.
        total_links: total_files.unwrap_or(0) as i64,
        refreshed,
        unchanged,
        errors,
        changes,
    }))
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Background processing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Background worker for a metadata refresh job.
///
/// Marks the job running, walks every approved link for the library in
/// series-name order, refreshes each one via `refresh_link`, and finally
/// writes an aggregate `stats_json` (counters plus the non-"unchanged"
/// per-series results) onto the job row. Per-link errors are recorded in
/// the results and counted, but do not abort the job; only infrastructure
/// errors (DB failures) propagate as `Err` and fail the whole job.
async fn process_metadata_refresh(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    // Set job to running.
    sqlx::query("UPDATE index_jobs SET status = 'running', started_at = NOW() WHERE id = $1")
        .bind(job_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Get all approved links for this library.
    let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
        r#"
        SELECT id, series_name, provider, external_id
        FROM external_metadata_links
        WHERE library_id = $1 AND status = 'approved'
        ORDER BY series_name
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    // total_files is reused as "total links" for this job type (the report
    // endpoint reads it back under that name).
    let total = links.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let mut processed = 0i32;
    let mut refreshed = 0i32;
    let mut unchanged = 0i32;
    let mut errors = 0i32;
    let mut all_results: Vec<SeriesRefreshResult> = Vec::new();

    for (link_id, series_name, provider_name, external_id) in &links {
        // Check cancellation between links so a cancel takes effect within
        // at most one provider round-trip.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            // Cancelled is a clean exit, not a failure.
            return Ok(());
        }

        match refresh_link(pool, *link_id, library_id, series_name, provider_name, external_id).await {
            Ok(result) => {
                if result.status == "updated" {
                    refreshed += 1;
                    info!("[METADATA_REFRESH] job={job_id} updated series='{series_name}' via {provider_name}");
                } else {
                    unchanged += 1;
                }
                all_results.push(result);
            }
            Err(e) => {
                // A bad provider response for one series must not sink the job;
                // record the error in the report and keep going.
                errors += 1;
                warn!("[METADATA_REFRESH] job={job_id} error on series='{series_name}': {e}");
                all_results.push(SeriesRefreshResult {
                    series_name: series_name.clone(),
                    provider: provider_name.clone(),
                    status: "error".to_string(),
                    series_changes: vec![],
                    book_changes: vec![],
                    error: Some(e),
                });
            }
        }

        processed += 1;
        update_progress(pool, job_id, processed, total, series_name).await;

        // Rate limit: 1s delay between provider calls.
        tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
    }

    // Only keep series that have changes or errors (filter out "unchanged")
    // so stats_json stays small for large libraries.
    let changes_only: Vec<&SeriesRefreshResult> = all_results
        .iter()
        .filter(|r| r.status != "unchanged")
        .collect();

    // Build stats summary consumed by get_refresh_report.
    let stats = serde_json::json!({
        "total_links": total,
        "refreshed": refreshed,
        "unchanged": unchanged,
        "errors": errors,
        "changes": changes_only,
    });

    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, stats_json = $2 WHERE id = $1",
    )
    .bind(job_id)
    .bind(stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");

    Ok(())
}
|
||||
|
||||
/// Refresh a single approved metadata link: re-fetch from provider, compare,
/// sync, and return a per-series diff report.
///
/// Flow: (1) re-search the provider for the series and update the link's
/// cached metadata; (2) diff + upsert series-level metadata (lock-aware);
/// (3) re-fetch the provider's book list, rebuild `external_book_metadata`
/// for this link, match provider volumes to local books, and diff + push
/// metadata to each matched book; (4) stamp `synced_at` on the link.
async fn refresh_link(
    pool: &PgPool,
    link_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    provider_name: &str,
    external_id: &str,
) -> Result<SeriesRefreshResult, String> {
    let provider = metadata_providers::get_provider(provider_name)
        .ok_or_else(|| format!("Unknown provider: {provider_name}"))?;

    let config = load_provider_config_from_pool(pool, provider_name).await;

    let mut series_changes: Vec<FieldDiff> = Vec::new();
    let mut book_changes: Vec<BookDiff> = Vec::new();

    // ── Series-level refresh ──────────────────────────────────────────────
    let candidates = provider
        .search_series(series_name, &config)
        .await
        .map_err(|e| format!("provider search error: {e}"))?;

    // Prefer the exact external_id the link was approved for.
    // NOTE(review): the `.or_else(|| candidates.first())` fallback means that
    // if the approved id no longer appears in search results, we silently
    // refresh from the top search hit — which may be a *different* series.
    // Confirm this is intended; skipping (leaving `candidate` as None) may
    // be the safer behavior for an approved link.
    let candidate = candidates
        .iter()
        .find(|c| c.external_id == external_id)
        .or_else(|| candidates.first());

    if let Some(candidate) = candidate {
        // Refresh the link's cached provider payload.
        sqlx::query(
            r#"
            UPDATE external_metadata_links
            SET metadata_json = $2,
                total_volumes_external = $3,
                updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(link_id)
        .bind(&candidate.metadata_json)
        .bind(candidate.total_volumes)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + sync series metadata (respects locked_fields).
        series_changes = sync_series_with_diff(pool, library_id, series_name, candidate).await?;
    }

    // ── Book-level refresh ────────────────────────────────────────────────
    let books = provider
        .get_series_books(external_id, &config)
        .await
        .map_err(|e| format!("provider books error: {e}"))?;

    // Rebuild strategy: drop all existing external book rows for this link,
    // then re-insert from the fresh provider response.
    sqlx::query("DELETE FROM external_book_metadata WHERE link_id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Pre-fetch local books in the series, ordered so that positional
    // fallback volume numbers (idx + 1 below) are stable across runs.
    let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
        r#"
        SELECT id, volume, title FROM books
        WHERE library_id = $1
          AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
        ORDER BY volume NULLS LAST,
                 REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
                 COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
                 title ASC
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    // Books without an explicit volume get a positional one (1-based).
    let local_books_with_pos: Vec<(Uuid, i32, String)> = local_books
        .iter()
        .enumerate()
        .map(|(idx, (id, vol, title))| (*id, vol.unwrap_or((idx + 1) as i32), title.clone()))
        .collect();

    // Each local book may be claimed by at most one provider volume.
    let mut matched_local_ids = std::collections::HashSet::new();

    for (ext_idx, book) in books.iter().enumerate() {
        let ext_vol = book.volume_number.unwrap_or((ext_idx + 1) as i32);

        // First pass: match by volume number.
        let mut local_book_id: Option<Uuid> = local_books_with_pos
            .iter()
            .find(|(id, v, _)| *v == ext_vol && !matched_local_ids.contains(id))
            .map(|(id, _, _)| *id);

        // Second pass: fuzzy match by title containment (either direction).
        if local_book_id.is_none() {
            let ext_title_lower = book.title.to_lowercase();
            local_book_id = local_books_with_pos
                .iter()
                .find(|(id, _, local_title)| {
                    if matched_local_ids.contains(id) {
                        return false;
                    }
                    let local_lower = local_title.to_lowercase();
                    local_lower.contains(&ext_title_lower) || ext_title_lower.contains(&local_lower)
                })
                .map(|(id, _, _)| *id);
        }

        if let Some(id) = local_book_id {
            matched_local_ids.insert(id);
        }

        // Insert external_book_metadata (book_id is NULL for unmatched volumes).
        sqlx::query(
            r#"
            INSERT INTO external_book_metadata
              (link_id, book_id, external_book_id, volume_number, title, authors, isbn, summary, cover_url, page_count, language, publish_date, metadata_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            "#,
        )
        .bind(link_id)
        .bind(local_book_id)
        .bind(&book.external_book_id)
        .bind(book.volume_number)
        .bind(&book.title)
        .bind(&book.authors)
        .bind(&book.isbn)
        .bind(&book.summary)
        .bind(&book.cover_url)
        .bind(book.page_count)
        .bind(&book.language)
        .bind(&book.publish_date)
        .bind(&book.metadata_json)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + push metadata to the matched local book (lock-aware).
        if let Some(book_id) = local_book_id {
            let diffs = sync_book_with_diff(pool, book_id, book).await?;
            if !diffs.is_empty() {
                // Report under the local title so the UI matches the library view.
                let local_title = local_books_with_pos
                    .iter()
                    .find(|(id, _, _)| *id == book_id)
                    .map(|(_, _, t)| t.clone())
                    .unwrap_or_default();
                book_changes.push(BookDiff {
                    book_id: book_id.to_string(),
                    title: local_title,
                    volume: book.volume_number,
                    changes: diffs,
                });
            }
        }
    }

    // Stamp the link as freshly synced.
    sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let has_changes = !series_changes.is_empty() || !book_changes.is_empty();

    Ok(SeriesRefreshResult {
        series_name: series_name.to_string(),
        provider: provider_name.to_string(),
        status: if has_changes { "updated".to_string() } else { "unchanged".to_string() },
        series_changes,
        book_changes,
        error: None,
    })
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Diff helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Compare old/new for a nullable string field. Returns Some(FieldDiff) only if value actually changed.
|
||||
fn diff_opt_str(field: &str, old: Option<&str>, new: Option<&str>) -> Option<FieldDiff> {
|
||||
let new_val = new.filter(|s| !s.is_empty());
|
||||
// Only report a change if there is a new non-empty value AND it differs from old
|
||||
match (old, new_val) {
|
||||
(Some(o), Some(n)) if o != n => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::Value::String(o.to_string())),
|
||||
new: Some(serde_json::Value::String(n.to_string())),
|
||||
}),
|
||||
(None, Some(n)) => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: None,
|
||||
new: Some(serde_json::Value::String(n.to_string())),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_opt_i32(field: &str, old: Option<i32>, new: Option<i32>) -> Option<FieldDiff> {
|
||||
match (old, new) {
|
||||
(Some(o), Some(n)) if o != n => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::json!(o)),
|
||||
new: Some(serde_json::json!(n)),
|
||||
}),
|
||||
(None, Some(n)) => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: None,
|
||||
new: Some(serde_json::json!(n)),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_str_vec(field: &str, old: &[String], new: &[String]) -> Option<FieldDiff> {
|
||||
if new.is_empty() {
|
||||
return None;
|
||||
}
|
||||
if old != new {
|
||||
Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::json!(old)),
|
||||
new: Some(serde_json::json!(new)),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Series sync with diff tracking
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Upsert series-level metadata from a provider candidate, returning the
/// list of fields that actually changed.
///
/// Diffs are computed in Rust *before* the upsert by reading the current
/// `series_metadata` row; the upsert itself re-enforces the same rules in
/// SQL (locked fields untouched, empty/NULL provider values never clobber
/// existing data), so the reported diffs are intended to mirror what the
/// SQL actually writes.
async fn sync_series_with_diff(
    pool: &PgPool,
    library_id: Uuid,
    series_name: &str,
    candidate: &metadata_providers::SeriesCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Prefer the description embedded in the provider's raw payload,
    // falling back to the candidate's structured field.
    let new_description = candidate.metadata_json
        .get("description")
        .and_then(|d| d.as_str())
        .or(candidate.description.as_deref());
    let new_authors = &candidate.authors;
    let new_publishers = &candidate.publishers;
    let new_start_year = candidate.start_year;
    let new_total_volumes = candidate.total_volumes;
    // Status only exists in the raw payload, not as a structured field.
    let new_status = candidate.metadata_json
        .get("status")
        .and_then(|s| s.as_str());

    // Fetch existing series metadata for diffing (may not exist yet).
    let existing = sqlx::query(
        r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
           FROM series_metadata WHERE library_id = $1 AND name = $2"#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_optional(pool)
    .await
    .map_err(|e| e.to_string())?;

    // locked_fields is a JSON object of {field_name: bool}; absent row or
    // absent key means "not locked".
    let locked = existing
        .as_ref()
        .map(|r| r.get::<serde_json::Value, _>("locked_fields"))
        .unwrap_or(serde_json::json!({}));
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change).
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("description") {
        let old_desc: Option<String> = existing.as_ref().and_then(|r| r.get("description"));
        if let Some(d) = diff_opt_str("description", old_desc.as_deref(), new_description) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old_authors: Vec<String> = existing.as_ref().map(|r| r.get("authors")).unwrap_or_default();
        if let Some(d) = diff_str_vec("authors", &old_authors, new_authors) {
            diffs.push(d);
        }
    }
    if !is_locked("publishers") {
        let old_publishers: Vec<String> = existing.as_ref().map(|r| r.get("publishers")).unwrap_or_default();
        if let Some(d) = diff_str_vec("publishers", &old_publishers, new_publishers) {
            diffs.push(d);
        }
    }
    if !is_locked("start_year") {
        let old_year: Option<i32> = existing.as_ref().and_then(|r| r.get("start_year"));
        if let Some(d) = diff_opt_i32("start_year", old_year, new_start_year) {
            diffs.push(d);
        }
    }
    if !is_locked("total_volumes") {
        let old_vols: Option<i32> = existing.as_ref().and_then(|r| r.get("total_volumes"));
        if let Some(d) = diff_opt_i32("total_volumes", old_vols, new_total_volumes) {
            diffs.push(d);
        }
    }
    if !is_locked("status") {
        let old_status: Option<String> = existing.as_ref().and_then(|r| r.get("status"));
        if let Some(d) = diff_opt_str("status", old_status.as_deref(), new_status) {
            diffs.push(d);
        }
    }

    // Now do the actual upsert. The CASE expressions keep locked fields and
    // refuse to overwrite with empty strings / NULLs / empty arrays.
    sqlx::query(
        r#"
        INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
        ON CONFLICT (library_id, name)
        DO UPDATE SET
            description = CASE
                WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
                ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
            END,
            publishers = CASE
                WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
                WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
                ELSE series_metadata.publishers
            END,
            start_year = CASE
                WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
                ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
            END,
            total_volumes = CASE
                WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
                ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
            END,
            status = CASE
                WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
                ELSE COALESCE(EXCLUDED.status, series_metadata.status)
            END,
            authors = CASE
                WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
                WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
                ELSE series_metadata.authors
            END,
            updated_at = NOW()
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .bind(new_description)
    .bind(new_publishers)
    .bind(new_start_year)
    .bind(new_total_volumes)
    .bind(new_status)
    .bind(new_authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Book sync with diff tracking
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Push provider metadata onto one local book, returning the fields that
/// actually changed.
///
/// Same two-phase pattern as `sync_series_with_diff`: diffs are computed
/// first against the current row, then a single UPDATE applies the values
/// with lock- and emptiness-guards duplicated in SQL. The singular
/// `author` column is kept in step with the first element of `authors`.
async fn sync_book_with_diff(
    pool: &PgPool,
    book_id: Uuid,
    ext_book: &metadata_providers::BookCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Fetch current book state (fetch_one: caller guarantees the id exists,
    // since it came from the local-books query in refresh_link).
    let current = sqlx::query(
        "SELECT summary, isbn, publish_date, language, authors, locked_fields FROM books WHERE id = $1",
    )
    .bind(book_id)
    .fetch_one(pool)
    .await
    .map_err(|e| e.to_string())?;

    // locked_fields is a JSON object of {field_name: bool}.
    let locked = current.get::<serde_json::Value, _>("locked_fields");
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change).
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("summary") {
        let old: Option<String> = current.get("summary");
        if let Some(d) = diff_opt_str("summary", old.as_deref(), ext_book.summary.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("isbn") {
        let old: Option<String> = current.get("isbn");
        if let Some(d) = diff_opt_str("isbn", old.as_deref(), ext_book.isbn.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("publish_date") {
        let old: Option<String> = current.get("publish_date");
        if let Some(d) = diff_opt_str("publish_date", old.as_deref(), ext_book.publish_date.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("language") {
        let old: Option<String> = current.get("language");
        if let Some(d) = diff_opt_str("language", old.as_deref(), ext_book.language.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old: Vec<String> = current.get("authors");
        if let Some(d) = diff_str_vec("authors", &old, &ext_book.authors) {
            diffs.push(d);
        }
    }

    // Do the actual update; locked fields and empty incoming values keep
    // their existing data, mirroring the diff rules above.
    sqlx::query(
        r#"
        UPDATE books SET
            summary = CASE
                WHEN (locked_fields->>'summary')::boolean IS TRUE THEN summary
                ELSE COALESCE(NULLIF($2, ''), summary)
            END,
            isbn = CASE
                WHEN (locked_fields->>'isbn')::boolean IS TRUE THEN isbn
                ELSE COALESCE(NULLIF($3, ''), isbn)
            END,
            publish_date = CASE
                WHEN (locked_fields->>'publish_date')::boolean IS TRUE THEN publish_date
                ELSE COALESCE(NULLIF($4, ''), publish_date)
            END,
            language = CASE
                WHEN (locked_fields->>'language')::boolean IS TRUE THEN language
                ELSE COALESCE(NULLIF($5, ''), language)
            END,
            authors = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN authors
                WHEN CARDINALITY($6::text[]) > 0 THEN $6
                ELSE authors
            END,
            author = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN author
                WHEN CARDINALITY($6::text[]) > 0 THEN $6[1]
                ELSE author
            END,
            updated_at = NOW()
        WHERE id = $1
        "#,
    )
    .bind(book_id)
    .bind(&ext_book.summary)
    .bind(&ext_book.isbn)
    .bind(&ext_book.publish_date)
    .bind(&ext_book.language)
    .bind(&ext_book.authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
|
||||
16
apps/backoffice/app/api/metadata/refresh/report/route.ts
Normal file
16
apps/backoffice/app/api/metadata/refresh/report/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const jobId = request.nextUrl.searchParams.get("job_id");
|
||||
if (!jobId) {
|
||||
return NextResponse.json({ error: "job_id required" }, { status: 400 });
|
||||
}
|
||||
const data = await apiFetch(`/metadata/refresh/${jobId}/report`);
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to get report";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
16
apps/backoffice/app/api/metadata/refresh/route.ts
Normal file
16
apps/backoffice/app/api/metadata/refresh/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const data = await apiFetch<{ id: string; status: string }>("/metadata/refresh", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to start refresh";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import { useState } from "react";
|
||||
import Link from "next/link";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
import { JobProgress } from "./JobProgress";
|
||||
import { StatusBadge, JobTypeBadge, Button, MiniProgressBar } from "./ui";
|
||||
import { StatusBadge, JobTypeBadge, Button, MiniProgressBar, Icon } from "./ui";
|
||||
|
||||
interface JobRowProps {
|
||||
job: {
|
||||
@@ -59,28 +59,11 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
const isThumbnailJob = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
|
||||
const hasThumbnailPhase = isPhase2 || isThumbnailJob;
|
||||
|
||||
// Files column: index-phase stats only (Phase 1 discovery)
|
||||
const filesDisplay =
|
||||
job.status === "running" && !isPhase2
|
||||
? job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: scanned > 0
|
||||
? t("jobRow.scanned", { count: scanned })
|
||||
: "-"
|
||||
: job.status === "success" && (indexed > 0 || removed > 0 || errors > 0)
|
||||
? null // rendered below as ✓ / − / ⚠
|
||||
: scanned > 0
|
||||
? t("jobRow.scanned", { count: scanned })
|
||||
: "—";
|
||||
const isMetadataBatch = job.type === "metadata_batch";
|
||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||
|
||||
// Thumbnails column (Phase 2: extracting_pages + generating_thumbnails)
|
||||
// Thumbnails progress (Phase 2: extracting_pages + generating_thumbnails)
|
||||
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isPhase2);
|
||||
const thumbDisplay =
|
||||
thumbInProgress && job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: job.status === "success" && job.total_files != null && hasThumbnailPhase
|
||||
? `✓ ${job.total_files}`
|
||||
: "—";
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -122,25 +105,67 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
{filesDisplay !== null ? (
|
||||
<span className="text-sm text-foreground">{filesDisplay}</span>
|
||||
) : (
|
||||
<div className="flex items-center gap-2 text-xs">
|
||||
<span className="text-success">✓ {indexed}</span>
|
||||
{removed > 0 && <span className="text-warning">− {removed}</span>}
|
||||
{errors > 0 && <span className="text-error">⚠ {errors}</span>}
|
||||
{/* Running progress */}
|
||||
{isActive && job.total_files != null && (
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{job.processed_files ?? 0}/{job.total_files}</span>
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
</div>
|
||||
)}
|
||||
{job.status === "running" && !isPhase2 && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{thumbDisplay}</span>
|
||||
{thumbInProgress && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
{/* Completed stats with icons */}
|
||||
{!isActive && (
|
||||
<div className="flex items-center gap-3 text-xs">
|
||||
{/* Files: indexed count */}
|
||||
{indexed > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-success" title={t("jobRow.filesIndexed", { count: indexed })}>
|
||||
<Icon name="document" size="sm" />
|
||||
{indexed}
|
||||
</span>
|
||||
)}
|
||||
{/* Removed files */}
|
||||
{removed > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-warning" title={t("jobRow.filesRemoved", { count: removed })}>
|
||||
<Icon name="trash" size="sm" />
|
||||
{removed}
|
||||
</span>
|
||||
)}
|
||||
{/* Thumbnails */}
|
||||
{hasThumbnailPhase && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-primary" title={t("jobRow.thumbnailsGenerated", { count: job.total_files })}>
|
||||
<Icon name="image" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Metadata batch: series processed */}
|
||||
{isMetadataBatch && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-info" title={t("jobRow.metadataProcessed", { count: job.total_files })}>
|
||||
<Icon name="tag" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Metadata refresh: links refreshed */}
|
||||
{isMetadataRefresh && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-info" title={t("jobRow.metadataRefreshed", { count: job.total_files })}>
|
||||
<Icon name="tag" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Errors */}
|
||||
{errors > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-error" title={t("jobRow.errors", { count: errors })}>
|
||||
<Icon name="warning" size="sm" />
|
||||
{errors}
|
||||
</span>
|
||||
)}
|
||||
{/* Scanned only (no other stats) */}
|
||||
{indexed === 0 && removed === 0 && errors === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && scanned > 0 && (
|
||||
<span className="text-sm text-muted-foreground">{t("jobRow.scanned", { count: scanned })}</span>
|
||||
)}
|
||||
{/* Nothing to show */}
|
||||
{indexed === 0 && removed === 0 && errors === 0 && scanned === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && (
|
||||
<span className="text-sm text-muted-foreground">—</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
@@ -172,7 +197,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</tr>
|
||||
{showProgress && isActive && (
|
||||
<tr>
|
||||
<td colSpan={9} className="px-4 py-3 bg-muted/50">
|
||||
<td colSpan={8} className="px-4 py-3 bg-muted/50">
|
||||
<JobProgress
|
||||
jobId={job.id}
|
||||
onComplete={handleComplete}
|
||||
|
||||
@@ -117,8 +117,7 @@ export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListPro
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.library")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.type")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.status")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.files")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.thumbnails")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.stats")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.duration")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.created")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.actions")}</th>
|
||||
|
||||
@@ -114,6 +114,7 @@ export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
|
||||
thumbnail_regenerate: t("jobType.thumbnail_regenerate"),
|
||||
cbr_to_cbz: t("jobType.cbr_to_cbz"),
|
||||
metadata_batch: t("jobType.metadata_batch"),
|
||||
metadata_refresh: t("jobType.metadata_refresh"),
|
||||
};
|
||||
const label = jobTypeLabels[key] ?? type;
|
||||
return <Badge variant={variant} className={className}>{label}</Badge>;
|
||||
|
||||
@@ -31,7 +31,9 @@ type IconName =
|
||||
| "play"
|
||||
| "stop"
|
||||
| "spinner"
|
||||
| "warning";
|
||||
| "warning"
|
||||
| "tag"
|
||||
| "document";
|
||||
|
||||
type IconSize = "sm" | "md" | "lg" | "xl";
|
||||
|
||||
@@ -82,6 +84,8 @@ const icons: Record<IconName, string> = {
|
||||
stop: "M21 12a9 9 0 11-18 0 9 9 0 0118 0z M9 10a1 1 0 011-1h4a1 1 0 011 1v4a1 1 0 01-1 1h-4a1 1 0 01-1-1v-4z",
|
||||
spinner: "M4 4v5h.582m15.582 0A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15",
|
||||
warning: "M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z",
|
||||
tag: "M7 7h.01M7 3h5a1.99 1.99 0 011.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z",
|
||||
document: "M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",
|
||||
};
|
||||
|
||||
const colorClasses: Partial<Record<IconName, string>> = {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { notFound } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, MetadataBatchReportDto, MetadataBatchResultDto } from "../../../lib/api";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto } from "../../../lib/api";
|
||||
import {
|
||||
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
||||
StatusBadge, JobTypeBadge, StatBox, ProgressBar
|
||||
@@ -119,9 +119,15 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
description: t("jobType.metadata_batchDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
metadata_refresh: {
|
||||
label: t("jobType.metadata_refreshLabel"),
|
||||
description: t("jobType.metadata_refreshDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
};
|
||||
|
||||
const isMetadataBatch = job.type === "metadata_batch";
|
||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||
|
||||
// Fetch batch report & results for metadata_batch jobs
|
||||
let batchReport: MetadataBatchReportDto | null = null;
|
||||
@@ -133,6 +139,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
]);
|
||||
}
|
||||
|
||||
// Fetch refresh report for metadata_refresh jobs
|
||||
let refreshReport: MetadataRefreshReportDto | null = null;
|
||||
if (isMetadataRefresh) {
|
||||
refreshReport = await getMetadataRefreshReport(id).catch(() => null);
|
||||
}
|
||||
|
||||
const typeInfo = JOB_TYPE_INFO[job.type] ?? {
|
||||
label: job.type,
|
||||
description: null,
|
||||
@@ -154,6 +166,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
// Which label to use for the progress card
|
||||
const progressTitle = isMetadataBatch
|
||||
? t("jobDetail.metadataSearch")
|
||||
: isMetadataRefresh
|
||||
? t("jobDetail.metadataRefresh")
|
||||
: isThumbnailOnly
|
||||
? t("jobType.thumbnail_rebuild")
|
||||
: isExtractingPages
|
||||
@@ -164,6 +178,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
|
||||
const progressDescription = isMetadataBatch
|
||||
? t("jobDetail.metadataSearchDesc")
|
||||
: isMetadataRefresh
|
||||
? t("jobDetail.metadataRefreshDesc")
|
||||
: isThumbnailOnly
|
||||
? undefined
|
||||
: isExtractingPages
|
||||
@@ -209,7 +225,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
— {batchReport.auto_matched} {t("jobDetail.autoMatched").toLowerCase()}, {batchReport.already_linked} {t("jobDetail.alreadyLinked").toLowerCase()}, {batchReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {batchReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && job.stats_json && (
|
||||
{isMetadataRefresh && refreshReport && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {refreshReport.refreshed} {t("jobDetail.refreshed").toLowerCase()}, {refreshReport.unchanged} {t("jobDetail.unchanged").toLowerCase()}, {refreshReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !isMetadataRefresh && job.stats_json && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
|
||||
{job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
|
||||
@@ -218,7 +239,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
{job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
{!isMetadataBatch && !isMetadataRefresh && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
|
||||
</span>
|
||||
@@ -483,7 +504,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
)}
|
||||
|
||||
{/* Index Statistics — index jobs only */}
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && (
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.indexStats")}</CardTitle>
|
||||
@@ -547,6 +568,132 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata refresh report */}
|
||||
{isMetadataRefresh && refreshReport && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.refreshReport")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.refreshReportDesc", { count: String(refreshReport.total_links) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4">
|
||||
<StatBox value={refreshReport.refreshed} label={t("jobDetail.refreshed")} variant="success" />
|
||||
<StatBox value={refreshReport.unchanged} label={t("jobDetail.unchanged")} />
|
||||
<StatBox value={refreshReport.errors} label={t("jobDetail.errors")} variant={refreshReport.errors > 0 ? "error" : "default"} />
|
||||
<StatBox value={refreshReport.total_links} label={t("jobDetail.total")} />
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata refresh changes detail */}
|
||||
{isMetadataRefresh && refreshReport && refreshReport.changes.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.refreshChanges")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.refreshChangesDesc", { count: String(refreshReport.changes.length) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-3 max-h-[600px] overflow-y-auto">
|
||||
{refreshReport.changes.map((r, idx) => (
|
||||
<div
|
||||
key={idx}
|
||||
className={`p-3 rounded-lg border ${
|
||||
r.status === "updated" ? "bg-success/10 border-success/20" :
|
||||
r.status === "error" ? "bg-destructive/10 border-destructive/20" :
|
||||
"bg-muted/50 border-border/60"
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
{job.library_id ? (
|
||||
<Link
|
||||
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||
className="font-medium text-sm text-primary hover:underline truncate"
|
||||
>
|
||||
{r.series_name}
|
||||
</Link>
|
||||
) : (
|
||||
<span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
|
||||
)}
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-[10px] text-muted-foreground">{r.provider}</span>
|
||||
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
|
||||
r.status === "updated" ? "bg-success/20 text-success" :
|
||||
r.status === "error" ? "bg-destructive/20 text-destructive" :
|
||||
"bg-muted text-muted-foreground"
|
||||
}`}>
|
||||
{r.status === "updated" ? t("jobDetail.refreshed") :
|
||||
r.status === "error" ? t("common.error") :
|
||||
t("jobDetail.unchanged")}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{r.error && (
|
||||
<p className="text-xs text-destructive/80 mt-1">{r.error}</p>
|
||||
)}
|
||||
|
||||
{/* Series field changes */}
|
||||
{r.series_changes.length > 0 && (
|
||||
<div className="mt-2">
|
||||
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">{t("metadata.seriesLabel")}</span>
|
||||
<div className="mt-1 space-y-1">
|
||||
{r.series_changes.map((c, ci) => (
|
||||
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||
<span className="text-muted-foreground line-through truncate max-w-[200px]" title={String(c.old ?? "—")}>
|
||||
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old)) : "—"}
|
||||
</span>
|
||||
<span className="text-success shrink-0">→</span>
|
||||
<span className="text-success truncate max-w-[200px]" title={String(c.new ?? "—")}>
|
||||
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new)) : "—"}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Book field changes */}
|
||||
{r.book_changes.length > 0 && (
|
||||
<div className="mt-2">
|
||||
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">
|
||||
{t("metadata.booksLabel")} ({r.book_changes.length})
|
||||
</span>
|
||||
<div className="mt-1 space-y-2">
|
||||
{r.book_changes.map((b, bi) => (
|
||||
<div key={bi} className="pl-2 border-l-2 border-border/60">
|
||||
<Link
|
||||
href={`/books/${b.book_id}`}
|
||||
className="text-xs text-primary hover:underline font-medium"
|
||||
>
|
||||
{b.volume != null && <span className="text-muted-foreground mr-1">T.{b.volume}</span>}
|
||||
{b.title}
|
||||
</Link>
|
||||
<div className="mt-0.5 space-y-0.5">
|
||||
{b.changes.map((c, ci) => (
|
||||
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||
<span className="text-muted-foreground line-through truncate max-w-[150px]" title={String(c.old ?? "—")}>
|
||||
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old).substring(0, 60)) : "—"}
|
||||
</span>
|
||||
<span className="text-success shrink-0">→</span>
|
||||
<span className="text-success truncate max-w-[150px]" title={String(c.new ?? "—")}>
|
||||
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new).substring(0, 60)) : "—"}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata batch results */}
|
||||
{isMetadataBatch && batchResults.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { JobsList } from "../components/JobsList";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui";
|
||||
import { getServerTranslations } from "../../lib/i18n/server";
|
||||
@@ -58,6 +58,15 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function triggerMetadataRefresh(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
if (!libraryId) return;
|
||||
const result = await startMetadataRefresh(libraryId);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="mb-6">
|
||||
@@ -116,6 +125,12 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
</svg>
|
||||
{t("jobs.batchMetadata")}
|
||||
</Button>
|
||||
<Button type="submit" formAction={triggerMetadataRefresh} variant="secondary">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
{t("jobs.refreshMetadata")}
|
||||
</Button>
|
||||
</div>
|
||||
</FormRow>
|
||||
</form>
|
||||
@@ -184,6 +199,17 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.batchMetadataDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-muted-foreground" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.refreshMetadata")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.refreshMetadataDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
@@ -803,6 +803,49 @@ export async function startMetadataBatch(libraryId: string) {
|
||||
});
|
||||
}
|
||||
|
||||
export async function startMetadataRefresh(libraryId: string) {
|
||||
return apiFetch<{ id: string; status: string }>("/metadata/refresh", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ library_id: libraryId }),
|
||||
});
|
||||
}
|
||||
|
||||
export type RefreshFieldDiff = {
|
||||
field: string;
|
||||
old?: unknown;
|
||||
new?: unknown;
|
||||
};
|
||||
|
||||
export type RefreshBookDiff = {
|
||||
book_id: string;
|
||||
title: string;
|
||||
volume: number | null;
|
||||
changes: RefreshFieldDiff[];
|
||||
};
|
||||
|
||||
export type RefreshSeriesResult = {
|
||||
series_name: string;
|
||||
provider: string;
|
||||
status: string; // "updated" | "unchanged" | "error"
|
||||
series_changes: RefreshFieldDiff[];
|
||||
book_changes: RefreshBookDiff[];
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export type MetadataRefreshReportDto = {
|
||||
job_id: string;
|
||||
status: string;
|
||||
total_links: number;
|
||||
refreshed: number;
|
||||
unchanged: number;
|
||||
errors: number;
|
||||
changes: RefreshSeriesResult[];
|
||||
};
|
||||
|
||||
export async function getMetadataRefreshReport(jobId: string) {
|
||||
return apiFetch<MetadataRefreshReportDto>(`/metadata/refresh/${jobId}/report`);
|
||||
}
|
||||
|
||||
export async function getMetadataBatchReport(jobId: string) {
|
||||
return apiFetch<MetadataBatchReportDto>(`/metadata/batch/${jobId}/report`);
|
||||
}
|
||||
|
||||
@@ -173,6 +173,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobs.generateThumbnails": "Generate thumbnails",
|
||||
"jobs.regenerateThumbnails": "Regenerate thumbnails",
|
||||
"jobs.batchMetadata": "Batch metadata",
|
||||
"jobs.refreshMetadata": "Refresh metadata",
|
||||
"jobs.refreshMetadataDescription": "Refreshes metadata for all series already linked to an external provider. Re-downloads information from the provider and updates series and books in the database (respecting locked fields). Series without an approved link are ignored. <strong>Requires a specific library</strong> (does not work on \"All libraries\").",
|
||||
"jobs.referenceTitle": "Job types reference",
|
||||
"jobs.rebuildDescription": "Incremental scan: detects files added, modified, or deleted since the last scan, indexes them, and generates missing thumbnails. Existing unmodified data is preserved. This is the most common and fastest action.",
|
||||
"jobs.fullRebuildDescription": "Deletes all indexed data (books, series, thumbnails) then performs a full scan from scratch. Useful if the database is out of sync or corrupted. Long and destructive operation: reading statuses and manual metadata will be lost.",
|
||||
@@ -185,8 +187,7 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobsList.library": "Library",
|
||||
"jobsList.type": "Type",
|
||||
"jobsList.status": "Status",
|
||||
"jobsList.files": "Files",
|
||||
"jobsList.thumbnails": "Thumbnails",
|
||||
"jobsList.stats": "Stats",
|
||||
"jobsList.duration": "Duration",
|
||||
"jobsList.created": "Created",
|
||||
"jobsList.actions": "Actions",
|
||||
@@ -195,6 +196,12 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobRow.showProgress": "Show progress",
|
||||
"jobRow.hideProgress": "Hide progress",
|
||||
"jobRow.scanned": "{{count}} scanned",
|
||||
"jobRow.filesIndexed": "{{count}} files indexed",
|
||||
"jobRow.filesRemoved": "{{count}} files removed",
|
||||
"jobRow.thumbnailsGenerated": "{{count}} thumbnails generated",
|
||||
"jobRow.metadataProcessed": "{{count}} series processed",
|
||||
"jobRow.metadataRefreshed": "{{count}} series refreshed",
|
||||
"jobRow.errors": "{{count}} errors",
|
||||
"jobRow.view": "View",
|
||||
|
||||
// Job progress
|
||||
@@ -234,6 +241,14 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobDetail.phase2b": "Phase 2b — Thumbnail generation",
|
||||
"jobDetail.metadataSearch": "Metadata search",
|
||||
"jobDetail.metadataSearchDesc": "Searching external providers for each series",
|
||||
"jobDetail.metadataRefresh": "Metadata refresh",
|
||||
"jobDetail.metadataRefreshDesc": "Re-downloading metadata from providers for already linked series",
|
||||
"jobDetail.refreshReport": "Refresh report",
|
||||
"jobDetail.refreshReportDesc": "{{count}} linked series processed",
|
||||
"jobDetail.refreshed": "Refreshed",
|
||||
"jobDetail.unchanged": "Unchanged",
|
||||
"jobDetail.refreshChanges": "Changes detail",
|
||||
"jobDetail.refreshChangesDesc": "{{count}} series with changes",
|
||||
"jobDetail.phase1Desc": "Scanning and indexing library files",
|
||||
"jobDetail.phase2aDesc": "Extracting the first page of each archive (page count + raw image)",
|
||||
"jobDetail.phase2bDesc": "Generating thumbnails for scanned books",
|
||||
@@ -273,6 +288,7 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobType.thumbnail_regenerate": "Regen. thumbnails",
|
||||
"jobType.cbr_to_cbz": "CBR → CBZ",
|
||||
"jobType.metadata_batch": "Batch metadata",
|
||||
"jobType.metadata_refresh": "Refresh meta.",
|
||||
"jobType.rebuildLabel": "Incremental indexing",
|
||||
"jobType.rebuildDesc": "Scans new/modified files, analyzes them, and generates missing thumbnails.",
|
||||
"jobType.full_rebuildLabel": "Full reindexing",
|
||||
@@ -285,6 +301,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobType.cbr_to_cbzDesc": "Converts a CBR archive to the open CBZ format.",
|
||||
"jobType.metadata_batchLabel": "Batch metadata",
|
||||
"jobType.metadata_batchDesc": "Searches external metadata providers for all series in the library and automatically applies 100% confidence matches.",
|
||||
"jobType.metadata_refreshLabel": "Metadata refresh",
|
||||
"jobType.metadata_refreshDesc": "Re-downloads and updates metadata for all series already linked to an external provider.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extracting pages",
|
||||
|
||||
@@ -171,6 +171,8 @@ const fr = {
|
||||
"jobs.generateThumbnails": "Générer les miniatures",
|
||||
"jobs.regenerateThumbnails": "Regénérer les miniatures",
|
||||
"jobs.batchMetadata": "Métadonnées en lot",
|
||||
"jobs.refreshMetadata": "Rafraîchir métadonnées",
|
||||
"jobs.refreshMetadataDescription": "Rafraîchit les métadonnées de toutes les séries déjà liées à un fournisseur externe. Re-télécharge les informations depuis le fournisseur et met à jour les séries et livres en base (en respectant les champs verrouillés). Les séries sans lien approuvé sont ignorées. <strong>Requiert une bibliothèque spécifique</strong> (ne fonctionne pas sur \u00ab Toutes les bibliothèques \u00bb).",
|
||||
"jobs.referenceTitle": "Référence des types de tâches",
|
||||
"jobs.rebuildDescription": "Scan incrémental : détecte les fichiers ajoutés, modifiés ou supprimés depuis le dernier scan, les indexe et génère les miniatures manquantes. Les données existantes non modifiées sont conservées. C'est l'action la plus courante et la plus rapide.",
|
||||
"jobs.fullRebuildDescription": "Supprime toutes les données indexées (livres, séries, miniatures) puis effectue un scan complet depuis zéro. Utile si la base de données est désynchronisée ou corrompue. Opération longue et destructive : les statuts de lecture et les métadonnées manuelles seront perdus.",
|
||||
@@ -183,8 +185,7 @@ const fr = {
|
||||
"jobsList.library": "Bibliothèque",
|
||||
"jobsList.type": "Type",
|
||||
"jobsList.status": "Statut",
|
||||
"jobsList.files": "Fichiers",
|
||||
"jobsList.thumbnails": "Miniatures",
|
||||
"jobsList.stats": "Stats",
|
||||
"jobsList.duration": "Durée",
|
||||
"jobsList.created": "Créé",
|
||||
"jobsList.actions": "Actions",
|
||||
@@ -193,6 +194,12 @@ const fr = {
|
||||
"jobRow.showProgress": "Afficher la progression",
|
||||
"jobRow.hideProgress": "Masquer la progression",
|
||||
"jobRow.scanned": "{{count}} analysés",
|
||||
"jobRow.filesIndexed": "{{count}} fichiers indexés",
|
||||
"jobRow.filesRemoved": "{{count}} fichiers supprimés",
|
||||
"jobRow.thumbnailsGenerated": "{{count}} miniatures générées",
|
||||
"jobRow.metadataProcessed": "{{count}} séries traitées",
|
||||
"jobRow.metadataRefreshed": "{{count}} séries rafraîchies",
|
||||
"jobRow.errors": "{{count}} erreurs",
|
||||
"jobRow.view": "Voir",
|
||||
|
||||
// Job progress
|
||||
@@ -232,6 +239,14 @@ const fr = {
|
||||
"jobDetail.phase2b": "Phase 2b — Génération des miniatures",
|
||||
"jobDetail.metadataSearch": "Recherche de métadonnées",
|
||||
"jobDetail.metadataSearchDesc": "Recherche auprès des fournisseurs externes pour chaque série",
|
||||
"jobDetail.metadataRefresh": "Rafraîchissement des métadonnées",
|
||||
"jobDetail.metadataRefreshDesc": "Re-téléchargement des métadonnées depuis les fournisseurs pour les séries déjà liées",
|
||||
"jobDetail.refreshReport": "Rapport de rafraîchissement",
|
||||
"jobDetail.refreshReportDesc": "{{count}} séries liées traitées",
|
||||
"jobDetail.refreshed": "Rafraîchies",
|
||||
"jobDetail.unchanged": "Inchangées",
|
||||
"jobDetail.refreshChanges": "Détail des changements",
|
||||
"jobDetail.refreshChangesDesc": "{{count}} séries avec des modifications",
|
||||
"jobDetail.phase1Desc": "Scan et indexation des fichiers de la bibliothèque",
|
||||
"jobDetail.phase2aDesc": "Extraction de la première page de chaque archive (nombre de pages + image brute)",
|
||||
"jobDetail.phase2bDesc": "Génération des miniatures pour les livres analysés",
|
||||
@@ -271,6 +286,7 @@ const fr = {
|
||||
"jobType.thumbnail_regenerate": "Régén. miniatures",
|
||||
"jobType.cbr_to_cbz": "CBR → CBZ",
|
||||
"jobType.metadata_batch": "Métadonnées en lot",
|
||||
"jobType.metadata_refresh": "Rafraîchir méta.",
|
||||
"jobType.rebuildLabel": "Indexation incrémentale",
|
||||
"jobType.rebuildDesc": "Scanne les fichiers nouveaux/modifiés, les analyse et génère les miniatures manquantes.",
|
||||
"jobType.full_rebuildLabel": "Réindexation complète",
|
||||
@@ -283,6 +299,8 @@ const fr = {
|
||||
"jobType.cbr_to_cbzDesc": "Convertit une archive CBR au format ouvert CBZ.",
|
||||
"jobType.metadata_batchLabel": "Métadonnées en lot",
|
||||
"jobType.metadata_batchDesc": "Recherche les métadonnées auprès des fournisseurs externes pour toutes les séries de la bibliothèque et applique automatiquement les correspondances à 100% de confiance.",
|
||||
"jobType.metadata_refreshLabel": "Rafraîchissement métadonnées",
|
||||
"jobType.metadata_refreshDesc": "Re-télécharge et met à jour les métadonnées pour toutes les séries déjà liées à un fournisseur externe.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extraction des pages",
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -37,7 +37,7 @@ pub async fn cleanup_stale_jobs(pool: &PgPool) -> Result<()> {
|
||||
}
|
||||
|
||||
/// Job types processed by the API, not the indexer.
|
||||
const API_ONLY_JOB_TYPES: &[&str] = &["metadata_batch"];
|
||||
const API_ONLY_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh"];
|
||||
|
||||
/// Job types that modify book/thumbnail data and must not run concurrently.
|
||||
const EXCLUSIVE_JOB_TYPES: &[&str] = &[
|
||||
|
||||
Reference in New Issue
Block a user