feat: targeted per-series metadata refresh after import and in the modal

- After a torrent import, automatically refresh metadata only for the
  imported series (via refresh_link) instead of running a full refresh job
- New POST /metadata/refresh-link/:id endpoint to refresh a single
  approved metadata link
- "Refresh" ("Rafraîchir") button in the metadata modal (linked state),
  with a spinner and visual confirmation

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 23:05:06 +01:00
parent 072d6870fe
commit ca17d02116
7 changed files with 120 additions and 6 deletions
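As an illustration of the new endpoint's contract, here is a minimal client-side sketch of the request the modal's "Refresh" button ends up issuing. This is an assumption-level sketch, not part of the commit: the reqwest client, base_url, and trigger_refresh helper are hypothetical; only the route and the success body shape come from the diff below.

use uuid::Uuid;

// Hypothetical helper mirroring what the modal's "Refresh" button sends.
// Only approved links succeed; the handler rejects non-approved links
// and unknown ids with an ApiError.
async fn trigger_refresh(base_url: &str, link_id: Uuid) -> anyhow::Result<String> {
    let resp = reqwest::Client::new()
        .post(format!("{base_url}/metadata/refresh-link/{link_id}"))
        .send()
        .await?
        .error_for_status()?;
    // Success body, per the handler: {"ok": true, "status": "updated" | "unchanged" | "error"}
    let body: serde_json::Value = resp.json().await?;
    Ok(body["status"].as_str().unwrap_or_default().to_string())
}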


@@ -150,6 +150,7 @@ async fn main() -> anyhow::Result<()> {
         .route("/metadata/batch/:id/report", get(metadata_batch::get_batch_report))
         .route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
         .route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
+        .route("/metadata/refresh-link/:id", axum::routing::post(metadata_refresh::refresh_single_link))
         .route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
         .route("/reading-status/match", axum::routing::post(reading_status_match::start_match))
         .route("/reading-status/match/:id/report", get(reading_status_match::get_match_report))


@@ -41,7 +41,7 @@ struct BookDiff {
 /// Per-series change report
 #[derive(Serialize, Clone)]
-struct SeriesRefreshResult {
+pub(crate) struct SeriesRefreshResult {
     series_name: String,
     provider: String,
     status: String, // "updated", "unchanged", "error"
@@ -299,6 +299,45 @@ pub async fn get_refresh_report(
     }))
 }
+// ---------------------------------------------------------------------------
+// POST /metadata/refresh-link/:id — Refresh a single metadata link
+// ---------------------------------------------------------------------------
+/// Refresh a single approved metadata link by its ID.
+pub async fn refresh_single_link(
+    State(state): State<AppState>,
+    AxumPath(link_id): AxumPath<Uuid>,
+) -> Result<Json<serde_json::Value>, ApiError> {
+    let row = sqlx::query(
+        "SELECT library_id, series_name, provider, external_id, status \
+         FROM external_metadata_links WHERE id = $1",
+    )
+    .bind(link_id)
+    .fetch_optional(&state.pool)
+    .await?
+    .ok_or_else(|| ApiError::not_found("metadata link not found"))?;
+    let status: String = row.get("status");
+    if status != "approved" {
+        return Err(ApiError::bad_request("only approved links can be refreshed"));
+    }
+    let library_id: Uuid = row.get("library_id");
+    let series_name: String = row.get("series_name");
+    let provider: String = row.get("provider");
+    let external_id: String = row.get("external_id");
+    match refresh_link(&state.pool, link_id, library_id, &series_name, &provider, &external_id).await {
+        Ok(result) => {
+            Ok(Json(serde_json::json!({
+                "ok": true,
+                "status": result.status,
+            })))
+        }
+        Err(e) => Err(ApiError::internal(format!("refresh failed: {e}"))),
+    }
+}
 // ---------------------------------------------------------------------------
 // Background processing
 // ---------------------------------------------------------------------------
@@ -437,7 +476,7 @@ pub(crate) async fn process_metadata_refresh(
 }
 /// Refresh a single approved metadata link: re-fetch from provider, compare, sync, return diff
-async fn refresh_link(
+pub(crate) async fn refresh_link(
     pool: &PgPool,
     link_id: Uuid,
     library_id: Uuid,
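The widened pub(crate) visibility here, together with SeriesRefreshResult above, is what lets the torrent-import path in the next file call metadata_refresh::refresh_link directly instead of spawning a full refresh job over every approved link.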


@@ -6,7 +6,7 @@ use std::time::Duration;
 use tracing::{info, trace, warn};
 use uuid::Uuid;
-use crate::{error::ApiError, prowlarr::extract_volumes_from_title_pub, qbittorrent::{load_qbittorrent_config, qbittorrent_login}, state::AppState};
+use crate::{error::ApiError, metadata_refresh, prowlarr::extract_volumes_from_title_pub, qbittorrent::{load_qbittorrent_config, qbittorrent_login}, state::AppState};
 // ─── Types ──────────────────────────────────────────────────────────────────
@@ -412,20 +412,46 @@ async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Resul
             .await?;
             // Queue a scan job so the indexer picks up the new files
-            let job_id = Uuid::new_v4();
+            let scan_job_id = Uuid::new_v4();
             sqlx::query(
                 "INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'scan', 'pending')",
             )
-            .bind(job_id)
+            .bind(scan_job_id)
             .bind(library_id)
             .execute(&pool)
             .await?;
+            // Refresh metadata for this series if it has an approved metadata link
+            let link_row = sqlx::query(
+                "SELECT id, provider, external_id FROM external_metadata_links \
+                 WHERE library_id = $1 AND LOWER(series_name) = LOWER($2) AND status = 'approved' LIMIT 1",
+            )
+            .bind(library_id)
+            .bind(&series_name)
+            .fetch_optional(&pool)
+            .await?;
+            if let Some(link) = link_row {
+                let link_id: Uuid = link.get("id");
+                let provider: String = link.get("provider");
+                let external_id: String = link.get("external_id");
+                let pool2 = pool.clone();
+                let sn = series_name.clone();
+                tokio::spawn(async move {
+                    let result = metadata_refresh::refresh_link(&pool2, link_id, library_id, &sn, &provider, &external_id).await;
+                    if let Err(e) = result {
+                        warn!("[IMPORT] Metadata refresh for '{}' failed: {}", sn, e);
+                    } else {
+                        info!("[IMPORT] Metadata refresh for '{}' done", sn);
+                    }
+                });
+            }
             info!(
                 "Torrent import {} done: {} files imported, scan job {} queued",
                 torrent_id,
                 imported.len(),
-                job_id
+                scan_job_id
             );
         }
         Err(e) => {
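Two notes on the import-side change, as far as this diff shows. First, the refresh runs in a detached tokio::spawn, so a slow or failing metadata provider never blocks import completion or the queued scan job; a failure only surfaces through the [IMPORT] warning log. Second, the approved-link lookup compares LOWER(series_name), which a plain index on series_name cannot serve; if external_metadata_links grows large, an expression index on (library_id, LOWER(series_name)) would presumably keep the per-import lookup cheap.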