feat: add reading_status_push job — differential push to AniList
Push reading statuses (PLANNING/CURRENT/COMPLETED) to AniList for every linked series that has changed since the last sync, has new books, or has never been synced.

- Migration 0057: adds `reading_status_push` to the `index_jobs` type constraint.
- Migration 0058: creates the `reading_status_push_results` table (pushed/skipped/no_books/error).
- API: new `reading_status_push` module exposing `start_push`, `get_push_report`, and `get_push_results`.
- Differential detection: `synced_at IS NULL`, reading progress updated since last sync, or new books added since last sync.
- Same 429 retry logic as `reading_status_match` (wait 10 s, retry once, abort on a second 429).
- Notifications: `ReadingStatusPushCompleted` / `ReadingStatusPushFailed` events.
- Backoffice: push button in the reading status group; job detail report with a per-series list.
- Replay support, badge label, i18n (FR + EN).

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -19,6 +19,7 @@ mod prowlarr;
|
||||
mod qbittorrent;
|
||||
mod reading_progress;
|
||||
mod reading_status_match;
|
||||
mod reading_status_push;
|
||||
mod search;
|
||||
mod series;
|
||||
mod settings;
|
||||
@@ -149,6 +150,9 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/reading-status/match", axum::routing::post(reading_status_match::start_match))
|
||||
.route("/reading-status/match/:id/report", get(reading_status_match::get_match_report))
|
||||
.route("/reading-status/match/:id/results", get(reading_status_match::get_match_results))
|
||||
.route("/reading-status/push", axum::routing::post(reading_status_push::start_push))
|
||||
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
|
||||
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
|
||||
.merge(settings::settings_routes())
|
||||
.route_layer(middleware::from_fn_with_state(
|
||||
state.clone(),
|
||||
|
||||
642
apps/api/src/reading_status_push.rs
Normal file
642
apps/api/src/reading_status_push.rs
Normal file
@@ -0,0 +1,642 @@
|
||||
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::time::Duration;
use tracing::{info, warn};
use utoipa::ToSchema;
use uuid::Uuid;

use crate::{anilist, error::ApiError, notifications, state::AppState};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// DTOs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Request body for POST /reading-status/push.
#[derive(Deserialize, ToSchema)]
pub struct ReadingStatusPushRequest {
    // UUID of the library to push, as a string; parsed and validated in `start_push`.
    pub library_id: String,
}
|
||||
|
||||
/// Aggregate report for one reading status push job: the job's lifecycle
/// status plus per-outcome counters computed from `reading_status_push_results`.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusPushReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    // Job lifecycle status as stored in `index_jobs.status`
    // (e.g. 'running', 'success', 'failed', 'cancelled').
    pub status: String,
    // Number of series selected for pushing (mirrors `index_jobs.total_files`).
    pub total_series: i64,
    // Count of results with status 'pushed'.
    pub pushed: i64,
    // Count of results with status 'skipped'.
    pub skipped: i64,
    // Count of results with status 'no_books' (linked series with zero books).
    pub no_books: i64,
    // Count of results with status 'error'.
    pub errors: i64,
}
|
||||
|
||||
/// One per-series outcome row of a reading status push job, as stored in
/// `reading_status_push_results`.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusPushResultDto {
    #[schema(value_type = String)]
    pub id: Uuid,
    // Local series key (see the COALESCE/NULLIF mapping in the push worker:
    // empty/NULL book series map to 'unclassified').
    pub series_name: String,
    /// 'pushed' | 'skipped' | 'no_books' | 'error'
    pub status: String,
    // Linked AniList media id, when the series link carries one.
    pub anilist_id: Option<i32>,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    /// PLANNING | CURRENT | COMPLETED
    pub anilist_status: Option<String>,
    // Number of volumes marked read locally; pushed as AniList `progress`.
    pub progress_volumes: Option<i32>,
    // Set only when status == 'error'.
    pub error_message: Option<String>,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// POST /reading-status/push — Trigger a reading status push job
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/reading-status/push",
|
||||
tag = "reading_status",
|
||||
request_body = ReadingStatusPushRequest,
|
||||
responses(
|
||||
(status = 200, description = "Job created"),
|
||||
(status = 400, description = "Bad request"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn start_push(
|
||||
State(state): State<AppState>,
|
||||
Json(body): Json<ReadingStatusPushRequest>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
let library_id: Uuid = body
|
||||
.library_id
|
||||
.parse()
|
||||
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
|
||||
|
||||
// Verify library exists and has AniList configured
|
||||
let lib_row = sqlx::query("SELECT reading_status_provider FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::not_found("library not found"))?;
|
||||
|
||||
let provider: Option<String> = lib_row.get("reading_status_provider");
|
||||
if provider.as_deref() != Some("anilist") {
|
||||
return Err(ApiError::bad_request(
|
||||
"This library has no AniList reading status provider configured",
|
||||
));
|
||||
}
|
||||
|
||||
// Check AniList is configured globally with a local_user_id
|
||||
let (_, _, local_user_id) = anilist::load_anilist_settings(&state.pool).await?;
|
||||
if local_user_id.is_none() {
|
||||
return Err(ApiError::bad_request(
|
||||
"AniList local_user_id not configured — required for reading status push",
|
||||
));
|
||||
}
|
||||
|
||||
// Check no existing running job for this library
|
||||
let existing: Option<Uuid> = sqlx::query_scalar(
|
||||
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'reading_status_push' AND status IN ('pending', 'running') LIMIT 1",
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
if let Some(existing_id) = existing {
|
||||
return Ok(Json(serde_json::json!({
|
||||
"id": existing_id.to_string(),
|
||||
"status": "already_running",
|
||||
})));
|
||||
}
|
||||
|
||||
let job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'reading_status_push', 'running', NOW())",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
let pool = state.pool.clone();
|
||||
let library_name: Option<String> =
|
||||
sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = process_reading_status_push(&pool, job_id, library_id).await {
|
||||
warn!("[READING_STATUS_PUSH] job {job_id} failed: {e}");
|
||||
let _ = sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(e.to_string())
|
||||
.execute(&pool)
|
||||
.await;
|
||||
notifications::notify(
|
||||
pool.clone(),
|
||||
notifications::NotificationEvent::ReadingStatusPushFailed {
|
||||
library_name,
|
||||
error: e.to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Json(serde_json::json!({
|
||||
"id": job_id.to_string(),
|
||||
"status": "running",
|
||||
})))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// GET /reading-status/push/:id/report
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/reading-status/push/{id}/report",
|
||||
tag = "reading_status",
|
||||
params(("id" = String, Path, description = "Job UUID")),
|
||||
responses(
|
||||
(status = 200, body = ReadingStatusPushReportDto),
|
||||
(status = 404, description = "Job not found"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn get_push_report(
|
||||
State(state): State<AppState>,
|
||||
axum::extract::Path(job_id): axum::extract::Path<Uuid>,
|
||||
) -> Result<Json<ReadingStatusPushReportDto>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
"SELECT status, total_files FROM index_jobs WHERE id = $1 AND type = 'reading_status_push'",
|
||||
)
|
||||
.bind(job_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::not_found("job not found"))?;
|
||||
|
||||
let job_status: String = row.get("status");
|
||||
let total_files: Option<i32> = row.get("total_files");
|
||||
|
||||
let counts = sqlx::query(
|
||||
"SELECT status, COUNT(*) as cnt FROM reading_status_push_results WHERE job_id = $1 GROUP BY status",
|
||||
)
|
||||
.bind(job_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
let mut pushed = 0i64;
|
||||
let mut skipped = 0i64;
|
||||
let mut no_books = 0i64;
|
||||
let mut errors = 0i64;
|
||||
|
||||
for r in &counts {
|
||||
let status: String = r.get("status");
|
||||
let cnt: i64 = r.get("cnt");
|
||||
match status.as_str() {
|
||||
"pushed" => pushed = cnt,
|
||||
"skipped" => skipped = cnt,
|
||||
"no_books" => no_books = cnt,
|
||||
"error" => errors = cnt,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Json(ReadingStatusPushReportDto {
|
||||
job_id,
|
||||
status: job_status,
|
||||
total_series: total_files.unwrap_or(0) as i64,
|
||||
pushed,
|
||||
skipped,
|
||||
no_books,
|
||||
errors,
|
||||
}))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// GET /reading-status/push/:id/results
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Query string for GET /reading-status/push/:id/results.
#[derive(Deserialize)]
pub struct PushResultsQuery {
    // Optional filter on result status: 'pushed' | 'skipped' | 'no_books' | 'error'.
    pub status: Option<String>,
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/reading-status/push/{id}/results",
|
||||
tag = "reading_status",
|
||||
params(
|
||||
("id" = String, Path, description = "Job UUID"),
|
||||
("status" = Option<String>, Query, description = "Filter by status"),
|
||||
),
|
||||
responses(
|
||||
(status = 200, body = Vec<ReadingStatusPushResultDto>),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn get_push_results(
|
||||
State(state): State<AppState>,
|
||||
axum::extract::Path(job_id): axum::extract::Path<Uuid>,
|
||||
axum::extract::Query(query): axum::extract::Query<PushResultsQuery>,
|
||||
) -> Result<Json<Vec<ReadingStatusPushResultDto>>, ApiError> {
|
||||
let rows = if let Some(status_filter) = &query.status {
|
||||
sqlx::query(
|
||||
"SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message
|
||||
FROM reading_status_push_results
|
||||
WHERE job_id = $1 AND status = $2
|
||||
ORDER BY series_name",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(status_filter)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
} else {
|
||||
sqlx::query(
|
||||
"SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message
|
||||
FROM reading_status_push_results
|
||||
WHERE job_id = $1
|
||||
ORDER BY status, series_name",
|
||||
)
|
||||
.bind(job_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
};
|
||||
|
||||
let results = rows
|
||||
.iter()
|
||||
.map(|row| ReadingStatusPushResultDto {
|
||||
id: row.get("id"),
|
||||
series_name: row.get("series_name"),
|
||||
status: row.get("status"),
|
||||
anilist_id: row.get("anilist_id"),
|
||||
anilist_title: row.get("anilist_title"),
|
||||
anilist_url: row.get("anilist_url"),
|
||||
anilist_status: row.get("anilist_status"),
|
||||
progress_volumes: row.get("progress_volumes"),
|
||||
error_message: row.get("error_message"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(results))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Background processing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Minimal projection of an `anilist_series_links` row needed to push one
// series: the local series key plus the linked AniList media identity.
struct SeriesInfo {
    series_name: String,
    anilist_id: i32,
    anilist_title: Option<String>,
    anilist_url: Option<String>,
}
|
||||
|
||||
async fn process_reading_status_push(
|
||||
pool: &PgPool,
|
||||
job_id: Uuid,
|
||||
library_id: Uuid,
|
||||
) -> Result<(), String> {
|
||||
let (token, _, local_user_id_opt) = anilist::load_anilist_settings(pool)
|
||||
.await
|
||||
.map_err(|e| e.message)?;
|
||||
|
||||
let local_user_id = local_user_id_opt
|
||||
.ok_or_else(|| "AniList local_user_id not configured".to_string())?;
|
||||
|
||||
// Find all linked series that need a push (differential)
|
||||
let series_to_push: Vec<SeriesInfo> = sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
asl.series_name,
|
||||
asl.anilist_id,
|
||||
asl.anilist_title,
|
||||
asl.anilist_url
|
||||
FROM anilist_series_links asl
|
||||
WHERE asl.library_id = $1
|
||||
AND asl.anilist_id IS NOT NULL
|
||||
AND (
|
||||
asl.synced_at IS NULL
|
||||
OR EXISTS (
|
||||
SELECT 1
|
||||
FROM book_reading_progress brp
|
||||
JOIN books b2 ON b2.id = brp.book_id
|
||||
WHERE b2.library_id = asl.library_id
|
||||
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
|
||||
AND brp.user_id = $2
|
||||
AND brp.updated_at > asl.synced_at
|
||||
)
|
||||
OR EXISTS (
|
||||
SELECT 1
|
||||
FROM books b2
|
||||
WHERE b2.library_id = asl.library_id
|
||||
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
|
||||
AND b2.created_at > asl.synced_at
|
||||
)
|
||||
)
|
||||
ORDER BY asl.series_name
|
||||
"#,
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(local_user_id)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?
|
||||
.into_iter()
|
||||
.map(|row| SeriesInfo {
|
||||
series_name: row.get("series_name"),
|
||||
anilist_id: row.get("anilist_id"),
|
||||
anilist_title: row.get("anilist_title"),
|
||||
anilist_url: row.get("anilist_url"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let total = series_to_push.len() as i32;
|
||||
sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.bind(total)
|
||||
.execute(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let mut processed = 0i32;
|
||||
|
||||
for series in &series_to_push {
|
||||
if is_job_cancelled(pool, job_id).await {
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.execute(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
processed += 1;
|
||||
let progress = (processed * 100 / total.max(1)).min(100);
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET processed_files = $2, progress_percent = $3, current_file = $4 WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(processed)
|
||||
.bind(progress)
|
||||
.bind(&series.series_name)
|
||||
.execute(pool)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
// Compute reading status for this series
|
||||
let stats_row = sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
COUNT(b.id) AS total_books,
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp
|
||||
ON brp.book_id = b.id AND brp.user_id = $3
|
||||
WHERE b.library_id = $1
|
||||
AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
|
||||
"#,
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(&series.series_name)
|
||||
.bind(local_user_id)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let total_books: i64 = stats_row.get("total_books");
|
||||
let books_read: i64 = stats_row.get("books_read");
|
||||
|
||||
if total_books == 0 {
|
||||
insert_push_result(
|
||||
pool, job_id, library_id, &series.series_name, "no_books",
|
||||
Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
|
||||
None, None, None,
|
||||
).await;
|
||||
tokio::time::sleep(Duration::from_millis(700)).await;
|
||||
continue;
|
||||
}
|
||||
|
||||
let anilist_status = if books_read == 0 {
|
||||
"PLANNING"
|
||||
} else if books_read >= total_books {
|
||||
"COMPLETED"
|
||||
} else {
|
||||
"CURRENT"
|
||||
};
|
||||
let progress_volumes = books_read as i32;
|
||||
|
||||
match push_to_anilist(
|
||||
&token,
|
||||
series.anilist_id,
|
||||
anilist_status,
|
||||
progress_volumes,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(()) => {
|
||||
// Update synced_at
|
||||
let _ = sqlx::query(
|
||||
"UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(&series.series_name)
|
||||
.execute(pool)
|
||||
.await;
|
||||
|
||||
insert_push_result(
|
||||
pool, job_id, library_id, &series.series_name, "pushed",
|
||||
Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
|
||||
Some(anilist_status), Some(progress_volumes), None,
|
||||
).await;
|
||||
}
|
||||
Err(e) if e.contains("429") || e.contains("Too Many Requests") => {
|
||||
warn!("[READING_STATUS_PUSH] rate limit hit for '{}', waiting 10s before retry", series.series_name);
|
||||
tokio::time::sleep(Duration::from_secs(10)).await;
|
||||
match push_to_anilist(&token, series.anilist_id, anilist_status, progress_volumes).await {
|
||||
Ok(()) => {
|
||||
let _ = sqlx::query(
|
||||
"UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(&series.series_name)
|
||||
.execute(pool)
|
||||
.await;
|
||||
|
||||
insert_push_result(
|
||||
pool, job_id, library_id, &series.series_name, "pushed",
|
||||
Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
|
||||
Some(anilist_status), Some(progress_volumes), None,
|
||||
).await;
|
||||
}
|
||||
Err(e2) => {
|
||||
return Err(format!(
|
||||
"AniList rate limit exceeded (429) — job stopped after {processed}/{total} series: {e2}"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("[READING_STATUS_PUSH] series '{}': {e}", series.series_name);
|
||||
insert_push_result(
|
||||
pool, job_id, library_id, &series.series_name, "error",
|
||||
Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
|
||||
None, None, Some(&e),
|
||||
).await;
|
||||
}
|
||||
}
|
||||
|
||||
// Respect AniList rate limit (~90 req/min)
|
||||
tokio::time::sleep(Duration::from_millis(700)).await;
|
||||
}
|
||||
|
||||
// Build final stats
|
||||
let counts = sqlx::query(
|
||||
"SELECT status, COUNT(*) as cnt FROM reading_status_push_results WHERE job_id = $1 GROUP BY status",
|
||||
)
|
||||
.bind(job_id)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let mut count_pushed = 0i64;
|
||||
let mut count_skipped = 0i64;
|
||||
let mut count_no_books = 0i64;
|
||||
let mut count_errors = 0i64;
|
||||
for row in &counts {
|
||||
let s: String = row.get("status");
|
||||
let c: i64 = row.get("cnt");
|
||||
match s.as_str() {
|
||||
"pushed" => count_pushed = c,
|
||||
"skipped" => count_skipped = c,
|
||||
"no_books" => count_no_books = c,
|
||||
"error" => count_errors = c,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let stats = serde_json::json!({
|
||||
"total_series": total as i64,
|
||||
"pushed": count_pushed,
|
||||
"skipped": count_skipped,
|
||||
"no_books": count_no_books,
|
||||
"errors": count_errors,
|
||||
});
|
||||
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, progress_percent = 100 WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(&stats)
|
||||
.execute(pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
info!(
|
||||
"[READING_STATUS_PUSH] job={job_id} completed: {}/{} series, pushed={count_pushed}, no_books={count_no_books}, errors={count_errors}",
|
||||
processed, total
|
||||
);
|
||||
|
||||
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
notifications::notify(
|
||||
pool.clone(),
|
||||
notifications::NotificationEvent::ReadingStatusPushCompleted {
|
||||
library_name,
|
||||
total_series: total,
|
||||
pushed: count_pushed as i32,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
async fn push_to_anilist(
|
||||
token: &str,
|
||||
anilist_id: i32,
|
||||
status: &str,
|
||||
progress: i32,
|
||||
) -> Result<(), String> {
|
||||
let gql = r#"
|
||||
mutation SaveMediaListEntry($mediaId: Int, $status: MediaListStatus, $progress: Int) {
|
||||
SaveMediaListEntry(mediaId: $mediaId, status: $status, progress: $progress) {
|
||||
id
|
||||
status
|
||||
progress
|
||||
}
|
||||
}
|
||||
"#;
|
||||
|
||||
anilist::anilist_graphql(
|
||||
token,
|
||||
gql,
|
||||
serde_json::json!({
|
||||
"mediaId": anilist_id,
|
||||
"status": status,
|
||||
"progress": progress,
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| e.message)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn insert_push_result(
|
||||
pool: &PgPool,
|
||||
job_id: Uuid,
|
||||
library_id: Uuid,
|
||||
series_name: &str,
|
||||
status: &str,
|
||||
anilist_id: Option<i32>,
|
||||
anilist_title: Option<&str>,
|
||||
anilist_url: Option<&str>,
|
||||
anilist_status: Option<&str>,
|
||||
progress_volumes: Option<i32>,
|
||||
error_message: Option<&str>,
|
||||
) {
|
||||
let _ = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO reading_status_push_results
|
||||
(job_id, library_id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||
"#,
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.bind(series_name)
|
||||
.bind(status)
|
||||
.bind(anilist_id)
|
||||
.bind(anilist_title)
|
||||
.bind(anilist_url)
|
||||
.bind(anilist_status)
|
||||
.bind(progress_volumes)
|
||||
.bind(error_message)
|
||||
.execute(pool)
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
sqlx::query_scalar::<_, String>("SELECT status FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
.as_deref()
|
||||
== Some("cancelled")
|
||||
}
|
||||
@@ -2,7 +2,7 @@ export const dynamic = "force-dynamic";
|
||||
|
||||
import { notFound } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, getReadingStatusMatchReport, getReadingStatusMatchResults, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusMatchResultDto } from "@/lib/api";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, getReadingStatusMatchReport, getReadingStatusMatchResults, getReadingStatusPushReport, getReadingStatusPushResults, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusMatchResultDto, ReadingStatusPushReportDto, ReadingStatusPushResultDto } from "@/lib/api";
|
||||
import {
|
||||
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
||||
StatusBadge, JobTypeBadge, StatBox, ProgressBar
|
||||
@@ -137,11 +137,17 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
description: t("jobType.reading_status_matchDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
reading_status_push: {
|
||||
label: t("jobType.reading_status_pushLabel"),
|
||||
description: t("jobType.reading_status_pushDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
};
|
||||
|
||||
const isMetadataBatch = job.type === "metadata_batch";
|
||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||
const isReadingStatusMatch = job.type === "reading_status_match";
|
||||
const isReadingStatusPush = job.type === "reading_status_push";
|
||||
|
||||
// Fetch batch report & results for metadata_batch jobs
|
||||
let batchReport: MetadataBatchReportDto | null = null;
|
||||
@@ -169,6 +175,16 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
]);
|
||||
}
|
||||
|
||||
// Fetch reading status push report & results
|
||||
let readingStatusPushReport: ReadingStatusPushReportDto | null = null;
|
||||
let readingStatusPushResults: ReadingStatusPushResultDto[] = [];
|
||||
if (isReadingStatusPush) {
|
||||
[readingStatusPushReport, readingStatusPushResults] = await Promise.all([
|
||||
getReadingStatusPushReport(id).catch(() => null),
|
||||
getReadingStatusPushResults(id).catch(() => []),
|
||||
]);
|
||||
}
|
||||
|
||||
const typeInfo = JOB_TYPE_INFO[job.type] ?? {
|
||||
label: job.type,
|
||||
description: null,
|
||||
@@ -195,6 +211,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
? t("jobDetail.metadataRefresh")
|
||||
: isReadingStatusMatch
|
||||
? t("jobDetail.readingStatusMatch")
|
||||
: isReadingStatusPush
|
||||
? t("jobDetail.readingStatusPush")
|
||||
: isThumbnailOnly
|
||||
? t("jobType.thumbnail_rebuild")
|
||||
: isExtractingPages
|
||||
@@ -209,6 +227,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
? t("jobDetail.metadataRefreshDesc")
|
||||
: isReadingStatusMatch
|
||||
? t("jobDetail.readingStatusMatchDesc")
|
||||
: isReadingStatusPush
|
||||
? t("jobDetail.readingStatusPushDesc")
|
||||
: isThumbnailOnly
|
||||
? undefined
|
||||
: isExtractingPages
|
||||
@@ -265,7 +285,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
— {readingStatusReport.linked} {t("jobDetail.linked").toLowerCase()}, {readingStatusReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {readingStatusReport.ambiguous} {t("jobDetail.ambiguous").toLowerCase()}, {readingStatusReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && job.stats_json && (
|
||||
{isReadingStatusPush && readingStatusPushReport && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {readingStatusPushReport.pushed} {t("jobDetail.pushed").toLowerCase()}, {readingStatusPushReport.no_books} {t("jobDetail.noBooks").toLowerCase()}, {readingStatusPushReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && job.stats_json && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
|
||||
{job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
|
||||
@@ -274,7 +299,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
{job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
|
||||
</span>
|
||||
@@ -539,7 +564,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
)}
|
||||
|
||||
{/* Index Statistics — index jobs only */}
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && (
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.indexStats")}</CardTitle>
|
||||
@@ -827,6 +852,92 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Reading status push — summary report */}
|
||||
{isReadingStatusPush && readingStatusPushReport && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.readingStatusPushReport")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(readingStatusPushReport.total_series) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
|
||||
<StatBox value={readingStatusPushReport.pushed} label={t("jobDetail.pushed")} variant="success" />
|
||||
<StatBox value={readingStatusPushReport.skipped} label={t("jobDetail.skipped")} variant="primary" />
|
||||
<StatBox value={readingStatusPushReport.no_books} label={t("jobDetail.noBooks")} />
|
||||
<StatBox value={readingStatusPushReport.errors} label={t("jobDetail.errors")} variant={readingStatusPushReport.errors > 0 ? "error" : "default"} />
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Reading status push — per-series detail */}
|
||||
{isReadingStatusPush && readingStatusPushResults.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.seriesProcessed", { count: String(readingStatusPushResults.length) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
|
||||
{readingStatusPushResults.map((r) => (
|
||||
<div
|
||||
key={r.id}
|
||||
className={`p-3 rounded-lg border ${
|
||||
r.status === "pushed" ? "bg-success/10 border-success/20" :
|
||||
r.status === "error" ? "bg-destructive/10 border-destructive/20" :
|
||||
r.status === "skipped" ? "bg-primary/10 border-primary/20" :
|
||||
"bg-muted/50 border-border/60"
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
{job.library_id ? (
|
||||
<Link
|
||||
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||
className="font-medium text-sm text-primary hover:underline truncate"
|
||||
>
|
||||
{r.series_name}
|
||||
</Link>
|
||||
) : (
|
||||
<span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
|
||||
)}
|
||||
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
|
||||
r.status === "pushed" ? "bg-success/20 text-success" :
|
||||
r.status === "skipped" ? "bg-primary/20 text-primary" :
|
||||
r.status === "no_books" ? "bg-muted text-muted-foreground" :
|
||||
r.status === "error" ? "bg-destructive/20 text-destructive" :
|
||||
"bg-muted text-muted-foreground"
|
||||
}`}>
|
||||
{r.status === "pushed" ? t("jobDetail.pushed") :
|
||||
r.status === "skipped" ? t("jobDetail.skipped") :
|
||||
r.status === "no_books" ? t("jobDetail.noBooks") :
|
||||
r.status === "error" ? t("common.error") :
|
||||
r.status}
|
||||
</span>
|
||||
</div>
|
||||
{r.status === "pushed" && r.anilist_title && (
|
||||
<div className="mt-1 flex items-center gap-1.5 text-xs text-muted-foreground">
|
||||
<svg className="w-3 h-3 text-success shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12" />
|
||||
</svg>
|
||||
{r.anilist_url ? (
|
||||
<a href={r.anilist_url} target="_blank" rel="noopener noreferrer" className="text-success hover:underline">
|
||||
{r.anilist_title}
|
||||
</a>
|
||||
) : (
|
||||
<span className="text-success">{r.anilist_title}</span>
|
||||
)}
|
||||
{r.anilist_status && <span className="text-muted-foreground/70 font-medium">{r.anilist_status}</span>}
|
||||
{r.progress_volumes != null && <span className="text-muted-foreground/60">vol. {r.progress_volumes}</span>}
|
||||
</div>
|
||||
)}
|
||||
{r.error_message && (
|
||||
<p className="text-xs text-destructive/80 mt-1">{r.error_message}</p>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata batch results */}
|
||||
{isMetadataBatch && batchResults.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, IndexJobDto, LibraryDto } from "@/lib/api";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush, IndexJobDto, LibraryDto } from "@/lib/api";
|
||||
import { JobsList } from "@/app/components/JobsList";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
|
||||
import { getServerTranslations } from "@/lib/i18n/server";
|
||||
@@ -149,6 +149,36 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
}
|
||||
}
|
||||
|
||||
async function triggerReadingStatusPush(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
if (libraryId) {
|
||||
let result;
|
||||
try {
|
||||
result = await startReadingStatusPush(libraryId);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
} else {
|
||||
// All libraries — only those with reading_status_provider configured
|
||||
const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
|
||||
let lastId: string | undefined;
|
||||
for (const lib of allLibraries) {
|
||||
if (!lib.reading_status_provider) continue;
|
||||
try {
|
||||
const result = await startReadingStatusPush(lib.id);
|
||||
if (result.status !== "already_running") lastId = result.id;
|
||||
} catch {
|
||||
// Skip libraries with errors
|
||||
}
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="mb-6">
|
||||
@@ -305,6 +335,16 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.matchReadingStatusShort")}</p>
|
||||
</button>
|
||||
<button type="submit" formAction={triggerReadingStatusPush}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-success shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.pushReadingStatus")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.pushReadingStatusShort")}</p>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch, IndexJobDto, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch } from "@/lib/api";
|
||||
import { apiFetch, IndexJobDto, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush } from "@/lib/api";
|
||||
|
||||
export async function POST(
|
||||
_request: NextRequest,
|
||||
@@ -32,6 +32,9 @@ export async function POST(
|
||||
case "reading_status_match":
|
||||
if (!libraryId) return NextResponse.json({ error: "Library ID required for reading status match" }, { status: 400 });
|
||||
return NextResponse.json(await startReadingStatusMatch(libraryId));
|
||||
case "reading_status_push":
|
||||
if (!libraryId) return NextResponse.json({ error: "Library ID required for reading status push" }, { status: 400 });
|
||||
return NextResponse.json(await startReadingStatusPush(libraryId));
|
||||
default:
|
||||
return NextResponse.json({ error: `Cannot replay job type: ${job.type}` }, { status: 400 });
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ interface JobRowProps {
|
||||
formatDuration: (start: string, end: string | null) => string;
|
||||
}
|
||||
|
||||
const REPLAYABLE_TYPES = new Set(["rebuild", "full_rebuild", "rescan", "scan", "thumbnail_rebuild", "thumbnail_regenerate", "metadata_batch", "metadata_refresh", "reading_status_match"]);
|
||||
const REPLAYABLE_TYPES = new Set(["rebuild", "full_rebuild", "rescan", "scan", "thumbnail_rebuild", "thumbnail_regenerate", "metadata_batch", "metadata_refresh", "reading_status_match", "reading_status_push"]);
|
||||
|
||||
export function JobRow({ job, libraryName, highlighted, onCancel, onReplay, formatDate, formatDuration }: JobRowProps) {
|
||||
const { t } = useTranslation();
|
||||
|
||||
@@ -118,6 +118,7 @@ export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
|
||||
metadata_batch: t("jobType.metadata_batch"),
|
||||
metadata_refresh: t("jobType.metadata_refresh"),
|
||||
reading_status_match: t("jobType.reading_status_match"),
|
||||
reading_status_push: t("jobType.reading_status_push"),
|
||||
};
|
||||
const label = jobTypeLabels[key] ?? type;
|
||||
return <Badge variant={variant} className={className}>{label}</Badge>;
|
||||
|
||||
@@ -1102,6 +1102,43 @@ export async function getReadingStatusMatchResults(jobId: string) {
|
||||
return apiFetch<ReadingStatusMatchResultDto[]>(`/reading-status/match/${jobId}/results`);
|
||||
}
|
||||
|
||||
export async function startReadingStatusPush(libraryId: string) {
|
||||
return apiFetch<{ id: string; status: string }>("/reading-status/push", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ library_id: libraryId }),
|
||||
});
|
||||
}
|
||||
|
||||
export type ReadingStatusPushReportDto = {
|
||||
job_id: string;
|
||||
status: string;
|
||||
total_series: number;
|
||||
pushed: number;
|
||||
skipped: number;
|
||||
no_books: number;
|
||||
errors: number;
|
||||
};
|
||||
|
||||
export type ReadingStatusPushResultDto = {
|
||||
id: string;
|
||||
series_name: string;
|
||||
status: "pushed" | "skipped" | "no_books" | "error";
|
||||
anilist_id: number | null;
|
||||
anilist_title: string | null;
|
||||
anilist_url: string | null;
|
||||
anilist_status: string | null;
|
||||
progress_volumes: number | null;
|
||||
error_message: string | null;
|
||||
};
|
||||
|
||||
export async function getReadingStatusPushReport(jobId: string) {
|
||||
return apiFetch<ReadingStatusPushReportDto>(`/reading-status/push/${jobId}/report`);
|
||||
}
|
||||
|
||||
export async function getReadingStatusPushResults(jobId: string) {
|
||||
return apiFetch<ReadingStatusPushResultDto[]>(`/reading-status/push/${jobId}/results`);
|
||||
}
|
||||
|
||||
export type RefreshFieldDiff = {
|
||||
field: string;
|
||||
old?: unknown;
|
||||
|
||||
@@ -262,6 +262,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobs.groupReadingStatus": "Reading status",
|
||||
"jobs.matchReadingStatus": "Match series",
|
||||
"jobs.matchReadingStatusShort": "Auto-link unmatched series to the reading status provider",
|
||||
"jobs.pushReadingStatus": "Push reading statuses",
|
||||
"jobs.pushReadingStatusShort": "Push changed reading statuses to AniList (differential push)",
|
||||
|
||||
// Jobs list
|
||||
"jobsList.id": "ID",
|
||||
@@ -368,6 +370,12 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobDetail.readingStatusMatchReport": "Match report",
|
||||
"jobDetail.linked": "Linked",
|
||||
"jobDetail.ambiguous": "Ambiguous",
|
||||
"jobDetail.readingStatusPush": "Reading status push",
|
||||
"jobDetail.readingStatusPushDesc": "Differential push of reading statuses to AniList",
|
||||
"jobDetail.readingStatusPushReport": "Push report",
|
||||
"jobDetail.pushed": "Pushed",
|
||||
"jobDetail.skipped": "Skipped",
|
||||
"jobDetail.noBooks": "No books",
|
||||
|
||||
// Job types
|
||||
"jobType.rebuild": "Indexing",
|
||||
@@ -397,6 +405,9 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobType.reading_status_match": "Reading status match",
|
||||
"jobType.reading_status_matchLabel": "Series matching (reading status)",
|
||||
"jobType.reading_status_matchDesc": "Automatically searches each series in the library against the configured reading status provider (e.g. AniList) and creates links for unambiguously identified series.",
|
||||
"jobType.reading_status_push": "Reading status push",
|
||||
"jobType.reading_status_pushLabel": "Reading status push",
|
||||
"jobType.reading_status_pushDesc": "Differentially pushes changed reading statuses (or new series) to AniList.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extracting pages",
|
||||
|
||||
@@ -260,6 +260,8 @@ const fr = {
|
||||
"jobs.groupReadingStatus": "Statut de lecture",
|
||||
"jobs.matchReadingStatus": "Correspondance des séries",
|
||||
"jobs.matchReadingStatusShort": "Lier automatiquement les séries non associées au provider",
|
||||
"jobs.pushReadingStatus": "Push des états de lecture",
|
||||
"jobs.pushReadingStatusShort": "Envoyer les états de lecture modifiés vers AniList (push différentiel)",
|
||||
|
||||
// Jobs list
|
||||
"jobsList.id": "ID",
|
||||
@@ -366,6 +368,12 @@ const fr = {
|
||||
"jobDetail.readingStatusMatchReport": "Rapport de correspondance",
|
||||
"jobDetail.linked": "Liées",
|
||||
"jobDetail.ambiguous": "Ambiguës",
|
||||
"jobDetail.readingStatusPush": "Push des états de lecture",
|
||||
"jobDetail.readingStatusPushDesc": "Envoi différentiel des états de lecture vers AniList",
|
||||
"jobDetail.readingStatusPushReport": "Rapport de push",
|
||||
"jobDetail.pushed": "Envoyés",
|
||||
"jobDetail.skipped": "Ignorés",
|
||||
"jobDetail.noBooks": "Sans livres",
|
||||
|
||||
// Job types
|
||||
"jobType.rebuild": "Indexation",
|
||||
@@ -395,6 +403,9 @@ const fr = {
|
||||
"jobType.reading_status_match": "Correspondance statut lecture",
|
||||
"jobType.reading_status_matchLabel": "Correspondance des séries (statut lecture)",
|
||||
"jobType.reading_status_matchDesc": "Recherche automatiquement chaque série de la bibliothèque sur le provider de statut de lecture configuré (ex. AniList) et crée les liens pour les séries identifiées sans ambiguïté.",
|
||||
"jobType.reading_status_push": "Push statut lecture",
|
||||
"jobType.reading_status_pushLabel": "Push des états de lecture",
|
||||
"jobType.reading_status_pushDesc": "Envoie les états de lecture modifiés (ou nouvelles séries) vers AniList de façon différentielle.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extraction des pages",
|
||||
|
||||
Reference in New Issue
Block a user