feat: add download detection job with Prowlarr integration
For each series with missing volumes and an approved metadata link, calls Prowlarr to find available matching releases and stores them in a report (no auto-download). Includes per-series detail page, Telegram notifications with per-event toggles, and stats display in the jobs table. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -112,19 +112,20 @@ The backoffice will be available at http://localhost:7082
|
|||||||
|
|
||||||
### Notifications
|
### Notifications
|
||||||
- **Telegram**: real-time notifications via Telegram Bot API
|
- **Telegram**: real-time notifications via Telegram Bot API
|
||||||
- 12 granular event toggles (scans, thumbnails, conversions, metadata)
|
- 16 granular event toggles (scans, thumbnails, conversions, metadata, reading status, download detection)
|
||||||
- Book thumbnail images included in notifications where applicable
|
- Book thumbnail images included in notifications where applicable
|
||||||
- Test connection from settings
|
- Test connection from settings
|
||||||
|
|
||||||
### External Integrations
|
### External Integrations
|
||||||
|
- **AniList**: bidirectional reading status sync — pull progress from AniList or push local statuses (PLANNING/CURRENT/COMPLETED) with differential detection and configurable auto-push schedule
|
||||||
- **Komga**: import reading progress
|
- **Komga**: import reading progress
|
||||||
- **Prowlarr**: search for missing volumes
|
- **Prowlarr**: search for missing volumes manually from series pages, or run a **download detection job** to automatically scan all series with missing volumes and report available releases
|
||||||
- **qBittorrent**: add torrents directly from search results
|
- **qBittorrent**: add torrents directly from search results
|
||||||
|
|
||||||
### Background Jobs
|
### Background Jobs
|
||||||
- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion
|
- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion, AniList reading status sync/push, download detection (Prowlarr)
|
||||||
- Real-time progress via Server-Sent Events (SSE)
|
- Real-time progress via Server-Sent Events (SSE)
|
||||||
- Job history, error tracking, cancellation
|
- Job history, error tracking, cancellation, replay
|
||||||
|
|
||||||
### Page Rendering
|
### Page Rendering
|
||||||
- On-demand page extraction from all formats
|
- On-demand page extraction from all formats
|
||||||
|
|||||||
611
apps/api/src/download_detection.rs
Normal file
611
apps/api/src/download_detection.rs
Normal file
@@ -0,0 +1,611 @@
|
|||||||
|
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use tracing::{info, warn};
use utoipa::ToSchema;
use uuid::Uuid;

use crate::{error::ApiError, notifications, prowlarr, state::AppState};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// DTOs
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Request body for `POST /download-detection/start`.
#[derive(Deserialize, ToSchema)]
pub struct StartDownloadDetectionRequest {
    /// UUID (as a string) of the library to scan for missing volumes.
    pub library_id: String,
}
|
||||||
|
|
||||||
|
/// Aggregated per-status counts for one download-detection job,
/// returned by `GET /download-detection/{id}/report`.
#[derive(Serialize, ToSchema)]
pub struct DownloadDetectionReportDto {
    /// Job UUID (exposed as a plain string in the OpenAPI schema).
    #[schema(value_type = String)]
    pub job_id: Uuid,
    /// Overall job status copied from `index_jobs.status`.
    pub status: String,
    /// Total number of series considered (from `index_jobs.total_files`).
    pub total_series: i64,
    /// Series with at least one matching release found on Prowlarr.
    pub found: i64,
    /// Series that were searched but yielded no matching release.
    pub not_found: i64,
    /// Series with no missing volumes (nothing to search for).
    pub no_missing: i64,
    /// Series without an approved metadata link (includes 'unclassified').
    pub no_metadata: i64,
    /// Series whose Prowlarr search failed with an error.
    pub errors: i64,
}
|
||||||
|
|
||||||
|
/// One per-series detail row of a download-detection job,
/// returned by `GET /download-detection/{id}/results`.
#[derive(Serialize, ToSchema)]
pub struct DownloadDetectionResultDto {
    /// Result-row UUID (exposed as a plain string in the OpenAPI schema).
    #[schema(value_type = String)]
    pub id: Uuid,
    pub series_name: String,
    /// 'found' | 'not_found' | 'no_missing' | 'no_metadata' | 'error'
    pub status: String,
    /// Number of volumes missing for this series at detection time.
    pub missing_count: i32,
    /// Matching releases; populated only for 'found' rows.
    pub available_releases: Option<Vec<AvailableReleaseDto>>,
    /// Error details; populated only for 'error' rows.
    pub error_message: Option<String>,
}
|
||||||
|
|
||||||
|
/// One Prowlarr release that matched at least one missing volume.
/// Both serialized into, and deserialized back from, the JSON stored in
/// `download_detection_results.available_releases`.
#[derive(Serialize, Deserialize, ToSchema)]
pub struct AvailableReleaseDto {
    /// Release title as reported by the indexer.
    pub title: String,
    /// Release size as reported by Prowlarr (presumably bytes — confirm).
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    /// Missing volume numbers that this release's title appears to cover.
    pub matched_missing_volumes: Vec<i32>,
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// POST /download-detection/start
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// POST /download-detection/start — kick off a background download-detection
/// job for one library.
///
/// Validates the library and the Prowlarr configuration, refuses to start a
/// second job while one is already pending/running for the same library, then
/// spawns the actual processing on a detached tokio task and returns
/// immediately with the new job id.
#[utoipa::path(
    post,
    path = "/download-detection/start",
    tag = "download_detection",
    request_body = StartDownloadDetectionRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_detection(
    State(state): State<AppState>,
    Json(body): Json<StartDownloadDetectionRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let library_id: Uuid = body
        .library_id
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;

    // Verify library exists
    sqlx::query("SELECT id FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;

    // Verify Prowlarr is configured
    prowlarr::check_prowlarr_configured(&state.pool).await?;

    // Check no existing running job for this library: a duplicate request
    // returns the id of the job already in flight instead of erroring.
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'download_detection' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;

    if let Some(existing_id) = existing {
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }

    // Insert the job row up-front so clients can poll it right away.
    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'download_detection', 'running', NOW())",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;

    let pool = state.pool.clone();
    // Resolved once here (best-effort) so the spawned task can include the
    // library name in notifications without another lookup.
    let library_name: Option<String> =
        sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await
            .ok()
            .flatten();

    // Detached task: the HTTP response does not wait for processing to finish.
    tokio::spawn(async move {
        match process_download_detection(&pool, job_id, library_id).await {
            Ok((total_series, found)) => {
                // NOTE(review): notify() is called without .await — confirm it
                // is a synchronous fire-and-forget helper (e.g. one that spawns
                // its own task); if it returns a Future, this drops it unpolled.
                notifications::notify(
                    pool,
                    notifications::NotificationEvent::DownloadDetectionCompleted {
                        library_name,
                        total_series,
                        found,
                    },
                );
            }
            Err(e) => {
                warn!("[DOWNLOAD_DETECTION] job {job_id} failed: {e}");
                // Best-effort: mark the job failed; a secondary DB error here
                // is deliberately ignored.
                let _ = sqlx::query(
                    "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
                )
                .bind(job_id)
                .bind(e.to_string())
                .execute(&pool)
                .await;
                notifications::notify(
                    pool,
                    notifications::NotificationEvent::DownloadDetectionFailed {
                        library_name,
                        error: e.to_string(),
                    },
                );
            }
        }
    });

    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "running",
    })))
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// GET /download-detection/:id/report
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/download-detection/{id}/report",
|
||||||
|
tag = "download_detection",
|
||||||
|
params(("id" = String, Path, description = "Job UUID")),
|
||||||
|
responses(
|
||||||
|
(status = 200, body = DownloadDetectionReportDto),
|
||||||
|
(status = 404, description = "Job not found"),
|
||||||
|
),
|
||||||
|
security(("Bearer" = []))
|
||||||
|
)]
|
||||||
|
pub async fn get_detection_report(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
axum::extract::Path(job_id): axum::extract::Path<Uuid>,
|
||||||
|
) -> Result<Json<DownloadDetectionReportDto>, ApiError> {
|
||||||
|
let row = sqlx::query(
|
||||||
|
"SELECT status, total_files FROM index_jobs WHERE id = $1 AND type = 'download_detection'",
|
||||||
|
)
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_optional(&state.pool)
|
||||||
|
.await?
|
||||||
|
.ok_or_else(|| ApiError::not_found("job not found"))?;
|
||||||
|
|
||||||
|
let job_status: String = row.get("status");
|
||||||
|
let total_files: Option<i32> = row.get("total_files");
|
||||||
|
|
||||||
|
let counts = sqlx::query(
|
||||||
|
"SELECT status, COUNT(*) as cnt FROM download_detection_results WHERE job_id = $1 GROUP BY status",
|
||||||
|
)
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut found = 0i64;
|
||||||
|
let mut not_found = 0i64;
|
||||||
|
let mut no_missing = 0i64;
|
||||||
|
let mut no_metadata = 0i64;
|
||||||
|
let mut errors = 0i64;
|
||||||
|
|
||||||
|
for r in &counts {
|
||||||
|
let status: String = r.get("status");
|
||||||
|
let cnt: i64 = r.get("cnt");
|
||||||
|
match status.as_str() {
|
||||||
|
"found" => found = cnt,
|
||||||
|
"not_found" => not_found = cnt,
|
||||||
|
"no_missing" => no_missing = cnt,
|
||||||
|
"no_metadata" => no_metadata = cnt,
|
||||||
|
"error" => errors = cnt,
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Json(DownloadDetectionReportDto {
|
||||||
|
job_id,
|
||||||
|
status: job_status,
|
||||||
|
total_series: total_files.unwrap_or(0) as i64,
|
||||||
|
found,
|
||||||
|
not_found,
|
||||||
|
no_missing,
|
||||||
|
no_metadata,
|
||||||
|
errors,
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// GET /download-detection/:id/results
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Query-string parameters accepted by `GET /download-detection/{id}/results`.
#[derive(Deserialize)]
pub struct ResultsQuery {
    /// Optional status filter ('found' | 'not_found' | 'no_missing' |
    /// 'no_metadata' | 'error'); when absent, all rows are returned.
    pub status: Option<String>,
}
|
||||||
|
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/download-detection/{id}/results",
|
||||||
|
tag = "download_detection",
|
||||||
|
params(
|
||||||
|
("id" = String, Path, description = "Job UUID"),
|
||||||
|
("status" = Option<String>, Query, description = "Filter by status"),
|
||||||
|
),
|
||||||
|
responses(
|
||||||
|
(status = 200, body = Vec<DownloadDetectionResultDto>),
|
||||||
|
),
|
||||||
|
security(("Bearer" = []))
|
||||||
|
)]
|
||||||
|
pub async fn get_detection_results(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
axum::extract::Path(job_id): axum::extract::Path<Uuid>,
|
||||||
|
axum::extract::Query(query): axum::extract::Query<ResultsQuery>,
|
||||||
|
) -> Result<Json<Vec<DownloadDetectionResultDto>>, ApiError> {
|
||||||
|
let rows = if let Some(status_filter) = &query.status {
|
||||||
|
sqlx::query(
|
||||||
|
"SELECT id, series_name, status, missing_count, available_releases, error_message
|
||||||
|
FROM download_detection_results
|
||||||
|
WHERE job_id = $1 AND status = $2
|
||||||
|
ORDER BY series_name",
|
||||||
|
)
|
||||||
|
.bind(job_id)
|
||||||
|
.bind(status_filter)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
} else {
|
||||||
|
sqlx::query(
|
||||||
|
"SELECT id, series_name, status, missing_count, available_releases, error_message
|
||||||
|
FROM download_detection_results
|
||||||
|
WHERE job_id = $1
|
||||||
|
ORDER BY status, series_name",
|
||||||
|
)
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
};
|
||||||
|
|
||||||
|
let results = rows
|
||||||
|
.iter()
|
||||||
|
.map(|row| {
|
||||||
|
let releases_json: Option<serde_json::Value> = row.get("available_releases");
|
||||||
|
let available_releases = releases_json.and_then(|v| {
|
||||||
|
serde_json::from_value::<Vec<AvailableReleaseDto>>(v).ok()
|
||||||
|
});
|
||||||
|
DownloadDetectionResultDto {
|
||||||
|
id: row.get("id"),
|
||||||
|
series_name: row.get("series_name"),
|
||||||
|
status: row.get("status"),
|
||||||
|
missing_count: row.get("missing_count"),
|
||||||
|
available_releases,
|
||||||
|
error_message: row.get("error_message"),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(Json(results))
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Background processing
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Core worker for a download-detection job (runs on a detached task).
///
/// For every distinct series in the library: skip it unless it has an
/// approved metadata link, look up which volumes are still missing, query
/// Prowlarr for matching releases, and persist one result row per series.
/// Finishes by denormalizing aggregate stats onto the job row.
///
/// Returns `(total_series, found_count)` on success; the error string is
/// written onto the job row by the caller.
pub(crate) async fn process_download_detection(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(i32, i64), String> {
    let (prowlarr_url, prowlarr_api_key, categories) =
        prowlarr::load_prowlarr_config_internal(pool)
            .await
            .map_err(|e| e.message)?;

    // Fetch all series with their metadata link status.
    // Books without a series value are grouped under 'unclassified'.
    let all_series: Vec<String> = sqlx::query_scalar(
        r#"
        SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
        FROM books
        WHERE library_id = $1
        ORDER BY 1
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    // Progress is tracked through the generic index_jobs columns
    // (total_files / processed_files / current_file repurposed for series).
    let total = all_series.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Fetch approved metadata links for this library (series_name -> link_id)
    let links: Vec<(String, Uuid)> = sqlx::query(
        "SELECT series_name, id FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?
    .into_iter()
    .map(|row| {
        let series_name: String = row.get("series_name");
        let link_id: Uuid = row.get("id");
        (series_name, link_id)
    })
    .collect();

    let link_map: std::collections::HashMap<String, Uuid> = links.into_iter().collect();

    // One shared HTTP client for every Prowlarr call, with a hard timeout so
    // a stuck indexer cannot hang the job indefinitely.
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    let mut processed = 0i32;

    for series_name in &all_series {
        // Cooperative cancellation: the jobs UI flips the status to
        // 'cancelled' and this loop checks it once per series.
        // NOTE(review): returning Ok((total, 0)) here makes the caller send a
        // "completed" notification for a cancelled job — confirm intended.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok((total, 0));
        }

        processed += 1;
        let progress = (processed * 100 / total.max(1)).min(100);
        // Progress updates are best-effort (.ok()): a transient DB error must
        // not abort the whole detection run.
        sqlx::query(
            "UPDATE index_jobs SET processed_files = $2, progress_percent = $3, current_file = $4 WHERE id = $1",
        )
        .bind(job_id)
        .bind(processed)
        .bind(progress)
        .bind(series_name)
        .execute(pool)
        .await
        .ok();

        // Skip unclassified
        if series_name == "unclassified" {
            insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
            continue;
        }

        // Check if this series has an approved metadata link
        let link_id = match link_map.get(series_name) {
            Some(id) => *id,
            None => {
                insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
                continue;
            }
        };

        // Fetch missing books for this series: external metadata rows that
        // have not been matched to a local book (book_id IS NULL).
        let missing_rows = sqlx::query(
            "SELECT volume_number FROM external_book_metadata WHERE link_id = $1 AND book_id IS NULL ORDER BY volume_number NULLS LAST",
        )
        .bind(link_id)
        .fetch_all(pool)
        .await
        .map_err(|e| e.to_string())?;

        if missing_rows.is_empty() {
            insert_result(pool, job_id, library_id, series_name, "no_missing", 0, None, None).await;
            continue;
        }

        // Rows with a NULL volume_number still count toward missing_count but
        // cannot be matched against release titles, so they are filtered out.
        let missing_volumes: Vec<i32> = missing_rows
            .iter()
            .filter_map(|row| row.get::<Option<i32>, _>("volume_number"))
            .collect();
        let missing_count = missing_rows.len() as i32;

        // Search Prowlarr
        match search_prowlarr_for_series(
            &client,
            &prowlarr_url,
            &prowlarr_api_key,
            &categories,
            series_name,
            &missing_volumes,
        )
        .await
        {
            Ok(matched_releases) if !matched_releases.is_empty() => {
                let releases_json = serde_json::to_value(&matched_releases).ok();
                insert_result(
                    pool,
                    job_id,
                    library_id,
                    series_name,
                    "found",
                    missing_count,
                    releases_json,
                    None,
                )
                .await;
            }
            Ok(_) => {
                insert_result(pool, job_id, library_id, series_name, "not_found", missing_count, None, None).await;
            }
            Err(e) => {
                // A per-series search failure is recorded but does not abort
                // the remaining series.
                warn!("[DOWNLOAD_DETECTION] series '{series_name}': {e}");
                insert_result(pool, job_id, library_id, series_name, "error", missing_count, None, Some(&e)).await;
            }
        }
    }

    // Build final stats
    let counts = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM download_detection_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    let mut count_found = 0i64;
    let mut count_not_found = 0i64;
    let mut count_no_missing = 0i64;
    let mut count_no_metadata = 0i64;
    let mut count_errors = 0i64;
    for row in &counts {
        let s: String = row.get("status");
        let c: i64 = row.get("cnt");
        match s.as_str() {
            "found" => count_found = c,
            "not_found" => count_not_found = c,
            "no_missing" => count_no_missing = c,
            "no_metadata" => count_no_metadata = c,
            "error" => count_errors = c,
            _ => {}
        }
    }

    // Stats are denormalized onto the job row for the jobs-table UI.
    let stats = serde_json::json!({
        "total_series": total as i64,
        "found": count_found,
        "not_found": count_not_found,
        "no_missing": count_no_missing,
        "no_metadata": count_no_metadata,
        "errors": count_errors,
    });

    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, progress_percent = 100 WHERE id = $1",
    )
    .bind(job_id)
    .bind(&stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    info!(
        "[DOWNLOAD_DETECTION] job={job_id} completed: {total} series, found={count_found}, not_found={count_not_found}, no_missing={count_no_missing}, no_metadata={count_no_metadata}, errors={count_errors}"
    );

    Ok((total, count_found))
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Helpers
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
async fn search_prowlarr_for_series(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
url: &str,
|
||||||
|
api_key: &str,
|
||||||
|
categories: &[i32],
|
||||||
|
series_name: &str,
|
||||||
|
missing_volumes: &[i32],
|
||||||
|
) -> Result<Vec<AvailableReleaseDto>, String> {
|
||||||
|
let query = format!("\"{}\"", series_name);
|
||||||
|
|
||||||
|
let mut params: Vec<(&str, String)> = vec![
|
||||||
|
("query", query),
|
||||||
|
("type", "search".to_string()),
|
||||||
|
];
|
||||||
|
for cat in categories {
|
||||||
|
params.push(("categories", cat.to_string()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(format!("{url}/api/v1/search"))
|
||||||
|
.query(¶ms)
|
||||||
|
.header("X-Api-Key", api_key)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Prowlarr request failed: {e}"))?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let text = resp.text().await.unwrap_or_default();
|
||||||
|
return Err(format!("Prowlarr returned {status}: {text}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let raw_releases: Vec<prowlarr::ProwlarrRawRelease> = resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to parse Prowlarr response: {e}"))?;
|
||||||
|
|
||||||
|
let matched: Vec<AvailableReleaseDto> = raw_releases
|
||||||
|
.into_iter()
|
||||||
|
.filter_map(|r| {
|
||||||
|
let title_volumes = prowlarr::extract_volumes_from_title_pub(&r.title);
|
||||||
|
let matched_vols: Vec<i32> = title_volumes
|
||||||
|
.into_iter()
|
||||||
|
.filter(|v| missing_volumes.contains(v))
|
||||||
|
.collect();
|
||||||
|
if matched_vols.is_empty() {
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(AvailableReleaseDto {
|
||||||
|
title: r.title,
|
||||||
|
size: r.size,
|
||||||
|
download_url: r.download_url,
|
||||||
|
indexer: r.indexer,
|
||||||
|
seeders: r.seeders,
|
||||||
|
matched_missing_volumes: matched_vols,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
Ok(matched)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Persist one per-series result row. Failures are deliberately swallowed
/// (`let _ =`): a single bad insert must not abort the whole detection run.
#[allow(clippy::too_many_arguments)]
async fn insert_result(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    // 'found' | 'not_found' | 'no_missing' | 'no_metadata' | 'error'
    status: &str,
    missing_count: i32,
    // JSON-encoded Vec<AvailableReleaseDto>; Some only for 'found' rows.
    available_releases: Option<serde_json::Value>,
    error_message: Option<&str>,
) {
    let _ = sqlx::query(
        r#"
        INSERT INTO download_detection_results
        (job_id, library_id, series_name, status, missing_count, available_releases, error_message)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        "#,
    )
    .bind(job_id)
    .bind(library_id)
    .bind(series_name)
    .bind(status)
    .bind(missing_count)
    .bind(&available_releases)
    .bind(error_message)
    .execute(pool)
    .await;
}
|
||||||
|
|
||||||
|
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||||
|
sqlx::query_scalar::<_, String>("SELECT status FROM index_jobs WHERE id = $1")
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
.as_deref()
|
||||||
|
== Some("cancelled")
|
||||||
|
}
|
||||||
@@ -2,6 +2,7 @@ mod anilist;
|
|||||||
mod auth;
|
mod auth;
|
||||||
mod authors;
|
mod authors;
|
||||||
mod books;
|
mod books;
|
||||||
|
mod download_detection;
|
||||||
mod error;
|
mod error;
|
||||||
mod handlers;
|
mod handlers;
|
||||||
mod index_jobs;
|
mod index_jobs;
|
||||||
@@ -153,6 +154,9 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
.route("/reading-status/push", axum::routing::post(reading_status_push::start_push))
|
.route("/reading-status/push", axum::routing::post(reading_status_push::start_push))
|
||||||
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
|
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
|
||||||
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
|
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
|
||||||
|
.route("/download-detection/start", axum::routing::post(download_detection::start_detection))
|
||||||
|
.route("/download-detection/:id/report", get(download_detection::get_detection_report))
|
||||||
|
.route("/download-detection/:id/results", get(download_detection::get_detection_results))
|
||||||
.merge(settings::settings_routes())
|
.merge(settings::settings_routes())
|
||||||
.route_layer(middleware::from_fn_with_state(
|
.route_layer(middleware::from_fn_with_state(
|
||||||
state.clone(),
|
state.clone(),
|
||||||
|
|||||||
@@ -85,6 +85,20 @@ struct ProwlarrConfig {
|
|||||||
categories: Option<Vec<i32>>,
|
categories: Option<Vec<i32>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Crate-visible wrapper around the private `load_prowlarr_config`, exposing
/// the `(base_url, api_key, categories)` triple to other modules
/// (e.g. download detection).
pub(crate) async fn load_prowlarr_config_internal(
    pool: &sqlx::PgPool,
) -> Result<(String, String, Vec<i32>), ApiError> {
    load_prowlarr_config(pool).await
}
|
||||||
|
|
||||||
|
/// Returns `Ok(())` iff a usable Prowlarr configuration exists; the loaded
/// values are discarded. Used as a pre-flight check before starting jobs.
pub(crate) async fn check_prowlarr_configured(pool: &sqlx::PgPool) -> Result<(), ApiError> {
    load_prowlarr_config(pool).await.map(|_| ())
}
|
||||||
|
|
||||||
|
/// Crate-visible wrapper around the private title parser so other modules
/// can extract volume numbers from release titles.
pub(crate) fn extract_volumes_from_title_pub(title: &str) -> Vec<i32> {
    extract_volumes_from_title(title)
}
|
||||||
|
|
||||||
async fn load_prowlarr_config(
|
async fn load_prowlarr_config(
|
||||||
pool: &sqlx::PgPool,
|
pool: &sqlx::PgPool,
|
||||||
) -> Result<(String, String, Vec<i32>), ApiError> {
|
) -> Result<(String, String, Vec<i32>), ApiError> {
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ export const dynamic = "force-dynamic";
|
|||||||
|
|
||||||
import { notFound } from "next/navigation";
|
import { notFound } from "next/navigation";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, getReadingStatusMatchReport, getReadingStatusMatchResults, getReadingStatusPushReport, getReadingStatusPushResults, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusMatchResultDto, ReadingStatusPushReportDto, ReadingStatusPushResultDto } from "@/lib/api";
|
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, getReadingStatusMatchReport, getReadingStatusMatchResults, getReadingStatusPushReport, getReadingStatusPushResults, getDownloadDetectionReport, getDownloadDetectionResults, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusMatchResultDto, ReadingStatusPushReportDto, ReadingStatusPushResultDto, DownloadDetectionReportDto, DownloadDetectionResultDto } from "@/lib/api";
|
||||||
import {
|
import {
|
||||||
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
||||||
StatusBadge, JobTypeBadge, StatBox, ProgressBar
|
StatusBadge, JobTypeBadge, StatBox, ProgressBar
|
||||||
@@ -142,12 +142,18 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
description: t("jobType.reading_status_pushDesc"),
|
description: t("jobType.reading_status_pushDesc"),
|
||||||
isThumbnailOnly: false,
|
isThumbnailOnly: false,
|
||||||
},
|
},
|
||||||
|
download_detection: {
|
||||||
|
label: t("jobType.download_detectionLabel"),
|
||||||
|
description: t("jobType.download_detectionDesc"),
|
||||||
|
isThumbnailOnly: false,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
const isMetadataBatch = job.type === "metadata_batch";
|
const isMetadataBatch = job.type === "metadata_batch";
|
||||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||||
const isReadingStatusMatch = job.type === "reading_status_match";
|
const isReadingStatusMatch = job.type === "reading_status_match";
|
||||||
const isReadingStatusPush = job.type === "reading_status_push";
|
const isReadingStatusPush = job.type === "reading_status_push";
|
||||||
|
const isDownloadDetection = job.type === "download_detection";
|
||||||
|
|
||||||
// Fetch batch report & results for metadata_batch jobs
|
// Fetch batch report & results for metadata_batch jobs
|
||||||
let batchReport: MetadataBatchReportDto | null = null;
|
let batchReport: MetadataBatchReportDto | null = null;
|
||||||
@@ -185,6 +191,16 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Fetch download detection report & results
|
||||||
|
let downloadDetectionReport: DownloadDetectionReportDto | null = null;
|
||||||
|
let downloadDetectionResults: DownloadDetectionResultDto[] = [];
|
||||||
|
if (isDownloadDetection) {
|
||||||
|
[downloadDetectionReport, downloadDetectionResults] = await Promise.all([
|
||||||
|
getDownloadDetectionReport(id).catch(() => null),
|
||||||
|
getDownloadDetectionResults(id, "found").catch(() => []),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
const typeInfo = JOB_TYPE_INFO[job.type] ?? {
|
const typeInfo = JOB_TYPE_INFO[job.type] ?? {
|
||||||
label: job.type,
|
label: job.type,
|
||||||
description: null,
|
description: null,
|
||||||
@@ -213,6 +229,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
? t("jobDetail.readingStatusMatch")
|
? t("jobDetail.readingStatusMatch")
|
||||||
: isReadingStatusPush
|
: isReadingStatusPush
|
||||||
? t("jobDetail.readingStatusPush")
|
? t("jobDetail.readingStatusPush")
|
||||||
|
: isDownloadDetection
|
||||||
|
? t("jobDetail.downloadDetection")
|
||||||
: isThumbnailOnly
|
: isThumbnailOnly
|
||||||
? t("jobType.thumbnail_rebuild")
|
? t("jobType.thumbnail_rebuild")
|
||||||
: isExtractingPages
|
: isExtractingPages
|
||||||
@@ -229,6 +247,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
? t("jobDetail.readingStatusMatchDesc")
|
? t("jobDetail.readingStatusMatchDesc")
|
||||||
: isReadingStatusPush
|
: isReadingStatusPush
|
||||||
? t("jobDetail.readingStatusPushDesc")
|
? t("jobDetail.readingStatusPushDesc")
|
||||||
|
: isDownloadDetection
|
||||||
|
? t("jobDetail.downloadDetectionDesc")
|
||||||
: isThumbnailOnly
|
: isThumbnailOnly
|
||||||
? undefined
|
? undefined
|
||||||
: isExtractingPages
|
: isExtractingPages
|
||||||
@@ -290,7 +310,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
— {readingStatusPushReport.pushed} {t("jobDetail.pushed").toLowerCase()}, {readingStatusPushReport.no_books} {t("jobDetail.noBooks").toLowerCase()}, {readingStatusPushReport.errors} {t("jobDetail.errors").toLowerCase()}
|
— {readingStatusPushReport.pushed} {t("jobDetail.pushed").toLowerCase()}, {readingStatusPushReport.no_books} {t("jobDetail.noBooks").toLowerCase()}, {readingStatusPushReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||||
</span>
|
</span>
|
||||||
)}
|
)}
|
||||||
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && job.stats_json && (
|
{isDownloadDetection && downloadDetectionReport && (
|
||||||
|
<span className="ml-2 text-success/80">
|
||||||
|
— {downloadDetectionReport.found} {t("jobDetail.downloadFound").toLowerCase()}, {downloadDetectionReport.not_found} {t("jobDetail.downloadNotFound").toLowerCase()}, {downloadDetectionReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
{!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !isDownloadDetection && job.stats_json && (
|
||||||
<span className="ml-2 text-success/80">
|
<span className="ml-2 text-success/80">
|
||||||
— {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
|
— {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
|
||||||
{job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
|
{job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
|
||||||
@@ -564,7 +589,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
{/* Index Statistics — index jobs only */}
|
{/* Index Statistics — index jobs only */}
|
||||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && (
|
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !isDownloadDetection && (
|
||||||
<Card>
|
<Card>
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle>{t("jobDetail.indexStats")}</CardTitle>
|
<CardTitle>{t("jobDetail.indexStats")}</CardTitle>
|
||||||
@@ -938,6 +963,85 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
|||||||
</Card>
|
</Card>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Download detection — summary report */}
|
||||||
|
{isDownloadDetection && downloadDetectionReport && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.downloadDetectionReport")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(downloadDetectionReport.total_series) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
|
||||||
|
<StatBox value={downloadDetectionReport.found} label={t("jobDetail.downloadFound")} variant="success" />
|
||||||
|
<StatBox value={downloadDetectionReport.not_found} label={t("jobDetail.downloadNotFound")} />
|
||||||
|
<StatBox value={downloadDetectionReport.no_missing} label={t("jobDetail.downloadNoMissing")} variant="primary" />
|
||||||
|
<StatBox value={downloadDetectionReport.no_metadata} label={t("jobDetail.downloadNoMetadata")} />
|
||||||
|
<StatBox value={downloadDetectionReport.errors} label={t("jobDetail.errors")} variant={downloadDetectionReport.errors > 0 ? "error" : "default"} />
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Download detection — available releases per series */}
|
||||||
|
{isDownloadDetection && downloadDetectionResults.length > 0 && (
|
||||||
|
<Card className="lg:col-span-2">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.downloadAvailableReleases")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.downloadAvailableReleasesDesc", { count: String(downloadDetectionResults.length) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-4 max-h-[700px] overflow-y-auto">
|
||||||
|
{downloadDetectionResults.map((r) => (
|
||||||
|
<div key={r.id} className="rounded-lg border border-success/20 bg-success/5 p-3">
|
||||||
|
<div className="flex items-center justify-between gap-2 mb-2">
|
||||||
|
{job.library_id ? (
|
||||||
|
<Link
|
||||||
|
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||||
|
className="font-semibold text-sm text-primary hover:underline truncate"
|
||||||
|
>
|
||||||
|
{r.series_name}
|
||||||
|
</Link>
|
||||||
|
) : (
|
||||||
|
<span className="font-semibold text-sm text-foreground truncate">{r.series_name}</span>
|
||||||
|
)}
|
||||||
|
<span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap bg-warning/20 text-warning shrink-0">
|
||||||
|
{t("jobDetail.downloadMissingCount", { count: String(r.missing_count) })}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
{r.available_releases && r.available_releases.length > 0 && (
|
||||||
|
<div className="space-y-1.5">
|
||||||
|
{r.available_releases.map((release, idx) => (
|
||||||
|
<div key={idx} className="flex items-start gap-2 p-2 rounded bg-background/60 border border-border/40">
|
||||||
|
<div className="flex-1 min-w-0">
|
||||||
|
<p className="text-xs font-mono text-foreground truncate" title={release.title}>{release.title}</p>
|
||||||
|
<div className="flex items-center gap-3 mt-1 flex-wrap">
|
||||||
|
{release.indexer && (
|
||||||
|
<span className="text-[10px] text-muted-foreground">{release.indexer}</span>
|
||||||
|
)}
|
||||||
|
{release.seeders != null && (
|
||||||
|
<span className="text-[10px] text-success font-medium">{release.seeders} {t("prowlarr.columnSeeders").toLowerCase()}</span>
|
||||||
|
)}
|
||||||
|
<span className="text-[10px] text-muted-foreground">
|
||||||
|
{(release.size / 1024 / 1024).toFixed(0)} MB
|
||||||
|
</span>
|
||||||
|
<div className="flex items-center gap-1">
|
||||||
|
{release.matched_missing_volumes.map((vol) => (
|
||||||
|
<span key={vol} className="text-[10px] px-1.5 py-0.5 rounded-full bg-success/20 text-success font-medium">
|
||||||
|
T.{vol}
|
||||||
|
</span>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Metadata batch results */}
|
{/* Metadata batch results */}
|
||||||
{isMetadataBatch && batchResults.length > 0 && (
|
{isMetadataBatch && batchResults.length > 0 && (
|
||||||
<Card className="lg:col-span-2">
|
<Card className="lg:col-span-2">
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import { revalidatePath } from "next/cache";
|
import { revalidatePath } from "next/cache";
|
||||||
import { redirect } from "next/navigation";
|
import { redirect } from "next/navigation";
|
||||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush, IndexJobDto, LibraryDto } from "@/lib/api";
|
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush, startDownloadDetection, apiFetch, IndexJobDto, LibraryDto } from "@/lib/api";
|
||||||
import { JobsList } from "@/app/components/JobsList";
|
import { JobsList } from "@/app/components/JobsList";
|
||||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
|
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
|
||||||
import { getServerTranslations } from "@/lib/i18n/server";
|
import { getServerTranslations } from "@/lib/i18n/server";
|
||||||
@@ -10,10 +10,12 @@ export const dynamic = "force-dynamic";
|
|||||||
export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
|
export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
|
||||||
const { highlight } = await searchParams;
|
const { highlight } = await searchParams;
|
||||||
const { t } = await getServerTranslations();
|
const { t } = await getServerTranslations();
|
||||||
const [jobs, libraries] = await Promise.all([
|
const [jobs, libraries, prowlarrSettings] = await Promise.all([
|
||||||
listJobs().catch(() => [] as IndexJobDto[]),
|
listJobs().catch(() => [] as IndexJobDto[]),
|
||||||
fetchLibraries().catch(() => [] as LibraryDto[])
|
fetchLibraries().catch(() => [] as LibraryDto[]),
|
||||||
|
apiFetch<{ url?: string }>("/settings/prowlarr").catch(() => null),
|
||||||
]);
|
]);
|
||||||
|
const prowlarrConfigured = !!prowlarrSettings?.url;
|
||||||
|
|
||||||
const libraryMap = new Map(libraries.map(l => [l.id, l.name]));
|
const libraryMap = new Map(libraries.map(l => [l.id, l.name]));
|
||||||
const readingStatusLibraries = libraries.filter(l => l.reading_status_provider);
|
const readingStatusLibraries = libraries.filter(l => l.reading_status_provider);
|
||||||
@@ -179,6 +181,35 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function triggerDownloadDetection(formData: FormData) {
|
||||||
|
"use server";
|
||||||
|
const libraryId = formData.get("library_id") as string;
|
||||||
|
if (libraryId) {
|
||||||
|
let result;
|
||||||
|
try {
|
||||||
|
result = await startDownloadDetection(libraryId);
|
||||||
|
} catch {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
revalidatePath("/jobs");
|
||||||
|
redirect(`/jobs?highlight=${result.id}`);
|
||||||
|
} else {
|
||||||
|
// All libraries
|
||||||
|
const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
|
||||||
|
let lastId: string | undefined;
|
||||||
|
for (const lib of allLibraries) {
|
||||||
|
try {
|
||||||
|
const result = await startDownloadDetection(lib.id);
|
||||||
|
if (result.status !== "already_running") lastId = result.id;
|
||||||
|
} catch {
|
||||||
|
// Skip libraries with errors (e.g. Prowlarr not configured)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
revalidatePath("/jobs");
|
||||||
|
redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<div className="mb-6">
|
<div className="mb-6">
|
||||||
@@ -349,6 +380,28 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
|||||||
</div>
|
</div>
|
||||||
)}
|
)}
|
||||||
|
|
||||||
|
{/* Download group — only shown if Prowlarr is configured */}
|
||||||
|
{prowlarrConfigured && <div className="space-y-3">
|
||||||
|
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||||
|
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4" />
|
||||||
|
</svg>
|
||||||
|
{t("jobs.groupProwlarr")}
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<button type="submit" formAction={triggerDownloadDetection}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0zM10 7v3m0 0v3m0-3h3m-3 0H7" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.downloadDetection")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.downloadDetectionShort")}</p>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>}
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
</form>
|
</form>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
|
|||||||
@@ -17,6 +17,12 @@ export const DEFAULT_EVENTS = {
|
|||||||
metadata_batch_failed: true,
|
metadata_batch_failed: true,
|
||||||
metadata_refresh_completed: true,
|
metadata_refresh_completed: true,
|
||||||
metadata_refresh_failed: true,
|
metadata_refresh_failed: true,
|
||||||
|
reading_status_match_completed: true,
|
||||||
|
reading_status_match_failed: true,
|
||||||
|
reading_status_push_completed: true,
|
||||||
|
reading_status_push_failed: true,
|
||||||
|
download_detection_completed: true,
|
||||||
|
download_detection_failed: true,
|
||||||
};
|
};
|
||||||
|
|
||||||
export function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
export function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
||||||
@@ -191,6 +197,24 @@ export function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (ke
|
|||||||
{ key: "metadata_refresh_failed" as const, label: t("settings.eventRefreshFailed") },
|
{ key: "metadata_refresh_failed" as const, label: t("settings.eventRefreshFailed") },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryReadingStatus"),
|
||||||
|
icon: "books" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "reading_status_match_completed" as const, label: t("settings.eventMatchCompleted") },
|
||||||
|
{ key: "reading_status_match_failed" as const, label: t("settings.eventMatchFailed") },
|
||||||
|
{ key: "reading_status_push_completed" as const, label: t("settings.eventPushCompleted") },
|
||||||
|
{ key: "reading_status_push_failed" as const, label: t("settings.eventPushFailed") },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryDownloadDetection"),
|
||||||
|
icon: "download" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "download_detection_completed" as const, label: t("settings.eventCompleted") },
|
||||||
|
{ key: "download_detection_failed" as const, label: t("settings.eventFailed") },
|
||||||
|
],
|
||||||
|
},
|
||||||
]).map(({ category, icon, items }) => (
|
]).map(({ category, icon, items }) => (
|
||||||
<div key={category}>
|
<div key={category}>
|
||||||
<p className="text-xs font-medium text-muted-foreground uppercase tracking-wide mb-2 flex items-center gap-1.5">
|
<p className="text-xs font-medium text-muted-foreground uppercase tracking-wide mb-2 flex items-center gap-1.5">
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ interface JobRowProps {
|
|||||||
refreshed?: number;
|
refreshed?: number;
|
||||||
linked?: number;
|
linked?: number;
|
||||||
pushed?: number;
|
pushed?: number;
|
||||||
|
found?: number;
|
||||||
} | null;
|
} | null;
|
||||||
progress_percent: number | null;
|
progress_percent: number | null;
|
||||||
processed_files: number | null;
|
processed_files: number | null;
|
||||||
@@ -69,6 +70,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, onReplay, form
|
|||||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||||
const isReadingStatusMatch = job.type === "reading_status_match";
|
const isReadingStatusMatch = job.type === "reading_status_match";
|
||||||
const isReadingStatusPush = job.type === "reading_status_push";
|
const isReadingStatusPush = job.type === "reading_status_push";
|
||||||
|
const isDownloadDetection = job.type === "download_detection";
|
||||||
|
|
||||||
// Thumbnails progress (Phase 2: extracting_pages + generating_thumbnails)
|
// Thumbnails progress (Phase 2: extracting_pages + generating_thumbnails)
|
||||||
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isPhase2);
|
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isPhase2);
|
||||||
@@ -210,6 +212,23 @@ export function JobRow({ job, libraryName, highlighted, onCancel, onReplay, form
|
|||||||
</span>
|
</span>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
|
{/* Download detection: total series + found count */}
|
||||||
|
{isDownloadDetection && job.total_files != null && job.total_files > 0 && (
|
||||||
|
<Tooltip label={t("jobRow.seriesTotal", { count: job.total_files })}>
|
||||||
|
<span className="inline-flex items-center gap-1 text-info">
|
||||||
|
<Icon name="series" size="sm" />
|
||||||
|
{job.total_files}
|
||||||
|
</span>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
|
{isDownloadDetection && job.stats_json?.found != null && job.stats_json.found > 0 && (
|
||||||
|
<Tooltip label={t("jobRow.downloadFound", { count: job.stats_json.found })}>
|
||||||
|
<span className="inline-flex items-center gap-1 text-success">
|
||||||
|
<Icon name="download" size="sm" />
|
||||||
|
{job.stats_json.found}
|
||||||
|
</span>
|
||||||
|
</Tooltip>
|
||||||
|
)}
|
||||||
{/* Errors */}
|
{/* Errors */}
|
||||||
{errors > 0 && (
|
{errors > 0 && (
|
||||||
<Tooltip label={t("jobRow.errors", { count: errors })}>
|
<Tooltip label={t("jobRow.errors", { count: errors })}>
|
||||||
@@ -229,7 +248,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, onReplay, form
|
|||||||
</Tooltip>
|
</Tooltip>
|
||||||
)}
|
)}
|
||||||
{/* Nothing to show */}
|
{/* Nothing to show */}
|
||||||
{indexed === 0 && removed === 0 && errors === 0 && scanned === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && (
|
{indexed === 0 && removed === 0 && errors === 0 && scanned === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !isDownloadDetection && (
|
||||||
<span className="text-sm text-muted-foreground">—</span>
|
<span className="text-sm text-muted-foreground">—</span>
|
||||||
)}
|
)}
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
@@ -37,7 +37,8 @@ type IconName =
|
|||||||
| "authors"
|
| "authors"
|
||||||
| "bell"
|
| "bell"
|
||||||
| "link"
|
| "link"
|
||||||
| "eye";
|
| "eye"
|
||||||
|
| "download";
|
||||||
|
|
||||||
type IconSize = "sm" | "md" | "lg" | "xl";
|
type IconSize = "sm" | "md" | "lg" | "xl";
|
||||||
|
|
||||||
@@ -94,6 +95,7 @@ const icons: Record<IconName, string> = {
|
|||||||
bell: "M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9",
|
bell: "M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9",
|
||||||
link: "M13.828 10.172a4 4 0 00-5.656 0l-4 4a4 4 0 105.656 5.656l1.102-1.101m-.758-4.899a4 4 0 005.656 0l4-4a4 4 0 00-5.656-5.656l-1.1 1.1",
|
link: "M13.828 10.172a4 4 0 00-5.656 0l-4 4a4 4 0 105.656 5.656l1.102-1.101m-.758-4.899a4 4 0 005.656 0l4-4a4 4 0 00-5.656-5.656l-1.1 1.1",
|
||||||
eye: "M15 12a3 3 0 11-6 0 3 3 0 016 0zm-3-9C7.477 3 3.268 6.11 1.5 12c1.768 5.89 5.977 9 10.5 9s8.732-3.11 10.5-9C20.732 6.11 16.523 3 12 3z",
|
eye: "M15 12a3 3 0 11-6 0 3 3 0 016 0zm-3-9C7.477 3 3.268 6.11 1.5 12c1.768 5.89 5.977 9 10.5 9s8.732-3.11 10.5-9C20.732 6.11 16.523 3 12 3z",
|
||||||
|
download: "M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4",
|
||||||
};
|
};
|
||||||
|
|
||||||
const colorClasses: Partial<Record<IconName, string>> = {
|
const colorClasses: Partial<Record<IconName, string>> = {
|
||||||
|
|||||||
@@ -1141,6 +1141,53 @@ export async function getReadingStatusPushResults(jobId: string) {
|
|||||||
return apiFetch<ReadingStatusPushResultDto[]>(`/reading-status/push/${jobId}/results`);
|
return apiFetch<ReadingStatusPushResultDto[]>(`/reading-status/push/${jobId}/results`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export async function startDownloadDetection(libraryId: string) {
|
||||||
|
return apiFetch<{ id: string; status: string }>("/download-detection/start", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify({ library_id: libraryId }),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export type AvailableReleaseDto = {
|
||||||
|
title: string;
|
||||||
|
size: number;
|
||||||
|
download_url: string | null;
|
||||||
|
indexer: string | null;
|
||||||
|
seeders: number | null;
|
||||||
|
matched_missing_volumes: number[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DownloadDetectionReportDto = {
|
||||||
|
job_id: string;
|
||||||
|
status: string;
|
||||||
|
total_series: number;
|
||||||
|
found: number;
|
||||||
|
not_found: number;
|
||||||
|
no_missing: number;
|
||||||
|
no_metadata: number;
|
||||||
|
errors: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type DownloadDetectionResultDto = {
|
||||||
|
id: string;
|
||||||
|
series_name: string;
|
||||||
|
status: "found" | "not_found" | "no_missing" | "no_metadata" | "error";
|
||||||
|
missing_count: number;
|
||||||
|
available_releases: AvailableReleaseDto[] | null;
|
||||||
|
error_message: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function getDownloadDetectionReport(jobId: string) {
|
||||||
|
return apiFetch<DownloadDetectionReportDto>(`/download-detection/${jobId}/report`);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getDownloadDetectionResults(jobId: string, status?: string) {
|
||||||
|
const url = status
|
||||||
|
? `/download-detection/${jobId}/results?status=${encodeURIComponent(status)}`
|
||||||
|
: `/download-detection/${jobId}/results`;
|
||||||
|
return apiFetch<DownloadDetectionResultDto[]>(url);
|
||||||
|
}
|
||||||
|
|
||||||
export type RefreshFieldDiff = {
|
export type RefreshFieldDiff = {
|
||||||
field: string;
|
field: string;
|
||||||
old?: unknown;
|
old?: unknown;
|
||||||
|
|||||||
@@ -266,6 +266,9 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"jobs.matchReadingStatusShort": "Auto-link unmatched series to the reading status provider",
|
"jobs.matchReadingStatusShort": "Auto-link unmatched series to the reading status provider",
|
||||||
"jobs.pushReadingStatus": "Push reading statuses",
|
"jobs.pushReadingStatus": "Push reading statuses",
|
||||||
"jobs.pushReadingStatusShort": "Push changed reading statuses to AniList (differential push)",
|
"jobs.pushReadingStatusShort": "Push changed reading statuses to AniList (differential push)",
|
||||||
|
"jobs.groupProwlarr": "Download",
|
||||||
|
"jobs.downloadDetection": "Download detection",
|
||||||
|
"jobs.downloadDetectionShort": "Search Prowlarr for available releases matching missing volumes",
|
||||||
|
|
||||||
// Jobs list
|
// Jobs list
|
||||||
"jobsList.id": "ID",
|
"jobsList.id": "ID",
|
||||||
@@ -290,6 +293,7 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"jobRow.seriesTotal": "{{count}} series total",
|
"jobRow.seriesTotal": "{{count}} series total",
|
||||||
"jobRow.seriesLinked": "{{count}} series linked",
|
"jobRow.seriesLinked": "{{count}} series linked",
|
||||||
"jobRow.seriesPushed": "{{count}} series pushed",
|
"jobRow.seriesPushed": "{{count}} series pushed",
|
||||||
|
"jobRow.downloadFound": "{{count}} releases found",
|
||||||
"jobRow.errors": "{{count}} errors",
|
"jobRow.errors": "{{count}} errors",
|
||||||
"jobRow.view": "View",
|
"jobRow.view": "View",
|
||||||
"jobRow.replay": "Replay",
|
"jobRow.replay": "Replay",
|
||||||
@@ -381,6 +385,16 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"jobDetail.pushed": "Pushed",
|
"jobDetail.pushed": "Pushed",
|
||||||
"jobDetail.skipped": "Skipped",
|
"jobDetail.skipped": "Skipped",
|
||||||
"jobDetail.noBooks": "No books",
|
"jobDetail.noBooks": "No books",
|
||||||
|
"jobDetail.downloadDetection": "Download detection",
|
||||||
|
"jobDetail.downloadDetectionDesc": "Scanning series with missing volumes via Prowlarr",
|
||||||
|
"jobDetail.downloadDetectionReport": "Detection report",
|
||||||
|
"jobDetail.downloadFound": "Available",
|
||||||
|
"jobDetail.downloadNotFound": "Not found",
|
||||||
|
"jobDetail.downloadNoMissing": "Complete",
|
||||||
|
"jobDetail.downloadNoMetadata": "No metadata",
|
||||||
|
"jobDetail.downloadAvailableReleases": "Available releases",
|
||||||
|
"jobDetail.downloadAvailableReleasesDesc": "{{count}} series with at least one release found",
|
||||||
|
"jobDetail.downloadMissingCount": "{{count}} missing",
|
||||||
|
|
||||||
// Job types
|
// Job types
|
||||||
"jobType.rebuild": "Indexing",
|
"jobType.rebuild": "Indexing",
|
||||||
@@ -413,6 +427,9 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"jobType.reading_status_push": "Reading status push",
|
"jobType.reading_status_push": "Reading status push",
|
||||||
"jobType.reading_status_pushLabel": "Reading status push",
|
"jobType.reading_status_pushLabel": "Reading status push",
|
||||||
"jobType.reading_status_pushDesc": "Differentially pushes changed reading statuses (or new series) to AniList.",
|
"jobType.reading_status_pushDesc": "Differentially pushes changed reading statuses (or new series) to AniList.",
|
||||||
|
"jobType.download_detection": "Download detection",
|
||||||
|
"jobType.download_detectionLabel": "Available downloads detection",
|
||||||
|
"jobType.download_detectionDesc": "Scans series with missing volumes and queries Prowlarr to find available releases. Downloads nothing — produces a report of opportunities only.",
|
||||||
|
|
||||||
// Status badges
|
// Status badges
|
||||||
"statusBadge.extracting_pages": "Extracting pages",
|
"statusBadge.extracting_pages": "Extracting pages",
|
||||||
@@ -647,6 +664,12 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"settings.eventBatchFailed": "Batch failed",
|
"settings.eventBatchFailed": "Batch failed",
|
||||||
"settings.eventRefreshCompleted": "Refresh completed",
|
"settings.eventRefreshCompleted": "Refresh completed",
|
||||||
"settings.eventRefreshFailed": "Refresh failed",
|
"settings.eventRefreshFailed": "Refresh failed",
|
||||||
|
"settings.eventCategoryReadingStatus": "Reading status",
|
||||||
|
"settings.eventMatchCompleted": "Sync completed",
|
||||||
|
"settings.eventMatchFailed": "Sync failed",
|
||||||
|
"settings.eventPushCompleted": "Push completed",
|
||||||
|
"settings.eventPushFailed": "Push failed",
|
||||||
|
"settings.eventCategoryDownloadDetection": "Download detection",
|
||||||
"settings.telegramHelp": "How to get the required information?",
|
"settings.telegramHelp": "How to get the required information?",
|
||||||
"settings.telegramHelpBot": "Open Telegram, search for <b>@BotFather</b>, send <code>/newbot</code> and follow the instructions. Copy the token it gives you.",
|
"settings.telegramHelpBot": "Open Telegram, search for <b>@BotFather</b>, send <code>/newbot</code> and follow the instructions. Copy the token it gives you.",
|
||||||
"settings.telegramHelpChat": "Send a message to your bot, then open <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> in your browser. The <b>chat id</b> is in <code>message.chat.id</code>.",
|
"settings.telegramHelpChat": "Send a message to your bot, then open <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> in your browser. The <b>chat id</b> is in <code>message.chat.id</code>.",
|
||||||
|
|||||||
@@ -264,6 +264,9 @@ const fr = {
|
|||||||
"jobs.matchReadingStatusShort": "Lier automatiquement les séries non associées au provider",
|
"jobs.matchReadingStatusShort": "Lier automatiquement les séries non associées au provider",
|
||||||
"jobs.pushReadingStatus": "Push des états de lecture",
|
"jobs.pushReadingStatus": "Push des états de lecture",
|
||||||
"jobs.pushReadingStatusShort": "Envoyer les états de lecture modifiés vers AniList (push différentiel)",
|
"jobs.pushReadingStatusShort": "Envoyer les états de lecture modifiés vers AniList (push différentiel)",
|
||||||
|
"jobs.groupProwlarr": "Téléchargement",
|
||||||
|
"jobs.downloadDetection": "Détection de téléchargements",
|
||||||
|
"jobs.downloadDetectionShort": "Cherche sur Prowlarr les releases disponibles pour les volumes manquants",
|
||||||
|
|
||||||
// Jobs list
|
// Jobs list
|
||||||
"jobsList.id": "ID",
|
"jobsList.id": "ID",
|
||||||
@@ -288,6 +291,7 @@ const fr = {
|
|||||||
"jobRow.seriesTotal": "{{count}} séries au total",
|
"jobRow.seriesTotal": "{{count}} séries au total",
|
||||||
"jobRow.seriesLinked": "{{count}} séries liées",
|
"jobRow.seriesLinked": "{{count}} séries liées",
|
||||||
"jobRow.seriesPushed": "{{count}} séries synchronisées",
|
"jobRow.seriesPushed": "{{count}} séries synchronisées",
|
||||||
|
"jobRow.downloadFound": "{{count}} releases trouvées",
|
||||||
"jobRow.errors": "{{count}} erreurs",
|
"jobRow.errors": "{{count}} erreurs",
|
||||||
"jobRow.view": "Voir",
|
"jobRow.view": "Voir",
|
||||||
"jobRow.replay": "Rejouer",
|
"jobRow.replay": "Rejouer",
|
||||||
@@ -379,6 +383,16 @@ const fr = {
|
|||||||
"jobDetail.pushed": "Envoyés",
|
"jobDetail.pushed": "Envoyés",
|
||||||
"jobDetail.skipped": "Ignorés",
|
"jobDetail.skipped": "Ignorés",
|
||||||
"jobDetail.noBooks": "Sans livres",
|
"jobDetail.noBooks": "Sans livres",
|
||||||
|
"jobDetail.downloadDetection": "Détection de téléchargements",
|
||||||
|
"jobDetail.downloadDetectionDesc": "Analyse des séries avec volumes manquants via Prowlarr",
|
||||||
|
"jobDetail.downloadDetectionReport": "Rapport de détection",
|
||||||
|
"jobDetail.downloadFound": "Disponibles",
|
||||||
|
"jobDetail.downloadNotFound": "Non trouvés",
|
||||||
|
"jobDetail.downloadNoMissing": "Complets",
|
||||||
|
"jobDetail.downloadNoMetadata": "Sans métadonnées",
|
||||||
|
"jobDetail.downloadAvailableReleases": "Releases disponibles",
|
||||||
|
"jobDetail.downloadAvailableReleasesDesc": "{{count}} série(s) avec au moins une release trouvée",
|
||||||
|
"jobDetail.downloadMissingCount": "{{count}} manquant(s)",
|
||||||
|
|
||||||
// Job types
|
// Job types
|
||||||
"jobType.rebuild": "Indexation",
|
"jobType.rebuild": "Indexation",
|
||||||
@@ -411,6 +425,9 @@ const fr = {
|
|||||||
"jobType.reading_status_push": "Push statut lecture",
|
"jobType.reading_status_push": "Push statut lecture",
|
||||||
"jobType.reading_status_pushLabel": "Push des états de lecture",
|
"jobType.reading_status_pushLabel": "Push des états de lecture",
|
||||||
"jobType.reading_status_pushDesc": "Envoie les états de lecture modifiés (ou nouvelles séries) vers AniList de façon différentielle.",
|
"jobType.reading_status_pushDesc": "Envoie les états de lecture modifiés (ou nouvelles séries) vers AniList de façon différentielle.",
|
||||||
|
"jobType.download_detection": "Détection téléchargements",
|
||||||
|
"jobType.download_detectionLabel": "Détection de téléchargements disponibles",
|
||||||
|
"jobType.download_detectionDesc": "Analyse les séries avec des volumes manquants et interroge Prowlarr pour trouver les releases disponibles. Ne télécharge rien — produit uniquement un rapport des opportunités.",
|
||||||
|
|
||||||
// Status badges
|
// Status badges
|
||||||
"statusBadge.extracting_pages": "Extraction des pages",
|
"statusBadge.extracting_pages": "Extraction des pages",
|
||||||
@@ -645,6 +662,12 @@ const fr = {
|
|||||||
"settings.eventBatchFailed": "Batch échoué",
|
"settings.eventBatchFailed": "Batch échoué",
|
||||||
"settings.eventRefreshCompleted": "Rafraîchissement terminé",
|
"settings.eventRefreshCompleted": "Rafraîchissement terminé",
|
||||||
"settings.eventRefreshFailed": "Rafraîchissement échoué",
|
"settings.eventRefreshFailed": "Rafraîchissement échoué",
|
||||||
|
"settings.eventCategoryReadingStatus": "État de lecture",
|
||||||
|
"settings.eventMatchCompleted": "Synchro. terminée",
|
||||||
|
"settings.eventMatchFailed": "Synchro. échouée",
|
||||||
|
"settings.eventPushCompleted": "Push terminé",
|
||||||
|
"settings.eventPushFailed": "Push échoué",
|
||||||
|
"settings.eventCategoryDownloadDetection": "Détection téléchargements",
|
||||||
"settings.telegramHelp": "Comment obtenir les informations ?",
|
"settings.telegramHelp": "Comment obtenir les informations ?",
|
||||||
"settings.telegramHelpBot": "Ouvrez Telegram, recherchez <b>@BotFather</b>, envoyez <code>/newbot</code> et suivez les instructions. Copiez le token fourni.",
|
"settings.telegramHelpBot": "Ouvrez Telegram, recherchez <b>@BotFather</b>, envoyez <code>/newbot</code> et suivez les instructions. Copiez le token fourni.",
|
||||||
"settings.telegramHelpChat": "Envoyez un message à votre bot, puis ouvrez <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> dans votre navigateur. Le <b>chat id</b> apparaît dans <code>message.chat.id</code>.",
|
"settings.telegramHelpChat": "Envoyez un message à votre bot, puis ouvrez <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> dans votre navigateur. Le <b>chat id</b> apparaît dans <code>message.chat.id</code>.",
|
||||||
|
|||||||
@@ -51,6 +51,10 @@ pub struct EventToggles {
|
|||||||
pub reading_status_push_completed: bool,
|
pub reading_status_push_completed: bool,
|
||||||
#[serde(default = "default_true")]
|
#[serde(default = "default_true")]
|
||||||
pub reading_status_push_failed: bool,
|
pub reading_status_push_failed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub download_detection_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub download_detection_failed: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn default_true() -> bool {
|
fn default_true() -> bool {
|
||||||
@@ -75,6 +79,8 @@ fn default_events() -> EventToggles {
|
|||||||
reading_status_match_failed: true,
|
reading_status_match_failed: true,
|
||||||
reading_status_push_completed: true,
|
reading_status_push_completed: true,
|
||||||
reading_status_push_failed: true,
|
reading_status_push_failed: true,
|
||||||
|
download_detection_completed: true,
|
||||||
|
download_detection_failed: true,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -280,6 +286,16 @@ pub enum NotificationEvent {
|
|||||||
library_name: Option<String>,
|
library_name: Option<String>,
|
||||||
error: String,
|
error: String,
|
||||||
},
|
},
|
||||||
|
// Download detection (Prowlarr search for missing volumes)
|
||||||
|
DownloadDetectionCompleted {
|
||||||
|
library_name: Option<String>,
|
||||||
|
total_series: i32,
|
||||||
|
found: i64,
|
||||||
|
},
|
||||||
|
DownloadDetectionFailed {
|
||||||
|
library_name: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Classify an indexer job_type string into the right event constructor category.
|
/// Classify an indexer job_type string into the right event constructor category.
|
||||||
@@ -557,6 +573,37 @@ fn format_event(event: &NotificationEvent) -> String {
|
|||||||
]
|
]
|
||||||
.join("\n")
|
.join("\n")
|
||||||
}
|
}
|
||||||
|
NotificationEvent::DownloadDetectionCompleted {
|
||||||
|
library_name,
|
||||||
|
total_series,
|
||||||
|
found,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
[
|
||||||
|
format!("✅ <b>Download detection completed</b>"),
|
||||||
|
String::new(),
|
||||||
|
format!("📂 <b>Library:</b> {lib}"),
|
||||||
|
String::new(),
|
||||||
|
format!("📊 <b>Results</b>"),
|
||||||
|
format!(" 📥 Available: <b>{found}</b> / <b>{total_series}</b> series"),
|
||||||
|
]
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
|
NotificationEvent::DownloadDetectionFailed {
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
[
|
||||||
|
format!("🚨 <b>Download detection failed</b>"),
|
||||||
|
String::new(),
|
||||||
|
format!("📂 <b>Library:</b> {lib}"),
|
||||||
|
String::new(),
|
||||||
|
format!("💬 <code>{err}</code>"),
|
||||||
|
]
|
||||||
|
.join("\n")
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -601,6 +648,8 @@ fn is_event_enabled(config: &TelegramConfig, event: &NotificationEvent) -> bool
|
|||||||
NotificationEvent::ReadingStatusMatchFailed { .. } => config.events.reading_status_match_failed,
|
NotificationEvent::ReadingStatusMatchFailed { .. } => config.events.reading_status_match_failed,
|
||||||
NotificationEvent::ReadingStatusPushCompleted { .. } => config.events.reading_status_push_completed,
|
NotificationEvent::ReadingStatusPushCompleted { .. } => config.events.reading_status_push_completed,
|
||||||
NotificationEvent::ReadingStatusPushFailed { .. } => config.events.reading_status_push_failed,
|
NotificationEvent::ReadingStatusPushFailed { .. } => config.events.reading_status_push_failed,
|
||||||
|
NotificationEvent::DownloadDetectionCompleted { .. } => config.events.download_detection_completed,
|
||||||
|
NotificationEvent::DownloadDetectionFailed { .. } => config.events.download_detection_failed,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -150,6 +150,29 @@
|
|||||||
### Field Locking
|
### Field Locking
|
||||||
- Individual book fields can be locked to prevent external sync from overwriting manual edits
|
- Individual book fields can be locked to prevent external sync from overwriting manual edits
|
||||||
|
|
||||||
|
### AniList Reading Status Sync
|
||||||
|
|
||||||
|
Integration with AniList to synchronize reading progress in both directions for linked series.
|
||||||
|
|
||||||
|
#### Configuration
|
||||||
|
- AniList user ID required for pull/push operations
|
||||||
|
- Configured per library in the reading status provider settings
|
||||||
|
- Auto-push schedule configurable per library: `manual`, `hourly`, `daily`, `weekly`
|
||||||
|
|
||||||
|
#### Reading Status Match (`reading_status_match`)
|
||||||
|
- Pull reading progress from AniList and update local book statuses
|
||||||
|
- Maps AniList list status: `PLANNING` → `unread`, `CURRENT` → `reading`, `COMPLETED` → `read`
|
||||||
|
- Detailed per-series report: matched, updated, skipped, errors
|
||||||
|
- Rate limit handling: waits 10s and retries once on HTTP 429, aborts on second 429
|
||||||
|
|
||||||
|
#### Reading Status Push (`reading_status_push`)
|
||||||
|
- Differential push: only syncs series that changed since last push, have new books, or have never been synced
|
||||||
|
- Maps local status to AniList: `unread` → `PLANNING`, `reading` → `CURRENT`, `read` → `COMPLETED`
|
||||||
|
- Never auto-completes a series on AniList based solely on owned books (requires all books read)
|
||||||
|
- Per-series result tracking: pushed, skipped, no_books, error
|
||||||
|
- Same 429 retry logic as `reading_status_match`
|
||||||
|
- Auto-push schedule runs every minute check via indexer scheduler
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## External Integrations
|
## External Integrations
|
||||||
@@ -178,7 +201,7 @@
|
|||||||
- Test connection button in settings
|
- Test connection button in settings
|
||||||
|
|
||||||
### Granular Event Toggles
|
### Granular Event Toggles
|
||||||
12 individually configurable notification events grouped by category:
|
16 individually configurable notification events grouped by category:
|
||||||
|
|
||||||
| Category | Events |
|
| Category | Events |
|
||||||
|----------|--------|
|
|----------|--------|
|
||||||
@@ -186,6 +209,7 @@
|
|||||||
| Thumbnails | `thumbnail_completed`, `thumbnail_failed`, `thumbnail_cancelled` |
|
| Thumbnails | `thumbnail_completed`, `thumbnail_failed`, `thumbnail_cancelled` |
|
||||||
| Conversion | `conversion_completed`, `conversion_failed`, `conversion_cancelled` |
|
| Conversion | `conversion_completed`, `conversion_failed`, `conversion_cancelled` |
|
||||||
| Metadata | `metadata_approved`, `metadata_batch_completed`, `metadata_refresh_completed` |
|
| Metadata | `metadata_approved`, `metadata_batch_completed`, `metadata_refresh_completed` |
|
||||||
|
| Reading status | `reading_status_match_completed`, `reading_status_match_failed`, `reading_status_push_completed`, `reading_status_push_failed` |
|
||||||
|
|
||||||
### Thumbnail Images in Notifications
|
### Thumbnail Images in Notifications
|
||||||
- Book cover thumbnails attached to applicable notifications (conversion, metadata approval)
|
- Book cover thumbnails attached to applicable notifications (conversion, metadata approval)
|
||||||
@@ -233,6 +257,8 @@
|
|||||||
| `cbr_to_cbz` | Convert RAR to ZIP |
|
| `cbr_to_cbz` | Convert RAR to ZIP |
|
||||||
| `metadata_batch` | Auto-match series to metadata |
|
| `metadata_batch` | Auto-match series to metadata |
|
||||||
| `metadata_refresh` | Update approved metadata links |
|
| `metadata_refresh` | Update approved metadata links |
|
||||||
|
| `reading_status_match` | Pull reading progress from AniList to local |
|
||||||
|
| `reading_status_push` | Differential push of reading statuses to AniList |
|
||||||
|
|
||||||
### Job Lifecycle
|
### Job Lifecycle
|
||||||
- Status flow: `pending` → `running` → `success` | `failed` | `cancelled`
|
- Status flow: `pending` → `running` → `success` | `failed` | `cancelled`
|
||||||
|
|||||||
20
infra/migrations/0060_add_download_detection_job.sql
Normal file
20
infra/migrations/0060_add_download_detection_job.sql
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
ALTER TABLE index_jobs
|
||||||
|
DROP CONSTRAINT IF EXISTS index_jobs_type_check,
|
||||||
|
ADD CONSTRAINT index_jobs_type_check
|
||||||
|
CHECK (type IN ('scan', 'rebuild', 'full_rebuild', 'rescan', 'thumbnail_rebuild', 'thumbnail_regenerate', 'cbr_to_cbz', 'metadata_batch', 'metadata_refresh', 'reading_status_match', 'reading_status_push', 'download_detection'));
|
||||||
|
|
||||||
|
CREATE TABLE download_detection_results (
|
||||||
|
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||||
|
job_id UUID NOT NULL REFERENCES index_jobs(id) ON DELETE CASCADE,
|
||||||
|
library_id UUID NOT NULL,
|
||||||
|
series_name TEXT NOT NULL,
|
||||||
|
-- 'found' | 'not_found' | 'no_missing' | 'no_metadata' | 'error'
|
||||||
|
status TEXT NOT NULL,
|
||||||
|
missing_count INTEGER NOT NULL DEFAULT 0,
|
||||||
|
-- JSON array of available Prowlarr releases (simplified)
|
||||||
|
available_releases JSONB,
|
||||||
|
error_message TEXT,
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
|
||||||
|
CREATE INDEX download_detection_results_job_id_idx ON download_detection_results(job_id);
|
||||||
Reference in New Issue
Block a user