From e5c3542d3f23a5bf14255f8da942aa16aa850161 Mon Sep 17 00:00:00 2001 From: Froidefond Julien Date: Sat, 21 Mar 2026 14:23:19 +0100 Subject: [PATCH] refactor: split books.rs into books+series, reorganize OpenAPI tags and fix access control - Extract series code from books.rs into dedicated series.rs module - Reorganize OpenAPI tags: split overloaded "books" tag into books, series, search, stats - Add missing endpoints to OpenAPI: metadata_batch, metadata_refresh, komga, update_metadata_provider - Add missing schemas: MissingVolumeInput, Komga/Batch/Refresh DTOs - Fix access control: move GET /libraries and POST /libraries/:id/scan to read routes so non-admin tokens can list libraries and trigger scans Co-Authored-By: Claude Opus 4.6 --- apps/api/src/books.rs | 1010 +----------------------------------- apps/api/src/libraries.rs | 2 - apps/api/src/main.rs | 22 +- apps/api/src/openapi.rs | 60 ++- apps/api/src/search.rs | 2 +- apps/api/src/series.rs | 1015 +++++++++++++++++++++++++++++++++++++ apps/api/src/stats.rs | 2 +- 7 files changed, 1076 insertions(+), 1037 deletions(-) create mode 100644 apps/api/src/series.rs diff --git a/apps/api/src/books.rs b/apps/api/src/books.rs index 10afdca..15b2a51 100644 --- a/apps/api/src/books.rs +++ b/apps/api/src/books.rs @@ -346,784 +346,9 @@ pub async fn get_book( })) } -#[derive(Serialize, ToSchema)] -pub struct SeriesItem { - pub name: String, - pub book_count: i64, - pub books_read_count: i64, - #[schema(value_type = String)] - pub first_book_id: Uuid, - #[schema(value_type = String)] - pub library_id: Uuid, - pub series_status: Option, - pub missing_count: Option, - pub metadata_provider: Option, -} +// ─── Helpers ────────────────────────────────────────────────────────────────── -#[derive(Serialize, ToSchema)] -pub struct SeriesPage { - pub items: Vec, - pub total: i64, - pub page: i64, - pub limit: i64, -} - -#[derive(Deserialize, ToSchema)] -pub struct ListSeriesQuery { - #[schema(value_type = Option, example = 
"dragon")] - pub q: Option, - #[schema(value_type = Option, example = "unread,reading")] - pub reading_status: Option, - /// Filter by series status (e.g. "ongoing", "ended") - #[schema(value_type = Option, example = "ongoing")] - pub series_status: Option, - /// Filter series with missing books: "true" to show only series with missing books - #[schema(value_type = Option, example = "true")] - pub has_missing: Option, - /// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider) - #[schema(value_type = Option, example = "google_books")] - pub metadata_provider: Option, - #[schema(value_type = Option, example = 1)] - pub page: Option, - #[schema(value_type = Option, example = 50)] - pub limit: Option, -} - -/// List all series in a library with pagination -#[utoipa::path( - get, - path = "/libraries/{library_id}/series", - tag = "books", - params( - ("library_id" = String, Path, description = "Library UUID"), - ("q" = Option, Query, description = "Filter by series name (case-insensitive, partial match)"), - ("reading_status" = Option, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"), - ("metadata_provider" = Option, Query, description = "Filter by metadata provider: a provider name (e.g. 
'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"), - ("page" = Option, Query, description = "Page number (1-indexed, default 1)"), - ("limit" = Option, Query, description = "Items per page (max 200, default 50)"), - ), - responses( - (status = 200, body = SeriesPage), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn list_series( - State(state): State, - Path(library_id): Path, - Query(query): Query, -) -> Result, ApiError> { - let limit = query.limit.unwrap_or(50).clamp(1, 200); - let page = query.page.unwrap_or(1).max(1); - let offset = (page - 1) * limit; - - let reading_statuses: Option> = query.reading_status.as_deref().map(|s| { - s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect() - }); - - let series_status_expr = r#"CASE - WHEN sc.books_read_count = sc.book_count THEN 'read' - WHEN sc.books_read_count = 0 THEN 'unread' - ELSE 'reading' - END"#; - - let has_missing = query.has_missing.as_deref() == Some("true"); - - // Paramètres dynamiques — $1 = library_id fixe, puis optionnels dans l'ordre - let mut p: usize = 1; - - let q_cond = if query.q.is_some() { - p += 1; format!("AND sc.name ILIKE ${p}") - } else { String::new() }; - - let count_rs_cond = if reading_statuses.is_some() { - p += 1; format!("AND {series_status_expr} = ANY(${p})") - } else { String::new() }; - - let ss_cond = if query.series_status.is_some() { - p += 1; format!("AND LOWER(sm.status) = ${p}") - } else { String::new() }; - - let missing_cond = if has_missing { - "AND mc.missing_count > 0".to_string() - } else { String::new() }; - - let metadata_provider_cond = match query.metadata_provider.as_deref() { - Some("unlinked") => "AND ml.provider IS NULL".to_string(), - Some("linked") => "AND ml.provider IS NOT NULL".to_string(), - Some(_) => { p += 1; format!("AND ml.provider = ${p}") }, - None => String::new(), - }; - - let missing_cte = r#" - missing_counts AS ( - SELECT eml.series_name, - 
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count - FROM external_metadata_links eml - JOIN external_book_metadata ebm ON ebm.link_id = eml.id - WHERE eml.library_id = $1 AND eml.status = 'approved' - GROUP BY eml.series_name - ) - "#.to_string(); - - let metadata_links_cte = r#" - metadata_links AS ( - SELECT DISTINCT ON (eml.series_name, eml.library_id) - eml.series_name, eml.library_id, eml.provider - FROM external_metadata_links eml - WHERE eml.status = 'approved' - ORDER BY eml.series_name, eml.library_id, eml.created_at DESC - ) - "#; - - let count_sql = format!( - r#" - WITH sorted_books AS ( - SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id - FROM books WHERE library_id = $1 - ), - series_counts AS ( - SELECT sb.name, - COUNT(*) as book_count, - COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count - FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id - GROUP BY sb.name - ), - {missing_cte}, - {metadata_links_cte} - SELECT COUNT(*) FROM series_counts sc - LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name - LEFT JOIN missing_counts mc ON mc.series_name = sc.name - LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 - WHERE TRUE {q_cond} {count_rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} - "# - ); - - let limit_p = p + 1; - let offset_p = p + 2; - - let data_sql = format!( - r#" - WITH sorted_books AS ( - SELECT - COALESCE(NULLIF(series, ''), 'unclassified') as name, - id, - ROW_NUMBER() OVER ( - PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') - ORDER BY - volume NULLS LAST, - REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), - COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), - title ASC - ) as rn - FROM books - WHERE library_id = $1 - ), - series_counts AS ( - SELECT - sb.name, - COUNT(*) as book_count, - COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count - FROM sorted_books 
sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id - GROUP BY sb.name - ), - {missing_cte}, - {metadata_links_cte} - SELECT - sc.name, - sc.book_count, - sc.books_read_count, - sb.id as first_book_id, - sm.status as series_status, - mc.missing_count, - ml.provider as metadata_provider - FROM series_counts sc - JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 - LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name - LEFT JOIN missing_counts mc ON mc.series_name = sc.name - LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 - WHERE TRUE - {q_cond} - {count_rs_cond} - {ss_cond} - {missing_cond} - {metadata_provider_cond} - ORDER BY - REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), - COALESCE( - (REGEXP_MATCH(LOWER(sc.name), '\d+'))[1]::int, - 0 - ), - sc.name ASC - LIMIT ${limit_p} OFFSET ${offset_p} - "# - ); - - let q_pattern = query.q.as_deref().map(|q| format!("%{}%", q)); - - let mut count_builder = sqlx::query(&count_sql).bind(library_id); - let mut data_builder = sqlx::query(&data_sql).bind(library_id); - - if let Some(ref pat) = q_pattern { - count_builder = count_builder.bind(pat); - data_builder = data_builder.bind(pat); - } - if let Some(ref statuses) = reading_statuses { - count_builder = count_builder.bind(statuses.clone()); - data_builder = data_builder.bind(statuses.clone()); - } - if let Some(ref ss) = query.series_status { - count_builder = count_builder.bind(ss); - data_builder = data_builder.bind(ss); - } - if let Some(ref mp) = query.metadata_provider { - if mp != "linked" && mp != "unlinked" { - count_builder = count_builder.bind(mp); - data_builder = data_builder.bind(mp); - } - } - - data_builder = data_builder.bind(limit).bind(offset); - - let (count_row, rows) = tokio::try_join!( - count_builder.fetch_one(&state.pool), - data_builder.fetch_all(&state.pool), - )?; - let total: i64 = count_row.get(0); - - let items: Vec = rows - .iter() - .map(|row| SeriesItem { - name: 
row.get("name"), - book_count: row.get("book_count"), - books_read_count: row.get("books_read_count"), - first_book_id: row.get("first_book_id"), - library_id, - series_status: row.get("series_status"), - missing_count: row.get("missing_count"), - metadata_provider: row.get("metadata_provider"), - }) - .collect(); - - Ok(Json(SeriesPage { - items, - total, - page, - limit, - })) -} - -#[derive(Deserialize, ToSchema)] -pub struct ListAllSeriesQuery { - #[schema(value_type = Option, example = "dragon")] - pub q: Option, - #[schema(value_type = Option)] - pub library_id: Option, - #[schema(value_type = Option, example = "unread,reading")] - pub reading_status: Option, - /// Filter by series status (e.g. "ongoing", "ended") - #[schema(value_type = Option, example = "ongoing")] - pub series_status: Option, - /// Filter series with missing books: "true" to show only series with missing books - #[schema(value_type = Option, example = "true")] - pub has_missing: Option, - /// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider) - #[schema(value_type = Option, example = "google_books")] - pub metadata_provider: Option, - #[schema(value_type = Option, example = 1)] - pub page: Option, - #[schema(value_type = Option, example = 50)] - pub limit: Option, - /// Sort order: "title" (default) or "latest" (most recently added first) - #[schema(value_type = Option, example = "latest")] - pub sort: Option, -} - -/// List all series across libraries with optional filtering and pagination -#[utoipa::path( - get, - path = "/series", - tag = "books", - params( - ("q" = Option, Query, description = "Filter by series name (case-insensitive, partial match)"), - ("library_id" = Option, Query, description = "Filter by library ID"), - ("reading_status" = Option, Query, description = "Filter by reading status, comma-separated (e.g. 
'unread,reading')"), - ("metadata_provider" = Option, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"), - ("page" = Option, Query, description = "Page number (1-indexed, default 1)"), - ("limit" = Option, Query, description = "Items per page (max 200, default 50)"), - ("sort" = Option, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"), - ), - responses( - (status = 200, body = SeriesPage), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn list_all_series( - State(state): State, - Query(query): Query, -) -> Result, ApiError> { - let limit = query.limit.unwrap_or(50).clamp(1, 200); - let page = query.page.unwrap_or(1).max(1); - let offset = (page - 1) * limit; - - let reading_statuses: Option> = query.reading_status.as_deref().map(|s| { - s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect() - }); - - let series_status_expr = r#"CASE - WHEN sc.books_read_count = sc.book_count THEN 'read' - WHEN sc.books_read_count = 0 THEN 'unread' - ELSE 'reading' - END"#; - - let has_missing = query.has_missing.as_deref() == Some("true"); - - let mut p: usize = 0; - - let lib_cond = if query.library_id.is_some() { - p += 1; format!("WHERE library_id = ${p}") - } else { - "WHERE TRUE".to_string() - }; - - let q_cond = if query.q.is_some() { - p += 1; format!("AND sc.name ILIKE ${p}") - } else { String::new() }; - - let rs_cond = if reading_statuses.is_some() { - p += 1; format!("AND {series_status_expr} = ANY(${p})") - } else { String::new() }; - - let ss_cond = if query.series_status.is_some() { - p += 1; format!("AND LOWER(sm.status) = ${p}") - } else { String::new() }; - - let missing_cond = if has_missing { - "AND mc.missing_count > 0".to_string() - } else { String::new() }; - - let metadata_provider_cond = match query.metadata_provider.as_deref() { - Some("unlinked") 
=> "AND ml.provider IS NULL".to_string(), - Some("linked") => "AND ml.provider IS NOT NULL".to_string(), - Some(_) => { p += 1; format!("AND ml.provider = ${p}") }, - None => String::new(), - }; - - // Missing counts CTE — needs library_id filter when filtering by library - let missing_cte = if query.library_id.is_some() { - r#" - missing_counts AS ( - SELECT eml.series_name, eml.library_id, - COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count - FROM external_metadata_links eml - JOIN external_book_metadata ebm ON ebm.link_id = eml.id - WHERE eml.library_id = $1 AND eml.status = 'approved' - GROUP BY eml.series_name, eml.library_id - ) - "#.to_string() - } else { - r#" - missing_counts AS ( - SELECT eml.series_name, eml.library_id, - COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count - FROM external_metadata_links eml - JOIN external_book_metadata ebm ON ebm.link_id = eml.id - WHERE eml.status = 'approved' - GROUP BY eml.series_name, eml.library_id - ) - "#.to_string() - }; - - let metadata_links_cte = r#" - metadata_links AS ( - SELECT DISTINCT ON (eml.series_name, eml.library_id) - eml.series_name, eml.library_id, eml.provider - FROM external_metadata_links eml - WHERE eml.status = 'approved' - ORDER BY eml.series_name, eml.library_id, eml.created_at DESC - ) - "#; - - let count_sql = format!( - r#" - WITH sorted_books AS ( - SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id, library_id - FROM books {lib_cond} - ), - series_counts AS ( - SELECT sb.name, sb.library_id, - COUNT(*) as book_count, - COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count - FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id - GROUP BY sb.name, sb.library_id - ), - {missing_cte}, - {metadata_links_cte} - SELECT COUNT(*) FROM series_counts sc - LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name - LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND 
mc.library_id = sc.library_id - LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id - WHERE TRUE {q_cond} {rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} - "# - ); - - let series_order_clause = if query.sort.as_deref() == Some("latest") { - "sc.latest_updated_at DESC".to_string() - } else { - "REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(sc.name), '\\d+'))[1]::int, 0), sc.name ASC".to_string() - }; - - let limit_p = p + 1; - let offset_p = p + 2; - - let data_sql = format!( - r#" - WITH sorted_books AS ( - SELECT - COALESCE(NULLIF(series, ''), 'unclassified') as name, - id, - library_id, - updated_at, - ROW_NUMBER() OVER ( - PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') - ORDER BY - volume NULLS LAST, - REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), - COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), - title ASC - ) as rn - FROM books - {lib_cond} - ), - series_counts AS ( - SELECT - sb.name, - sb.library_id, - COUNT(*) as book_count, - COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count, - MAX(sb.updated_at) as latest_updated_at - FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id - GROUP BY sb.name, sb.library_id - ), - {missing_cte}, - {metadata_links_cte} - SELECT - sc.name, - sc.book_count, - sc.books_read_count, - sb.id as first_book_id, - sb.library_id, - sm.status as series_status, - mc.missing_count, - ml.provider as metadata_provider - FROM series_counts sc - JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 - LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name - LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id - LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id - WHERE TRUE - {q_cond} - {rs_cond} - {ss_cond} - {missing_cond} - {metadata_provider_cond} - ORDER BY {series_order_clause} - 
LIMIT ${limit_p} OFFSET ${offset_p} - "# - ); - - let q_pattern = query.q.as_deref().map(|q| format!("%{}%", q)); - - let mut count_builder = sqlx::query(&count_sql); - let mut data_builder = sqlx::query(&data_sql); - - if let Some(lib_id) = query.library_id { - count_builder = count_builder.bind(lib_id); - data_builder = data_builder.bind(lib_id); - } - if let Some(ref pat) = q_pattern { - count_builder = count_builder.bind(pat); - data_builder = data_builder.bind(pat); - } - if let Some(ref statuses) = reading_statuses { - count_builder = count_builder.bind(statuses.clone()); - data_builder = data_builder.bind(statuses.clone()); - } - if let Some(ref ss) = query.series_status { - count_builder = count_builder.bind(ss); - data_builder = data_builder.bind(ss); - } - if let Some(ref mp) = query.metadata_provider { - if mp != "linked" && mp != "unlinked" { - count_builder = count_builder.bind(mp); - data_builder = data_builder.bind(mp); - } - } - - data_builder = data_builder.bind(limit).bind(offset); - - let (count_row, rows) = tokio::try_join!( - count_builder.fetch_one(&state.pool), - data_builder.fetch_all(&state.pool), - )?; - let total: i64 = count_row.get(0); - - let items: Vec = rows - .iter() - .map(|row| SeriesItem { - name: row.get("name"), - book_count: row.get("book_count"), - books_read_count: row.get("books_read_count"), - first_book_id: row.get("first_book_id"), - library_id: row.get("library_id"), - series_status: row.get("series_status"), - missing_count: row.get("missing_count"), - metadata_provider: row.get("metadata_provider"), - }) - .collect(); - - Ok(Json(SeriesPage { - items, - total, - page, - limit, - })) -} - -/// List all distinct series status values present in the database -#[utoipa::path( - get, - path = "/series/statuses", - tag = "books", - responses( - (status = 200, body = Vec), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn series_statuses( - State(state): State, -) -> Result>, 
ApiError> { - let rows: Vec = sqlx::query_scalar( - r#"SELECT DISTINCT s FROM ( - SELECT LOWER(status) AS s FROM series_metadata WHERE status IS NOT NULL - UNION - SELECT mapped_status AS s FROM status_mappings WHERE mapped_status IS NOT NULL - ) t ORDER BY s"#, - ) - .fetch_all(&state.pool) - .await?; - Ok(Json(rows)) -} - -/// List distinct raw provider statuses from external metadata links -#[utoipa::path( - get, - path = "/series/provider-statuses", - tag = "books", - responses( - (status = 200, body = Vec), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn provider_statuses( - State(state): State, -) -> Result>, ApiError> { - let rows: Vec = sqlx::query_scalar( - r#"SELECT DISTINCT lower(metadata_json->>'status') AS s - FROM external_metadata_links - WHERE metadata_json->>'status' IS NOT NULL - AND metadata_json->>'status' != '' - ORDER BY s"#, - ) - .fetch_all(&state.pool) - .await?; - Ok(Json(rows)) -} - -#[derive(Deserialize, ToSchema)] -pub struct OngoingQuery { - #[schema(value_type = Option, example = 10)] - pub limit: Option, -} - -/// List ongoing series (partially read, sorted by most recent activity) -#[utoipa::path( - get, - path = "/series/ongoing", - tag = "books", - params( - ("limit" = Option, Query, description = "Max items to return (default 10, max 50)"), - ), - responses( - (status = 200, body = Vec), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn ongoing_series( - State(state): State, - Query(query): Query, -) -> Result>, ApiError> { - let limit = query.limit.unwrap_or(10).clamp(1, 50); - - let rows = sqlx::query( - r#" - WITH series_stats AS ( - SELECT - COALESCE(NULLIF(b.series, ''), 'unclassified') AS name, - COUNT(*) AS book_count, - COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count, - MAX(brp.last_read_at) AS last_read_at - FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id - GROUP BY 
COALESCE(NULLIF(b.series, ''), 'unclassified') - HAVING ( - COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 - AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*) - ) - ), - first_books AS ( - SELECT - COALESCE(NULLIF(series, ''), 'unclassified') AS name, - id, - library_id, - ROW_NUMBER() OVER ( - PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') - ORDER BY - volume NULLS LAST, - REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), - COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), - title ASC - ) AS rn - FROM books - ) - SELECT ss.name, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id - FROM series_stats ss - JOIN first_books fb ON fb.name = ss.name AND fb.rn = 1 - ORDER BY ss.last_read_at DESC NULLS LAST - LIMIT $1 - "#, - ) - .bind(limit) - .fetch_all(&state.pool) - .await?; - - let items: Vec = rows - .iter() - .map(|row| SeriesItem { - name: row.get("name"), - book_count: row.get("book_count"), - books_read_count: row.get("books_read_count"), - first_book_id: row.get("first_book_id"), - library_id: row.get("library_id"), - series_status: None, - missing_count: None, - metadata_provider: None, - }) - .collect(); - - Ok(Json(items)) -} - -/// List next unread book for each ongoing series (sorted by most recent activity) -#[utoipa::path( - get, - path = "/books/ongoing", - tag = "books", - params( - ("limit" = Option, Query, description = "Max items to return (default 10, max 50)"), - ), - responses( - (status = 200, body = Vec), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn ongoing_books( - State(state): State, - Query(query): Query, -) -> Result>, ApiError> { - let limit = query.limit.unwrap_or(10).clamp(1, 50); - - let rows = sqlx::query( - r#" - WITH ongoing_series AS ( - SELECT - COALESCE(NULLIF(b.series, ''), 'unclassified') AS name, - MAX(brp.last_read_at) AS series_last_read_at - FROM books b - LEFT JOIN book_reading_progress 
brp ON brp.book_id = b.id - GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified') - HAVING ( - COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 - AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*) - ) - ), - next_books AS ( - SELECT - b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, - b.language, b.page_count, b.thumbnail_path, b.updated_at, - COALESCE(brp.status, 'unread') AS reading_status, - brp.current_page AS reading_current_page, - brp.last_read_at AS reading_last_read_at, - os.series_last_read_at, - ROW_NUMBER() OVER ( - PARTITION BY COALESCE(NULLIF(b.series, ''), 'unclassified') - ORDER BY b.volume NULLS LAST, b.title - ) AS rn - FROM books b - JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id - WHERE COALESCE(brp.status, 'unread') != 'read' - ) - SELECT id, library_id, kind, format, title, author, authors, series, volume, language, page_count, - thumbnail_path, updated_at, reading_status, reading_current_page, reading_last_read_at - FROM next_books - WHERE rn = 1 - ORDER BY series_last_read_at DESC NULLS LAST - LIMIT $1 - "#, - ) - .bind(limit) - .fetch_all(&state.pool) - .await?; - - let items: Vec = rows - .iter() - .map(|row| { - let thumbnail_path: Option = row.get("thumbnail_path"); - BookItem { - id: row.get("id"), - library_id: row.get("library_id"), - kind: row.get("kind"), - format: row.get("format"), - title: row.get("title"), - author: row.get("author"), - authors: row.get::, _>("authors"), - series: row.get("series"), - volume: row.get("volume"), - language: row.get("language"), - page_count: row.get("page_count"), - thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", row.get::("id"))), - updated_at: row.get("updated_at"), - reading_status: row.get("reading_status"), - reading_current_page: row.get("reading_current_page"), - reading_last_read_at: 
row.get("reading_last_read_at"), - } - }) - .collect(); - - Ok(Json(items)) -} - -fn remap_libraries_path(path: &str) -> String { +pub(crate) fn remap_libraries_path(path: &str) -> String { if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") { if path.starts_with("/libraries/") { return path.replacen("/libraries", &root, 1); @@ -1141,6 +366,8 @@ fn unmap_libraries_path(path: &str) -> String { path.to_string() } +// ─── Convert CBR → CBZ ─────────────────────────────────────────────────────── + /// Enqueue a CBR → CBZ conversion job for a single book #[utoipa::path( post, @@ -1341,234 +568,7 @@ pub async fn update_book( })) } -#[derive(Serialize, ToSchema)] -pub struct SeriesMetadata { - /// Authors of the series (series-level metadata, distinct from per-book author field) - pub authors: Vec, - pub description: Option, - pub publishers: Vec, - pub start_year: Option, - pub total_volumes: Option, - /// Series status: "ongoing", "ended", "hiatus", "cancelled", or null - pub status: Option, - /// Convenience: author from first book (for pre-filling the per-book apply section) - pub book_author: Option, - pub book_language: Option, - /// Fields locked from external metadata sync, e.g. 
{"authors": true, "description": true} - pub locked_fields: serde_json::Value, -} - -/// Get metadata for a specific series -#[utoipa::path( - get, - path = "/libraries/{library_id}/series/{name}/metadata", - tag = "books", - params( - ("library_id" = String, Path, description = "Library UUID"), - ("name" = String, Path, description = "Series name"), - ), - responses( - (status = 200, body = SeriesMetadata), - (status = 401, description = "Unauthorized"), - ), - security(("Bearer" = [])) -)] -pub async fn get_series_metadata( - State(state): State, - Path((library_id, name)): Path<(Uuid, String)>, -) -> Result, ApiError> { - // author/language from first book of series - let books_row = if name == "unclassified" { - sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND (series IS NULL OR series = '') LIMIT 1") - .bind(library_id) - .fetch_optional(&state.pool) - .await? - } else { - sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND series = $2 LIMIT 1") - .bind(library_id) - .bind(&name) - .fetch_optional(&state.pool) - .await? 
- }; - - let meta_row = sqlx::query( - "SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2" - ) - .bind(library_id) - .bind(&name) - .fetch_optional(&state.pool) - .await?; - - Ok(Json(SeriesMetadata { - authors: meta_row.as_ref().map(|r| r.get::, _>("authors")).unwrap_or_default(), - description: meta_row.as_ref().and_then(|r| r.get("description")), - publishers: meta_row.as_ref().map(|r| r.get::, _>("publishers")).unwrap_or_default(), - start_year: meta_row.as_ref().and_then(|r| r.get("start_year")), - total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")), - status: meta_row.as_ref().and_then(|r| r.get("status")), - book_author: books_row.as_ref().and_then(|r| r.get("author")), - book_language: books_row.as_ref().and_then(|r| r.get("language")), - locked_fields: meta_row.as_ref().map(|r| r.get::("locked_fields")).unwrap_or(serde_json::json!({})), - })) -} - -/// `author` and `language` are wrapped in an extra Option so we can distinguish -/// "absent from JSON" (keep books unchanged) from "present as null" (clear the field). 
-#[derive(Deserialize, ToSchema)] -pub struct UpdateSeriesRequest { - pub new_name: String, - /// Series-level authors list (stored in series_metadata) - #[serde(default)] - pub authors: Vec, - /// Per-book author propagation: absent = keep books unchanged, present = overwrite all books - #[serde(default, skip_serializing_if = "Option::is_none")] - pub author: Option>, - /// Per-book language propagation: absent = keep books unchanged, present = overwrite all books - #[serde(default, skip_serializing_if = "Option::is_none")] - pub language: Option>, - pub description: Option, - #[serde(default)] - pub publishers: Vec, - pub start_year: Option, - pub total_volumes: Option, - /// Series status: "ongoing", "ended", "hiatus", "cancelled", or null - pub status: Option, - /// Fields locked from external metadata sync - #[serde(default)] - pub locked_fields: Option, -} - -#[derive(Serialize, ToSchema)] -pub struct UpdateSeriesResponse { - pub updated: u64, -} - -/// Update metadata for all books in a series -#[utoipa::path( - patch, - path = "/libraries/{library_id}/series/{name}", - tag = "books", - params( - ("library_id" = String, Path, description = "Library UUID"), - ("name" = String, Path, description = "Series name (use 'unclassified' for books without series)"), - ), - request_body = UpdateSeriesRequest, - responses( - (status = 200, body = UpdateSeriesResponse), - (status = 400, description = "Invalid request"), - (status = 401, description = "Unauthorized"), - (status = 403, description = "Forbidden - Admin scope required"), - ), - security(("Bearer" = [])) -)] -pub async fn update_series( - State(state): State, - Path((library_id, name)): Path<(Uuid, String)>, - Json(body): Json, -) -> Result, ApiError> { - let new_name = body.new_name.trim().to_string(); - if new_name.is_empty() { - return Err(ApiError::bad_request("series name cannot be empty")); - } - // author/language: None = absent (keep books unchanged), Some(v) = apply to all books - let apply_author = 
body.author.is_some(); - let author_value = body.author.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); - let apply_language = body.language.is_some(); - let language_value = body.language.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); - let description = body.description.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); - let publishers: Vec = body.publishers.iter() - .map(|p| p.trim().to_string()) - .filter(|p| !p.is_empty()) - .collect(); - let new_series_value: Option = if new_name == "unclassified" { None } else { Some(new_name.clone()) }; - - // 1. Update books: always update series name; author/language only if opted-in - // $1=library_id, $2=new_series_value, $3=apply_author, $4=author_value, - // $5=apply_language, $6=language_value, [$7=old_name] - let result = if name == "unclassified" { - sqlx::query( - "UPDATE books \ - SET series = $2, \ - author = CASE WHEN $3 THEN $4 ELSE author END, \ - language = CASE WHEN $5 THEN $6 ELSE language END, \ - updated_at = NOW() \ - WHERE library_id = $1 AND (series IS NULL OR series = '')" - ) - .bind(library_id) - .bind(&new_series_value) - .bind(apply_author) - .bind(&author_value) - .bind(apply_language) - .bind(&language_value) - .execute(&state.pool) - .await? - } else { - sqlx::query( - "UPDATE books \ - SET series = $2, \ - author = CASE WHEN $3 THEN $4 ELSE author END, \ - language = CASE WHEN $5 THEN $6 ELSE language END, \ - updated_at = NOW() \ - WHERE library_id = $1 AND series = $7" - ) - .bind(library_id) - .bind(&new_series_value) - .bind(apply_author) - .bind(&author_value) - .bind(apply_language) - .bind(&language_value) - .bind(&name) - .execute(&state.pool) - .await? - }; - - // 2. 
Upsert series_metadata (keyed by new_name) - let meta_name = new_series_value.as_deref().unwrap_or("unclassified"); - let authors: Vec = body.authors.iter() - .map(|a| a.trim().to_string()) - .filter(|a| !a.is_empty()) - .collect(); - let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({})); - sqlx::query( - r#" - INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, status, locked_fields, updated_at) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW()) - ON CONFLICT (library_id, name) DO UPDATE - SET authors = EXCLUDED.authors, - description = EXCLUDED.description, - publishers = EXCLUDED.publishers, - start_year = EXCLUDED.start_year, - total_volumes = EXCLUDED.total_volumes, - status = EXCLUDED.status, - locked_fields = EXCLUDED.locked_fields, - updated_at = NOW() - "# - ) - .bind(library_id) - .bind(meta_name) - .bind(&authors) - .bind(&description) - .bind(&publishers) - .bind(body.start_year) - .bind(body.total_volumes) - .bind(&body.status) - .bind(&locked_fields) - .execute(&state.pool) - .await?; - - // 3. 
If renamed, move series_metadata from old name to new name - if name != "unclassified" && new_name != name { - sqlx::query( - "DELETE FROM series_metadata WHERE library_id = $1 AND name = $2" - ) - .bind(library_id) - .bind(&name) - .execute(&state.pool) - .await?; - } - - Ok(Json(UpdateSeriesResponse { updated: result.rows_affected() })) -} +// ─── Thumbnail ──────────────────────────────────────────────────────────────── use axum::{ body::Body, diff --git a/apps/api/src/libraries.rs b/apps/api/src/libraries.rs index 33f3297..f534c1a 100644 --- a/apps/api/src/libraries.rs +++ b/apps/api/src/libraries.rs @@ -48,7 +48,6 @@ pub struct CreateLibraryRequest { responses( (status = 200, body = Vec), (status = 401, description = "Unauthorized"), - (status = 403, description = "Forbidden - Admin scope required"), ), security(("Bearer" = [])) )] @@ -221,7 +220,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest}; (status = 200, body = IndexJobResponse), (status = 404, description = "Library not found"), (status = 401, description = "Unauthorized"), - (status = 403, description = "Forbidden - Admin scope required"), ), security(("Bearer" = [])) )] diff --git a/apps/api/src/main.rs b/apps/api/src/main.rs index cf09a6d..2204ff4 100644 --- a/apps/api/src/main.rs +++ b/apps/api/src/main.rs @@ -17,6 +17,7 @@ mod prowlarr; mod qbittorrent; mod reading_progress; mod search; +mod series; mod settings; mod state; mod stats; @@ -86,14 +87,13 @@ async fn main() -> anyhow::Result<()> { }; let admin_routes = Router::new() - .route("/libraries", get(libraries::list_libraries).post(libraries::create_library)) + .route("/libraries", axum::routing::post(libraries::create_library)) .route("/libraries/:id", delete(libraries::delete_library)) - .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library)) .route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring)) .route("/libraries/:id/metadata-provider", 
axum::routing::patch(libraries::update_metadata_provider)) .route("/books/:id", axum::routing::patch(books::update_book)) .route("/books/:id/convert", axum::routing::post(books::convert_book)) - .route("/libraries/:library_id/series/:name", axum::routing::patch(books::update_series)) + .route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series)) .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild)) .route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild)) .route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate)) @@ -133,18 +133,20 @@ async fn main() -> anyhow::Result<()> { )); let read_routes = Router::new() + .route("/libraries", get(libraries::list_libraries)) + .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library)) .route("/books", get(books::list_books)) - .route("/books/ongoing", get(books::ongoing_books)) + .route("/books/ongoing", get(series::ongoing_books)) .route("/books/:id", get(books::get_book)) .route("/books/:id/thumbnail", get(books::get_thumbnail)) .route("/books/:id/pages/:n", get(pages::get_page)) .route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress)) - .route("/libraries/:library_id/series", get(books::list_series)) - .route("/libraries/:library_id/series/:name/metadata", get(books::get_series_metadata)) - .route("/series", get(books::list_all_series)) - .route("/series/ongoing", get(books::ongoing_series)) - .route("/series/statuses", get(books::series_statuses)) - .route("/series/provider-statuses", get(books::provider_statuses)) + .route("/libraries/:library_id/series", get(series::list_series)) + .route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata)) + .route("/series", get(series::list_all_series)) + .route("/series/ongoing", get(series::ongoing_series)) + .route("/series/statuses", 
get(series::series_statuses)) + .route("/series/provider-statuses", get(series::provider_statuses)) .route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read)) .route("/authors", get(authors::list_authors)) .route("/stats", get(stats::get_stats)) diff --git a/apps/api/src/openapi.rs b/apps/api/src/openapi.rs index 5f65a8f..2d5c2e3 100644 --- a/apps/api/src/openapi.rs +++ b/apps/api/src/openapi.rs @@ -10,14 +10,14 @@ use utoipa::OpenApi; crate::reading_progress::update_reading_progress, crate::reading_progress::mark_series_read, crate::books::get_thumbnail, - crate::books::list_series, - crate::books::list_all_series, - crate::books::ongoing_series, - crate::books::ongoing_books, + crate::series::list_series, + crate::series::list_all_series, + crate::series::ongoing_series, + crate::series::ongoing_books, crate::books::convert_book, crate::books::update_book, - crate::books::get_series_metadata, - crate::books::update_series, + crate::series::get_series_metadata, + crate::series::update_series, crate::pages::get_page, crate::search::search_books, crate::index_jobs::enqueue_rebuild, @@ -35,6 +35,7 @@ use utoipa::OpenApi; crate::libraries::delete_library, crate::libraries::scan_library, crate::libraries::update_monitoring, + crate::libraries::update_metadata_provider, crate::tokens::list_tokens, crate::tokens::create_token, crate::tokens::revoke_token, @@ -54,8 +55,8 @@ use utoipa::OpenApi; crate::metadata::get_metadata_links, crate::metadata::get_missing_books, crate::metadata::delete_metadata_link, - crate::books::series_statuses, - crate::books::provider_statuses, + crate::series::series_statuses, + crate::series::provider_statuses, crate::settings::list_status_mappings, crate::settings::upsert_status_mapping, crate::settings::delete_status_mapping, @@ -63,6 +64,14 @@ use utoipa::OpenApi; crate::prowlarr::test_prowlarr, crate::qbittorrent::add_torrent, crate::qbittorrent::test_qbittorrent, + crate::metadata_batch::start_batch, + 
crate::metadata_batch::get_batch_report, + crate::metadata_batch::get_batch_results, + crate::metadata_refresh::start_refresh, + crate::metadata_refresh::get_refresh_report, + crate::komga::sync_komga_read_books, + crate::komga::list_sync_reports, + crate::komga::get_sync_report, ), components( schemas( @@ -74,14 +83,14 @@ use utoipa::OpenApi; crate::reading_progress::UpdateReadingProgressRequest, crate::reading_progress::MarkSeriesReadRequest, crate::reading_progress::MarkSeriesReadResponse, - crate::books::SeriesItem, - crate::books::SeriesPage, - crate::books::ListAllSeriesQuery, - crate::books::OngoingQuery, + crate::series::SeriesItem, + crate::series::SeriesPage, + crate::series::ListAllSeriesQuery, + crate::series::OngoingQuery, crate::books::UpdateBookRequest, - crate::books::SeriesMetadata, - crate::books::UpdateSeriesRequest, - crate::books::UpdateSeriesResponse, + crate::series::SeriesMetadata, + crate::series::UpdateSeriesRequest, + crate::series::UpdateSeriesResponse, crate::pages::PageQuery, crate::search::SearchQuery, crate::search::SearchResponse, @@ -96,6 +105,7 @@ use utoipa::OpenApi; crate::libraries::LibraryResponse, crate::libraries::CreateLibraryRequest, crate::libraries::UpdateMonitoringRequest, + crate::libraries::UpdateMetadataProviderRequest, crate::tokens::CreateTokenRequest, crate::tokens::TokenResponse, crate::tokens::CreatedTokenResponse, @@ -137,7 +147,16 @@ use utoipa::OpenApi; crate::prowlarr::ProwlarrRelease, crate::prowlarr::ProwlarrCategory, crate::prowlarr::ProwlarrSearchResponse, + crate::prowlarr::MissingVolumeInput, crate::prowlarr::ProwlarrTestResponse, + crate::metadata_batch::MetadataBatchRequest, + crate::metadata_batch::MetadataBatchReportDto, + crate::metadata_batch::MetadataBatchResultDto, + crate::metadata_refresh::MetadataRefreshRequest, + crate::metadata_refresh::MetadataRefreshReportDto, + crate::komga::KomgaSyncRequest, + crate::komga::KomgaSyncResponse, + crate::komga::KomgaSyncReportSummary, ErrorResponse, ) ), 
@@ -145,11 +164,16 @@ use utoipa::OpenApi; ("Bearer" = []) ), tags( - (name = "authors", description = "Author browsing and listing"), - (name = "books", description = "Read-only endpoints for browsing and searching books"), + (name = "books", description = "Book browsing, details and management"), + (name = "series", description = "Series browsing, filtering and management"), + (name = "search", description = "Full-text search across books and series"), (name = "reading-progress", description = "Reading progress tracking per book"), - (name = "libraries", description = "Library management endpoints (Admin only)"), + (name = "authors", description = "Author browsing and listing"), + (name = "stats", description = "Collection statistics and dashboard data"), + (name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"), (name = "indexing", description = "Search index management and job control (Admin only)"), + (name = "metadata", description = "External metadata providers and matching (Admin only)"), + (name = "komga", description = "Komga read-status sync (Admin only)"), (name = "tokens", description = "API token management (Admin only)"), (name = "settings", description = "Application settings and cache management (Admin only)"), (name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"), diff --git a/apps/api/src/search.rs b/apps/api/src/search.rs index df2c84f..06e2cd1 100644 --- a/apps/api/src/search.rs +++ b/apps/api/src/search.rs @@ -43,7 +43,7 @@ pub struct SearchResponse { #[utoipa::path( get, path = "/search", - tag = "books", + tag = "search", params( ("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"), ("library_id" = Option, Query, description = "Filter by library ID"), diff --git a/apps/api/src/series.rs b/apps/api/src/series.rs new file mode 100644 index 0000000..e5d18e5 --- /dev/null +++ b/apps/api/src/series.rs @@ -0,0 +1,1015 @@ 
+use axum::{extract::{Path, Query, State}, Json}; +use serde::{Deserialize, Serialize}; +use sqlx::Row; +use uuid::Uuid; +use utoipa::ToSchema; + +use crate::{books::BookItem, error::ApiError, state::AppState}; + +#[derive(Serialize, ToSchema)] +pub struct SeriesItem { + pub name: String, + pub book_count: i64, + pub books_read_count: i64, + #[schema(value_type = String)] + pub first_book_id: Uuid, + #[schema(value_type = String)] + pub library_id: Uuid, + pub series_status: Option, + pub missing_count: Option, + pub metadata_provider: Option, +} + +#[derive(Serialize, ToSchema)] +pub struct SeriesPage { + pub items: Vec, + pub total: i64, + pub page: i64, + pub limit: i64, +} + +#[derive(Deserialize, ToSchema)] +pub struct ListSeriesQuery { + #[schema(value_type = Option, example = "dragon")] + pub q: Option, + #[schema(value_type = Option, example = "unread,reading")] + pub reading_status: Option, + /// Filter by series status (e.g. "ongoing", "ended") + #[schema(value_type = Option, example = "ongoing")] + pub series_status: Option, + /// Filter series with missing books: "true" to show only series with missing books + #[schema(value_type = Option, example = "true")] + pub has_missing: Option, + /// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider) + #[schema(value_type = Option, example = "google_books")] + pub metadata_provider: Option, + #[schema(value_type = Option, example = 1)] + pub page: Option, + #[schema(value_type = Option, example = 50)] + pub limit: Option, +} + +/// List all series in a library with pagination +#[utoipa::path( + get, + path = "/libraries/{library_id}/series", + tag = "series", + params( + ("library_id" = String, Path, description = "Library UUID"), + ("q" = Option, Query, description = "Filter by series name (case-insensitive, partial match)"), + ("reading_status" = Option, Query, description = "Filter by reading status, comma-separated (e.g. 
'unread,reading')"), + ("metadata_provider" = Option, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"), + ("page" = Option, Query, description = "Page number (1-indexed, default 1)"), + ("limit" = Option, Query, description = "Items per page (max 200, default 50)"), + ), + responses( + (status = 200, body = SeriesPage), + (status = 401, description = "Unauthorized"), + ), + security(("Bearer" = [])) +)] +pub async fn list_series( + State(state): State, + Path(library_id): Path, + Query(query): Query, +) -> Result, ApiError> { + let limit = query.limit.unwrap_or(50).clamp(1, 200); + let page = query.page.unwrap_or(1).max(1); + let offset = (page - 1) * limit; + + let reading_statuses: Option> = query.reading_status.as_deref().map(|s| { + s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect() + }); + + let series_status_expr = r#"CASE + WHEN sc.books_read_count = sc.book_count THEN 'read' + WHEN sc.books_read_count = 0 THEN 'unread' + ELSE 'reading' + END"#; + + let has_missing = query.has_missing.as_deref() == Some("true"); + + // Paramètres dynamiques — $1 = library_id fixe, puis optionnels dans l'ordre + let mut p: usize = 1; + + let q_cond = if query.q.is_some() { + p += 1; format!("AND sc.name ILIKE ${p}") + } else { String::new() }; + + let count_rs_cond = if reading_statuses.is_some() { + p += 1; format!("AND {series_status_expr} = ANY(${p})") + } else { String::new() }; + + let ss_cond = if query.series_status.is_some() { + p += 1; format!("AND LOWER(sm.status) = ${p}") + } else { String::new() }; + + let missing_cond = if has_missing { + "AND mc.missing_count > 0".to_string() + } else { String::new() }; + + let metadata_provider_cond = match query.metadata_provider.as_deref() { + Some("unlinked") => "AND ml.provider IS NULL".to_string(), + Some("linked") => "AND ml.provider IS NOT NULL".to_string(), + Some(_) => { p += 1; format!("AND 
ml.provider = ${p}") }, + None => String::new(), + }; + + let missing_cte = r#" + missing_counts AS ( + SELECT eml.series_name, + COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count + FROM external_metadata_links eml + JOIN external_book_metadata ebm ON ebm.link_id = eml.id + WHERE eml.library_id = $1 AND eml.status = 'approved' + GROUP BY eml.series_name + ) + "#.to_string(); + + let metadata_links_cte = r#" + metadata_links AS ( + SELECT DISTINCT ON (eml.series_name, eml.library_id) + eml.series_name, eml.library_id, eml.provider + FROM external_metadata_links eml + WHERE eml.status = 'approved' + ORDER BY eml.series_name, eml.library_id, eml.created_at DESC + ) + "#; + + let count_sql = format!( + r#" + WITH sorted_books AS ( + SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id + FROM books WHERE library_id = $1 + ), + series_counts AS ( + SELECT sb.name, + COUNT(*) as book_count, + COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count + FROM sorted_books sb + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + GROUP BY sb.name + ), + {missing_cte}, + {metadata_links_cte} + SELECT COUNT(*) FROM series_counts sc + LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name + LEFT JOIN missing_counts mc ON mc.series_name = sc.name + LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 + WHERE TRUE {q_cond} {count_rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} + "# + ); + + let limit_p = p + 1; + let offset_p = p + 2; + + let data_sql = format!( + r#" + WITH sorted_books AS ( + SELECT + COALESCE(NULLIF(series, ''), 'unclassified') as name, + id, + ROW_NUMBER() OVER ( + PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') + ORDER BY + volume NULLS LAST, + REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), + COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), + title ASC + ) as rn + FROM books + WHERE library_id = $1 + ), + series_counts AS ( + SELECT + 
sb.name, + COUNT(*) as book_count, + COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count + FROM sorted_books sb + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + GROUP BY sb.name + ), + {missing_cte}, + {metadata_links_cte} + SELECT + sc.name, + sc.book_count, + sc.books_read_count, + sb.id as first_book_id, + sm.status as series_status, + mc.missing_count, + ml.provider as metadata_provider + FROM series_counts sc + JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 + LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name + LEFT JOIN missing_counts mc ON mc.series_name = sc.name + LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 + WHERE TRUE + {q_cond} + {count_rs_cond} + {ss_cond} + {missing_cond} + {metadata_provider_cond} + ORDER BY + REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), + COALESCE( + (REGEXP_MATCH(LOWER(sc.name), '\d+'))[1]::int, + 0 + ), + sc.name ASC + LIMIT ${limit_p} OFFSET ${offset_p} + "# + ); + + let q_pattern = query.q.as_deref().map(|q| format!("%{}%", q)); + + let mut count_builder = sqlx::query(&count_sql).bind(library_id); + let mut data_builder = sqlx::query(&data_sql).bind(library_id); + + if let Some(ref pat) = q_pattern { + count_builder = count_builder.bind(pat); + data_builder = data_builder.bind(pat); + } + if let Some(ref statuses) = reading_statuses { + count_builder = count_builder.bind(statuses.clone()); + data_builder = data_builder.bind(statuses.clone()); + } + if let Some(ref ss) = query.series_status { + count_builder = count_builder.bind(ss); + data_builder = data_builder.bind(ss); + } + if let Some(ref mp) = query.metadata_provider { + if mp != "linked" && mp != "unlinked" { + count_builder = count_builder.bind(mp); + data_builder = data_builder.bind(mp); + } + } + + data_builder = data_builder.bind(limit).bind(offset); + + let (count_row, rows) = tokio::try_join!( + count_builder.fetch_one(&state.pool), + 
data_builder.fetch_all(&state.pool), + )?; + let total: i64 = count_row.get(0); + + let items: Vec = rows + .iter() + .map(|row| SeriesItem { + name: row.get("name"), + book_count: row.get("book_count"), + books_read_count: row.get("books_read_count"), + first_book_id: row.get("first_book_id"), + library_id, + series_status: row.get("series_status"), + missing_count: row.get("missing_count"), + metadata_provider: row.get("metadata_provider"), + }) + .collect(); + + Ok(Json(SeriesPage { + items, + total, + page, + limit, + })) +} + +#[derive(Deserialize, ToSchema)] +pub struct ListAllSeriesQuery { + #[schema(value_type = Option, example = "dragon")] + pub q: Option, + #[schema(value_type = Option)] + pub library_id: Option, + #[schema(value_type = Option, example = "unread,reading")] + pub reading_status: Option, + /// Filter by series status (e.g. "ongoing", "ended") + #[schema(value_type = Option, example = "ongoing")] + pub series_status: Option, + /// Filter series with missing books: "true" to show only series with missing books + #[schema(value_type = Option, example = "true")] + pub has_missing: Option, + /// Filter by metadata provider: a provider name (e.g. 
"google_books"), "linked" (any provider), or "unlinked" (no provider) + #[schema(value_type = Option, example = "google_books")] + pub metadata_provider: Option, + #[schema(value_type = Option, example = 1)] + pub page: Option, + #[schema(value_type = Option, example = 50)] + pub limit: Option, + /// Sort order: "title" (default) or "latest" (most recently added first) + #[schema(value_type = Option, example = "latest")] + pub sort: Option, +} + +/// List all series across libraries with optional filtering and pagination +#[utoipa::path( + get, + path = "/series", + tag = "series", + params( + ("q" = Option, Query, description = "Filter by series name (case-insensitive, partial match)"), + ("library_id" = Option, Query, description = "Filter by library ID"), + ("reading_status" = Option, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"), + ("metadata_provider" = Option, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"), + ("page" = Option, Query, description = "Page number (1-indexed, default 1)"), + ("limit" = Option, Query, description = "Items per page (max 200, default 50)"), + ("sort" = Option, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"), + ), + responses( + (status = 200, body = SeriesPage), + (status = 401, description = "Unauthorized"), + ), + security(("Bearer" = [])) +)] +pub async fn list_all_series( + State(state): State, + Query(query): Query, +) -> Result, ApiError> { + let limit = query.limit.unwrap_or(50).clamp(1, 200); + let page = query.page.unwrap_or(1).max(1); + let offset = (page - 1) * limit; + + let reading_statuses: Option> = query.reading_status.as_deref().map(|s| { + s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect() + }); + + let series_status_expr = r#"CASE + WHEN sc.books_read_count = sc.book_count THEN 'read' + WHEN 
sc.books_read_count = 0 THEN 'unread' + ELSE 'reading' + END"#; + + let has_missing = query.has_missing.as_deref() == Some("true"); + + let mut p: usize = 0; + + let lib_cond = if query.library_id.is_some() { + p += 1; format!("WHERE library_id = ${p}") + } else { + "WHERE TRUE".to_string() + }; + + let q_cond = if query.q.is_some() { + p += 1; format!("AND sc.name ILIKE ${p}") + } else { String::new() }; + + let rs_cond = if reading_statuses.is_some() { + p += 1; format!("AND {series_status_expr} = ANY(${p})") + } else { String::new() }; + + let ss_cond = if query.series_status.is_some() { + p += 1; format!("AND LOWER(sm.status) = ${p}") + } else { String::new() }; + + let missing_cond = if has_missing { + "AND mc.missing_count > 0".to_string() + } else { String::new() }; + + let metadata_provider_cond = match query.metadata_provider.as_deref() { + Some("unlinked") => "AND ml.provider IS NULL".to_string(), + Some("linked") => "AND ml.provider IS NOT NULL".to_string(), + Some(_) => { p += 1; format!("AND ml.provider = ${p}") }, + None => String::new(), + }; + + // Missing counts CTE — needs library_id filter when filtering by library + let missing_cte = if query.library_id.is_some() { + r#" + missing_counts AS ( + SELECT eml.series_name, eml.library_id, + COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count + FROM external_metadata_links eml + JOIN external_book_metadata ebm ON ebm.link_id = eml.id + WHERE eml.library_id = $1 AND eml.status = 'approved' + GROUP BY eml.series_name, eml.library_id + ) + "#.to_string() + } else { + r#" + missing_counts AS ( + SELECT eml.series_name, eml.library_id, + COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count + FROM external_metadata_links eml + JOIN external_book_metadata ebm ON ebm.link_id = eml.id + WHERE eml.status = 'approved' + GROUP BY eml.series_name, eml.library_id + ) + "#.to_string() + }; + + let metadata_links_cte = r#" + metadata_links AS ( + SELECT DISTINCT ON (eml.series_name, 
eml.library_id) + eml.series_name, eml.library_id, eml.provider + FROM external_metadata_links eml + WHERE eml.status = 'approved' + ORDER BY eml.series_name, eml.library_id, eml.created_at DESC + ) + "#; + + let count_sql = format!( + r#" + WITH sorted_books AS ( + SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id, library_id + FROM books {lib_cond} + ), + series_counts AS ( + SELECT sb.name, sb.library_id, + COUNT(*) as book_count, + COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count + FROM sorted_books sb + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + GROUP BY sb.name, sb.library_id + ), + {missing_cte}, + {metadata_links_cte} + SELECT COUNT(*) FROM series_counts sc + LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name + LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id + LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id + WHERE TRUE {q_cond} {rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} + "# + ); + + let series_order_clause = if query.sort.as_deref() == Some("latest") { + "sc.latest_updated_at DESC".to_string() + } else { + "REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(sc.name), '\\d+'))[1]::int, 0), sc.name ASC".to_string() + }; + + let limit_p = p + 1; + let offset_p = p + 2; + + let data_sql = format!( + r#" + WITH sorted_books AS ( + SELECT + COALESCE(NULLIF(series, ''), 'unclassified') as name, + id, + library_id, + updated_at, + ROW_NUMBER() OVER ( + PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') + ORDER BY + volume NULLS LAST, + REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), + COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), + title ASC + ) as rn + FROM books + {lib_cond} + ), + series_counts AS ( + SELECT + sb.name, + sb.library_id, + COUNT(*) as book_count, + COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as 
books_read_count, + MAX(sb.updated_at) as latest_updated_at + FROM sorted_books sb + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + GROUP BY sb.name, sb.library_id + ), + {missing_cte}, + {metadata_links_cte} + SELECT + sc.name, + sc.book_count, + sc.books_read_count, + sb.id as first_book_id, + sb.library_id, + sm.status as series_status, + mc.missing_count, + ml.provider as metadata_provider + FROM series_counts sc + JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 + LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name + LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id + LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id + WHERE TRUE + {q_cond} + {rs_cond} + {ss_cond} + {missing_cond} + {metadata_provider_cond} + ORDER BY {series_order_clause} + LIMIT ${limit_p} OFFSET ${offset_p} + "# + ); + + let q_pattern = query.q.as_deref().map(|q| format!("%{}%", q)); + + let mut count_builder = sqlx::query(&count_sql); + let mut data_builder = sqlx::query(&data_sql); + + if let Some(lib_id) = query.library_id { + count_builder = count_builder.bind(lib_id); + data_builder = data_builder.bind(lib_id); + } + if let Some(ref pat) = q_pattern { + count_builder = count_builder.bind(pat); + data_builder = data_builder.bind(pat); + } + if let Some(ref statuses) = reading_statuses { + count_builder = count_builder.bind(statuses.clone()); + data_builder = data_builder.bind(statuses.clone()); + } + if let Some(ref ss) = query.series_status { + count_builder = count_builder.bind(ss); + data_builder = data_builder.bind(ss); + } + if let Some(ref mp) = query.metadata_provider { + if mp != "linked" && mp != "unlinked" { + count_builder = count_builder.bind(mp); + data_builder = data_builder.bind(mp); + } + } + + data_builder = data_builder.bind(limit).bind(offset); + + let (count_row, rows) = tokio::try_join!( + count_builder.fetch_one(&state.pool), + 
// NOTE(review): this chunk is the `+` payload of a whitespace-mangled git patch
// (new file: apps/api/src/series.rs). The paste collapsed newlines and stripped
// most `<...>` generic arguments (they were eaten like HTML tags). The code
// below is the reconstructed, conventionally formatted form; generic arguments
// that could not be recovered verbatim are flagged with a review note.

// ── Tail of `list_series` ────────────────────────────────────────────────────
// The head of this function (signature and the count/data query builders) lies
// before this chunk; only the visible remainder is reproduced here.
        data_builder.fetch_all(&state.pool),
    )?;
    let total: i64 = count_row.get(0);

    // Map raw DB rows into the API DTO.
    let items: Vec<SeriesItem> = rows
        .iter()
        .map(|row| SeriesItem {
            name: row.get("name"),
            book_count: row.get("book_count"),
            books_read_count: row.get("books_read_count"),
            first_book_id: row.get("first_book_id"),
            library_id: row.get("library_id"),
            series_status: row.get("series_status"),
            missing_count: row.get("missing_count"),
            metadata_provider: row.get("metadata_provider"),
        })
        .collect();

    Ok(Json(SeriesPage {
        items,
        total,
        page,
        limit,
    }))
}

/// List all distinct series status values present in the database.
///
/// Merges lowercased statuses from `series_metadata` with the mapped statuses
/// from `status_mappings`, deduplicated and sorted.
#[utoipa::path(
    get,
    path = "/series/statuses",
    tag = "series",
    responses(
        (status = 200, body = Vec<String>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn series_statuses(
    State(state): State<AppState>,
) -> Result<Json<Vec<String>>, ApiError> {
    let rows: Vec<String> = sqlx::query_scalar(
        r#"SELECT DISTINCT s FROM (
            SELECT LOWER(status) AS s FROM series_metadata WHERE status IS NOT NULL
            UNION
            SELECT mapped_status AS s FROM status_mappings WHERE mapped_status IS NOT NULL
        ) t ORDER BY s"#,
    )
    .fetch_all(&state.pool)
    .await?;
    Ok(Json(rows))
}

/// List distinct raw provider statuses from external metadata links.
#[utoipa::path(
    get,
    path = "/series/provider-statuses",
    tag = "series",
    responses(
        (status = 200, body = Vec<String>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn provider_statuses(
    State(state): State<AppState>,
) -> Result<Json<Vec<String>>, ApiError> {
    // `metadata_json->>'status'` is the provider's raw status string; empty
    // strings are treated as absent.
    let rows: Vec<String> = sqlx::query_scalar(
        r#"SELECT DISTINCT lower(metadata_json->>'status') AS s
        FROM external_metadata_links
        WHERE metadata_json->>'status' IS NOT NULL
          AND metadata_json->>'status' != ''
        ORDER BY s"#,
    )
    .fetch_all(&state.pool)
    .await?;
    Ok(Json(rows))
}

#[derive(Deserialize, ToSchema)]
pub struct OngoingQuery {
    // Max items to return; the handlers clamp this to 1..=50 (default 10).
    #[schema(value_type = Option<i64>, example = 10)]
    pub limit: Option<i64>,
}

/// List ongoing series (partially read, sorted by most recent activity).
#[utoipa::path(
    get,
    path = "/series/ongoing",
    tag = "series",
    params(
        ("limit" = Option<i64>, Query, description = "Max items to return (default 10, max 50)"),
    ),
    responses(
        (status = 200, body = Vec<SeriesItem>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn ongoing_series(
    State(state): State<AppState>,
    Query(query): Query<OngoingQuery>,
) -> Result<Json<Vec<SeriesItem>>, ApiError> {
    let limit = query.limit.unwrap_or(10).clamp(1, 50);

    // A series is "ongoing" when at least one book is read/reading but not all
    // books are read. `first_books` picks a representative first volume per
    // series: by volume number, then a natural-ish title sort (alpha prefix,
    // then the first embedded number, then the full title).
    let rows = sqlx::query(
        r#"
        WITH series_stats AS (
            SELECT
                COALESCE(NULLIF(b.series, ''), 'unclassified') AS name,
                COUNT(*) AS book_count,
                COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count,
                MAX(brp.last_read_at) AS last_read_at
            FROM books b
            LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
            GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
            HAVING (
                COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
                AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
            )
        ),
        first_books AS (
            SELECT
                COALESCE(NULLIF(series, ''), 'unclassified') AS name,
                id,
                library_id,
                ROW_NUMBER() OVER (
                    PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
                    ORDER BY
                        volume NULLS LAST,
                        REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
                        COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
                        title ASC
                ) AS rn
            FROM books
        )
        SELECT ss.name, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id
        FROM series_stats ss
        JOIN first_books fb ON fb.name = ss.name AND fb.rn = 1
        ORDER BY ss.last_read_at DESC NULLS LAST
        LIMIT $1
        "#,
    )
    .bind(limit)
    .fetch_all(&state.pool)
    .await?;

    let items: Vec<SeriesItem> = rows
        .iter()
        .map(|row| SeriesItem {
            name: row.get("name"),
            book_count: row.get("book_count"),
            books_read_count: row.get("books_read_count"),
            first_book_id: row.get("first_book_id"),
            library_id: row.get("library_id"),
            // Status/missing/provider enrichment is intentionally skipped for
            // this lightweight dashboard endpoint.
            series_status: None,
            missing_count: None,
            metadata_provider: None,
        })
        .collect();

    Ok(Json(items))
}

/// List next unread book for each ongoing series (sorted by most recent activity).
#[utoipa::path(
    get,
    path = "/books/ongoing",
    tag = "series",
    params(
        ("limit" = Option<i64>, Query, description = "Max items to return (default 10, max 50)"),
    ),
    responses(
        (status = 200, body = Vec<BookItem>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn ongoing_books(
    State(state): State<AppState>,
    Query(query): Query<OngoingQuery>,
) -> Result<Json<Vec<BookItem>>, ApiError> {
    let limit = query.limit.unwrap_or(10).clamp(1, 50);

    // Same "ongoing" definition as `ongoing_series`; `next_books` then picks,
    // per series, the first book that is not yet read (by volume, then title).
    let rows = sqlx::query(
        r#"
        WITH ongoing_series AS (
            SELECT
                COALESCE(NULLIF(b.series, ''), 'unclassified') AS name,
                MAX(brp.last_read_at) AS series_last_read_at
            FROM books b
            LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
            GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
            HAVING (
                COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
                AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
            )
        ),
        next_books AS (
            SELECT
                b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume,
                b.language, b.page_count, b.thumbnail_path, b.updated_at,
                COALESCE(brp.status, 'unread') AS reading_status,
                brp.current_page AS reading_current_page,
                brp.last_read_at AS reading_last_read_at,
                os.series_last_read_at,
                ROW_NUMBER() OVER (
                    PARTITION BY COALESCE(NULLIF(b.series, ''), 'unclassified')
                    ORDER BY b.volume NULLS LAST, b.title
                ) AS rn
            FROM books b
            JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name
            LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
            WHERE COALESCE(brp.status, 'unread') != 'read'
        )
        SELECT id, library_id, kind, format, title, author, authors, series, volume, language, page_count,
               thumbnail_path, updated_at, reading_status, reading_current_page, reading_last_read_at
        FROM next_books
        WHERE rn = 1
        ORDER BY series_last_read_at DESC NULLS LAST
        LIMIT $1
        "#,
    )
    .bind(limit)
    .fetch_all(&state.pool)
    .await?;

    let items: Vec<BookItem> = rows
        .iter()
        .map(|row| {
            let thumbnail_path: Option<String> = row.get("thumbnail_path");
            BookItem {
                id: row.get("id"),
                library_id: row.get("library_id"),
                kind: row.get("kind"),
                format: row.get("format"),
                title: row.get("title"),
                author: row.get("author"),
                // NOTE(review): generic stripped in the paste; Option<Vec<String>>
                // assumed for the nullable books.authors column — confirm against
                // the BookItem definition.
                authors: row.get::<Option<Vec<String>>, _>("authors"),
                series: row.get("series"),
                volume: row.get("volume"),
                language: row.get("language"),
                page_count: row.get("page_count"),
                // The URL is derived, not stored: present iff a thumbnail file exists.
                thumbnail_url: thumbnail_path
                    .map(|_| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
                updated_at: row.get("updated_at"),
                reading_status: row.get("reading_status"),
                reading_current_page: row.get("reading_current_page"),
                reading_last_read_at: row.get("reading_last_read_at"),
            }
        })
        .collect();

    Ok(Json(items))
}

// ─── Series metadata ─────────────────────────────────────────────────────────

#[derive(Serialize, ToSchema)]
pub struct SeriesMetadata {
    /// Authors of the series (series-level metadata, distinct from per-book author field)
    pub authors: Vec<String>,
    pub description: Option<String>,
    pub publishers: Vec<String>,
    // NOTE(review): integer widths were stripped in the paste; i32 assumed for
    // start_year/total_volumes — confirm against the DB schema.
    pub start_year: Option<i32>,
    pub total_volumes: Option<i32>,
    /// Series status: "ongoing", "ended", "hiatus", "cancelled", or null
    pub status: Option<String>,
    /// Convenience: author from first book (for pre-filling the per-book apply section)
    pub book_author: Option<String>,
    pub book_language: Option<String>,
    /// Fields locked from external metadata sync, e.g. {"authors": true, "description": true}
    pub locked_fields: serde_json::Value,
}

/// Get metadata for a specific series.
#[utoipa::path(
    get,
    path = "/libraries/{library_id}/series/{name}/metadata",
    tag = "series",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("name" = String, Path, description = "Series name"),
    ),
    responses(
        (status = 200, body = SeriesMetadata),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_series_metadata(
    State(state): State<AppState>,
    Path((library_id, name)): Path<(Uuid, String)>,
) -> Result<Json<SeriesMetadata>, ApiError> {
    // author/language come from the first book of the series; "unclassified"
    // is the virtual bucket for books with a NULL/empty series.
    let books_row = if name == "unclassified" {
        sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND (series IS NULL OR series = '') LIMIT 1")
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await?
    } else {
        sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND series = $2 LIMIT 1")
            .bind(library_id)
            .bind(&name)
            .fetch_optional(&state.pool)
            .await?
    };

    let meta_row = sqlx::query(
        "SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2"
    )
    .bind(library_id)
    .bind(&name)
    .fetch_optional(&state.pool)
    .await?;

    // A missing series_metadata row yields empty/None defaults rather than 404.
    Ok(Json(SeriesMetadata {
        authors: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(),
        description: meta_row.as_ref().and_then(|r| r.get("description")),
        publishers: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
        start_year: meta_row.as_ref().and_then(|r| r.get("start_year")),
        total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")),
        status: meta_row.as_ref().and_then(|r| r.get("status")),
        book_author: books_row.as_ref().and_then(|r| r.get("author")),
        book_language: books_row.as_ref().and_then(|r| r.get("language")),
        // Fix: build the `{}` default lazily instead of allocating it eagerly
        // on every call (original used `unwrap_or(serde_json::json!({}))`).
        locked_fields: meta_row
            .as_ref()
            .map(|r| r.get::<serde_json::Value, _>("locked_fields"))
            .unwrap_or_else(|| serde_json::json!({})),
    }))
}

/// `author` and `language` are wrapped in an extra Option so we can distinguish
/// "absent from JSON" (keep books unchanged) from "present as null" (clear the field).
#[derive(Deserialize, ToSchema)]
pub struct UpdateSeriesRequest {
    pub new_name: String,
    /// Series-level authors list (stored in series_metadata)
    #[serde(default)]
    pub authors: Vec<String>,
    /// Per-book author propagation: absent = keep books unchanged, present = overwrite all books
    // Fix: dropped `skip_serializing_if = "Option::is_none"` — this struct only
    // derives Deserialize, so the serialize-side attribute was dead code.
    #[serde(default)]
    pub author: Option<Option<String>>,
    /// Per-book language propagation: absent = keep books unchanged, present = overwrite all books
    #[serde(default)]
    pub language: Option<Option<String>>,
    pub description: Option<String>,
    #[serde(default)]
    pub publishers: Vec<String>,
    // NOTE(review): integer widths stripped in the paste; i32 assumed — confirm.
    pub start_year: Option<i32>,
    pub total_volumes: Option<i32>,
    /// Series status: "ongoing", "ended", "hiatus", "cancelled", or null
    pub status: Option<String>,
    /// Fields locked from external metadata sync
    #[serde(default)]
    pub locked_fields: Option<serde_json::Value>,
}

#[derive(Serialize, ToSchema)]
pub struct UpdateSeriesResponse {
    /// Number of book rows updated.
    pub updated: u64,
}

/// Update metadata for all books in a series.
///
/// Renames the series on every book, optionally propagates author/language to
/// all books, and upserts the series-level `series_metadata` row (moving it if
/// the series was renamed).
#[utoipa::path(
    patch,
    path = "/libraries/{library_id}/series/{name}",
    tag = "series",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("name" = String, Path, description = "Series name (use 'unclassified' for books without series)"),
    ),
    request_body = UpdateSeriesRequest,
    responses(
        (status = 200, body = UpdateSeriesResponse),
        (status = 400, description = "Invalid request"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_series(
    State(state): State<AppState>,
    Path((library_id, name)): Path<(Uuid, String)>,
    Json(body): Json<UpdateSeriesRequest>,
) -> Result<Json<UpdateSeriesResponse>, ApiError> {
    let new_name = body.new_name.trim().to_string();
    if new_name.is_empty() {
        return Err(ApiError::bad_request("series name cannot be empty"));
    }
    // author/language: None = absent (keep books unchanged), Some(v) = apply to all books
    let apply_author = body.author.is_some();
    let author_value = body.author.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
    let apply_language = body.language.is_some();
    let language_value = body.language.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
    let description = body.description.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
    let publishers: Vec<String> = body.publishers.iter()
        .map(|p| p.trim().to_string())
        .filter(|p| !p.is_empty())
        .collect();
    // "unclassified" is the virtual bucket: stored as NULL series on books.
    let new_series_value: Option<String> = if new_name == "unclassified" { None } else { Some(new_name.clone()) };

    // 1. Update books: always update series name; author/language only if opted-in
    //    $1=library_id, $2=new_series_value, $3=apply_author, $4=author_value,
    //    $5=apply_language, $6=language_value, [$7=old_name]
    let result = if name == "unclassified" {
        sqlx::query(
            "UPDATE books \
             SET series = $2, \
                 author = CASE WHEN $3 THEN $4 ELSE author END, \
                 language = CASE WHEN $5 THEN $6 ELSE language END, \
                 updated_at = NOW() \
             WHERE library_id = $1 AND (series IS NULL OR series = '')"
        )
        .bind(library_id)
        .bind(&new_series_value)
        .bind(apply_author)
        .bind(&author_value)
        .bind(apply_language)
        .bind(&language_value)
        .execute(&state.pool)
        .await?
    } else {
        sqlx::query(
            "UPDATE books \
             SET series = $2, \
                 author = CASE WHEN $3 THEN $4 ELSE author END, \
                 language = CASE WHEN $5 THEN $6 ELSE language END, \
                 updated_at = NOW() \
             WHERE library_id = $1 AND series = $7"
        )
        .bind(library_id)
        .bind(&new_series_value)
        .bind(apply_author)
        .bind(&author_value)
        .bind(apply_language)
        .bind(&language_value)
        .bind(&name)
        .execute(&state.pool)
        .await?
    };

    // 2. Upsert series_metadata (keyed by new_name)
    let meta_name = new_series_value.as_deref().unwrap_or("unclassified");
    let authors: Vec<String> = body.authors.iter()
        .map(|a| a.trim().to_string())
        .filter(|a| !a.is_empty())
        .collect();
    // Fix: take the field by value instead of the original's
    // `clone().unwrap_or(json!({}))`, which cloned needlessly and built the
    // default eagerly even when locked_fields was present.
    let locked_fields = body.locked_fields.unwrap_or_else(|| serde_json::json!({}));
    sqlx::query(
        r#"
        INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, status, locked_fields, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW())
        ON CONFLICT (library_id, name) DO UPDATE
        SET authors = EXCLUDED.authors,
            description = EXCLUDED.description,
            publishers = EXCLUDED.publishers,
            start_year = EXCLUDED.start_year,
            total_volumes = EXCLUDED.total_volumes,
            status = EXCLUDED.status,
            locked_fields = EXCLUDED.locked_fields,
            updated_at = NOW()
        "#
    )
    .bind(library_id)
    .bind(meta_name)
    .bind(&authors)
    .bind(&description)
    .bind(&publishers)
    .bind(body.start_year)
    .bind(body.total_volumes)
    .bind(&body.status)
    .bind(&locked_fields)
    .execute(&state.pool)
    .await?;

    // 3. If renamed, drop the series_metadata row keyed by the old name
    //    (the new-name row was just upserted above).
    if name != "unclassified" && new_name != name {
        sqlx::query(
            "DELETE FROM series_metadata WHERE library_id = $1 AND name = $2"
        )
        .bind(library_id)
        .bind(&name)
        .execute(&state.pool)
        .await?;
    }

    Ok(Json(UpdateSeriesResponse { updated: result.rows_affected() }))
}

// ── Patch hunk for apps/api/src/stats.rs (same commit, separate file) ────────
// Preserved verbatim from the mangled patch; the /stats endpoint's OpenAPI tag
// changes from "books" to "stats":
//
//   @@ -90,7 +90,7 @@ pub struct StatsResponse {
//    #[utoipa::path(
//        get,
//        path = "/stats",
//   -    tag = "books",
//   +    tag = "stats",
//        responses(
//            (status = 200, body = StatsResponse),
//            (status = 401, description = "Unauthorized"),