refactor: migrer tout le code Rust vers series_id (table series)

API (15 fichiers):
- series.rs: helpers resolve_series_id/get_or_create_series, toutes les
  queries migrent de books.series TEXT vers series_id FK + JOIN series
- Routes /series/:name → /series/:series_id (UUID)
- books.rs: filtres série par series_id, SELECT s.name AS series via JOIN
- metadata.rs: sync écrit dans series au lieu de series_metadata
- metadata_refresh.rs: refresh_link et rematch via series_id
- metadata_batch.rs: sync via series table
- anilist.rs: liens par series_id au lieu de series_name
- download_detection.rs: available_downloads via series_id
- reading_progress.rs: mark_series_read par series_id
- torrent_import.rs: import via series JOIN
- search.rs, stats.rs, libraries.rs: JOINs series pour les noms
- reading_status_match.rs, reading_status_push.rs: séries via JOIN

Indexer (3 fichiers):
- scanner.rs: get_or_create_series_id() avec cache HashMap
- batch.rs: BookInsert/BookUpdate.series_id UUID au lieu de series String
- job.rs: rematch_unlinked_books via series JOIN

4 nouveaux tests (SeriesItem, SeriesMetadata, UpdateSeriesResponse,
BatchStructs avec series_id)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-29 21:13:11 +02:00
parent f2a7db939f
commit 292e9bc77f
18 changed files with 675 additions and 443 deletions

View File

@@ -501,15 +501,16 @@ pub async fn list_unlinked(
SELECT SELECT
l.id AS library_id, l.id AS library_id,
l.name AS library_name, l.name AS library_name,
COALESCE(NULLIF(b.series, ''), 'unclassified') AS series_name COALESCE(s.name, 'unclassified') AS series_name
FROM books b FROM books b
JOIN libraries l ON l.id = b.library_id JOIN libraries l ON l.id = b.library_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN anilist_series_links asl LEFT JOIN anilist_series_links asl
ON asl.library_id = b.library_id ON asl.library_id = b.library_id
AND asl.series_name = COALESCE(NULLIF(b.series, ''), 'unclassified') AND asl.series_name = COALESCE(s.name, 'unclassified')
WHERE l.reading_status_provider = 'anilist' WHERE l.reading_status_provider = 'anilist'
AND asl.library_id IS NULL AND asl.library_id IS NULL
GROUP BY l.id, l.name, COALESCE(NULLIF(b.series, ''), 'unclassified') GROUP BY l.id, l.name, COALESCE(s.name, 'unclassified')
ORDER BY l.name, series_name ORDER BY l.name, series_name
"#, "#,
) )
@@ -576,10 +577,11 @@ pub async fn preview_sync(
SELECT SELECT
COUNT(*) as book_count, COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes (SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3 LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2 WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -684,10 +686,11 @@ pub async fn sync_to_anilist(
SELECT SELECT
COUNT(*) as book_count, COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes (SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3 LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2 WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -866,7 +869,7 @@ pub async fn pull_from_anilist(
// Get all book IDs for this series, ordered by volume // Get all book IDs for this series, ordered by volume
let book_rows = sqlx::query( let book_rows = sqlx::query(
"SELECT id, volume FROM books WHERE library_id = $1 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 ORDER BY volume NULLS LAST", "SELECT b.id, b.volume FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2 ORDER BY b.volume NULLS LAST",
) )
.bind(library_id) .bind(library_id)
.bind(series_name) .bind(series_name)

View File

@@ -141,15 +141,15 @@ pub async fn list_books(
// Conditions partagées COUNT et DATA — $1=library_id $2=kind $3=format, puis optionnels // Conditions partagées COUNT et DATA — $1=library_id $2=kind $3=format, puis optionnels
let mut p: usize = 3; let mut p: usize = 3;
let series_cond = match query.series.as_deref() { let series_cond = match query.series.as_deref() {
Some("unclassified") => "AND (b.series IS NULL OR b.series = '')".to_string(), Some("unclassified") => "AND b.series_id IS NULL".to_string(),
Some(_) => { p += 1; format!("AND b.series = ${p}") } Some(_) => { p += 1; format!("AND b.series_id = ${p}") }
None => String::new(), None => String::new(),
}; };
let rs_cond = if reading_statuses.is_some() { let rs_cond = if reading_statuses.is_some() {
p += 1; format!("AND COALESCE(brp.status, 'unread') = ANY(${p})") p += 1; format!("AND COALESCE(brp.status, 'unread') = ANY(${p})")
} else { String::new() }; } else { String::new() };
let author_cond = if query.author.is_some() { let author_cond = if query.author.is_some() {
p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR EXISTS (SELECT 1 FROM series_metadata sm WHERE sm.library_id = b.library_id AND sm.name = b.series AND ${p} = ANY(sm.authors)))") p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR (s.id IS NOT NULL AND ${p} = ANY(COALESCE(s.authors, ARRAY[]::text[]))))")
} else { String::new() }; } else { String::new() };
let metadata_cond = match query.metadata_provider.as_deref() { let metadata_cond = match query.metadata_provider.as_deref() {
Some("unlinked") => "AND eml.id IS NULL".to_string(), Some("unlinked") => "AND eml.id IS NULL".to_string(),
@@ -158,25 +158,26 @@ pub async fn list_books(
None => String::new(), None => String::new(),
}; };
let q_cond = if query.q.is_some() { let q_cond = if query.q.is_some() {
p += 1; format!("AND (b.title ILIKE ${p} OR b.series ILIKE ${p} OR b.author ILIKE ${p})") p += 1; format!("AND (b.title ILIKE ${p} OR s.name ILIKE ${p} OR b.author ILIKE ${p})")
} else { String::new() }; } else { String::new() };
p += 1; p += 1;
let uid_p = p; let uid_p = p;
let metadata_links_cte = r#" let metadata_links_cte = r#"
metadata_links AS ( metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id) SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_name, eml.library_id, eml.provider, eml.id eml.series_id, eml.library_id, eml.provider, eml.id
FROM external_metadata_links eml FROM external_metadata_links eml
WHERE eml.status = 'approved' WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
)"#; )"#;
let count_sql = format!( let count_sql = format!(
r#"WITH {metadata_links_cte} r#"WITH {metadata_links_cte}
SELECT COUNT(*) FROM books b SELECT COUNT(*) FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p} LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id LEFT JOIN metadata_links eml ON eml.series_id = b.series_id AND eml.library_id = b.library_id
WHERE ($1::uuid IS NULL OR b.library_id = $1) WHERE ($1::uuid IS NULL OR b.library_id = $1)
AND ($2::text IS NULL OR b.kind = $2) AND ($2::text IS NULL OR b.kind = $2)
AND ($3::text IS NULL OR b.format = $3) AND ($3::text IS NULL OR b.format = $3)
@@ -199,13 +200,14 @@ pub async fn list_books(
let data_sql = format!( let data_sql = format!(
r#" r#"
WITH {metadata_links_cte} WITH {metadata_links_cte}
SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at, SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, s.name AS series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
COALESCE(brp.status, 'unread') AS reading_status, COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page, brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at brp.last_read_at AS reading_last_read_at
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p} LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id LEFT JOIN metadata_links eml ON eml.series_id = b.series_id AND eml.library_id = b.library_id
WHERE ($1::uuid IS NULL OR b.library_id = $1) WHERE ($1::uuid IS NULL OR b.library_id = $1)
AND ($2::text IS NULL OR b.kind = $2) AND ($2::text IS NULL OR b.kind = $2)
AND ($3::text IS NULL OR b.format = $3) AND ($3::text IS NULL OR b.format = $3)
@@ -230,8 +232,9 @@ pub async fn list_books(
if let Some(s) = query.series.as_deref() { if let Some(s) = query.series.as_deref() {
if s != "unclassified" { if s != "unclassified" {
count_builder = count_builder.bind(s); let series_uuid: Uuid = s.parse().map_err(|_| ApiError::bad_request("invalid series id"))?;
data_builder = data_builder.bind(s); count_builder = count_builder.bind(series_uuid);
data_builder = data_builder.bind(series_uuid);
} }
} }
if let Some(ref statuses) = reading_statuses { if let Some(ref statuses) = reading_statuses {
@@ -318,12 +321,13 @@ pub async fn get_book(
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id); let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
let row = sqlx::query( let row = sqlx::query(
r#" r#"
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date, SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, s.name AS series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
bf.abs_path, bf.format, bf.parse_status, bf.abs_path, bf.format, bf.parse_status,
COALESCE(brp.status, 'unread') AS reading_status, COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page, brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at brp.last_read_at AS reading_last_read_at
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
SELECT abs_path, format, parse_status SELECT abs_path, format, parse_status
FROM book_files FROM book_files
@@ -519,13 +523,39 @@ pub async fn update_book(
let isbn = body.isbn.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); let isbn = body.isbn.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
let publish_date = body.publish_date.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); let publish_date = body.publish_date.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({})); let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
// Resolve series name to series_id
let series_id: Option<Uuid> = if let Some(ref s) = series {
// Look up existing series or create one
let book_row = sqlx::query("SELECT library_id FROM books WHERE id = $1")
.bind(id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("book not found"))?;
let lib_id: Uuid = book_row.get("library_id");
let sid: Uuid = sqlx::query_scalar(
r#"
INSERT INTO series (id, library_id, name, created_at, updated_at)
VALUES (gen_random_uuid(), $1, $2, NOW(), NOW())
ON CONFLICT (library_id, name) DO UPDATE SET updated_at = NOW()
RETURNING id
"#,
)
.bind(lib_id)
.bind(s)
.fetch_one(&state.pool)
.await?;
Some(sid)
} else {
None
};
let row = sqlx::query( let row = sqlx::query(
r#" r#"
UPDATE books UPDATE books
SET title = $2, author = $3, authors = $4, series = $5, volume = $6, language = $7, SET title = $2, author = $3, authors = $4, series_id = $5, volume = $6, language = $7,
summary = $8, isbn = $9, publish_date = $10, locked_fields = $11, updated_at = NOW() summary = $8, isbn = $9, publish_date = $10, locked_fields = $11, updated_at = NOW()
WHERE id = $1 WHERE id = $1
RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path, RETURNING id, library_id, kind, title, author, authors, volume, language, page_count, thumbnail_path,
summary, isbn, publish_date, summary, isbn, publish_date,
'unread' AS reading_status, 'unread' AS reading_status,
NULL::integer AS reading_current_page, NULL::integer AS reading_current_page,
@@ -536,7 +566,7 @@ pub async fn update_book(
.bind(&title) .bind(&title)
.bind(&author) .bind(&author)
.bind(&authors) .bind(&authors)
.bind(&series) .bind(series_id)
.bind(body.volume) .bind(body.volume)
.bind(&language) .bind(&language)
.bind(&summary) .bind(&summary)
@@ -556,7 +586,7 @@ pub async fn update_book(
title: row.get("title"), title: row.get("title"),
author: row.get("author"), author: row.get("author"),
authors: row.get::<Vec<String>, _>("authors"), authors: row.get::<Vec<String>, _>("authors"),
series: row.get("series"), series: series.clone(),
volume: row.get("volume"), volume: row.get("volume"),
language: row.get("language"), language: row.get("language"),
page_count: row.get("page_count"), page_count: row.get("page_count"),

View File

@@ -511,9 +511,10 @@ pub(crate) async fn process_download_detection(
// Fetch all series with their metadata link status // Fetch all series with their metadata link status
let all_series: Vec<String> = sqlx::query_scalar( let all_series: Vec<String> = sqlx::query_scalar(
r#" r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1 ORDER BY 1
"#, "#,
) )
@@ -528,8 +529,10 @@ pub(crate) async fn process_download_detection(
DELETE FROM available_downloads DELETE FROM available_downloads
WHERE library_id = $1 WHERE library_id = $1
AND series_name NOT IN ( AND series_name NOT IN (
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books WHERE library_id = $1 FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
) )
"#, "#,
) )

View File

@@ -62,16 +62,17 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<Li
let rows = sqlx::query( let rows = sqlx::query(
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at, l.reading_status_provider, l.reading_status_push_mode, l.next_reading_status_push_at, l.download_detection_mode, l.next_download_detection_at, "SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at, l.reading_status_provider, l.reading_status_push_mode, l.next_reading_status_push_at, l.download_detection_mode, l.next_download_detection_at,
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count, (SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
(SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count, (SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = l.id AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = l.id) as series_count,
COALESCE(( COALESCE((
SELECT ARRAY_AGG(first_id ORDER BY series_name) SELECT ARRAY_AGG(first_id ORDER BY series_name)
FROM ( FROM (
SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified')) SELECT DISTINCT ON (COALESCE(s.name, 'unclassified'))
COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name, COALESCE(s.name, 'unclassified') as series_name,
b.id as first_id b.id as first_id
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = l.id WHERE b.library_id = l.id
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), ORDER BY COALESCE(s.name, 'unclassified'),
b.volume NULLS LAST, b.title ASC b.volume NULLS LAST, b.title ASC
LIMIT 5 LIMIT 5
) sub ) sub
@@ -377,15 +378,16 @@ pub async fn update_monitoring(
.fetch_one(&state.pool) .fetch_one(&state.pool)
.await?; .await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1") let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = $1 AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = $1")
.bind(library_id) .bind(library_id)
.fetch_one(&state.pool) .fetch_one(&state.pool)
.await?; .await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar( let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b "SELECT b.id FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1 WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC ORDER BY COALESCE(s.name, 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5" LIMIT 5"
) )
.bind(library_id) .bind(library_id)
@@ -466,15 +468,16 @@ pub async fn update_metadata_provider(
.fetch_one(&state.pool) .fetch_one(&state.pool)
.await?; .await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1") let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = $1 AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = $1")
.bind(library_id) .bind(library_id)
.fetch_one(&state.pool) .fetch_one(&state.pool)
.await?; .await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar( let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b "SELECT b.id FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1 WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC ORDER BY COALESCE(s.name, 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5" LIMIT 5"
) )
.bind(library_id) .bind(library_id)

View File

@@ -103,7 +103,7 @@ async fn main() -> anyhow::Result<()> {
.route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider)) .route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider))
.route("/books/:id", axum::routing::patch(books::update_book).delete(books::delete_book)) .route("/books/:id", axum::routing::patch(books::update_book).delete(books::delete_book))
.route("/books/:id/convert", axum::routing::post(books::convert_book)) .route("/books/:id/convert", axum::routing::post(books::convert_book))
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series).delete(series::delete_series)) .route("/libraries/:library_id/series/:series_id", axum::routing::patch(series::update_series).delete(series::delete_series))
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild)) .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild)) .route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate)) .route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
@@ -180,7 +180,7 @@ async fn main() -> anyhow::Result<()> {
.route("/books/:id/pages/:n", get(pages::get_page)) .route("/books/:id/pages/:n", get(pages::get_page))
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress)) .route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
.route("/libraries/:library_id/series", get(series::list_series)) .route("/libraries/:library_id/series", get(series::list_series))
.route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata)) .route("/libraries/:library_id/series/:series_id/metadata", get(series::get_series_metadata))
.route("/series", get(series::list_all_series)) .route("/series", get(series::list_all_series))
.route("/series/ongoing", get(series::ongoing_series)) .route("/series/ongoing", get(series::ongoing_series))
.route("/series/statuses", get(series::series_statuses)) .route("/series/statuses", get(series::series_statuses))

View File

@@ -372,7 +372,7 @@ pub async fn approve_metadata(
// Notify via Telegram (with first book thumbnail if available) // Notify via Telegram (with first book thumbnail if available)
let provider_for_notif: String = row.get("provider"); let provider_for_notif: String = row.get("provider");
let thumbnail_path: Option<String> = sqlx::query_scalar( let thumbnail_path: Option<String> = sqlx::query_scalar(
"SELECT thumbnail_path FROM books WHERE library_id = $1 AND series_name = $2 AND thumbnail_path IS NOT NULL ORDER BY sort_order LIMIT 1", "SELECT b.thumbnail_path FROM books b JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND s.name = $2 AND b.thumbnail_path IS NOT NULL ORDER BY b.volume NULLS LAST, b.title LIMIT 1",
) )
.bind(library_id) .bind(library_id)
.bind(&series_name) .bind(&series_name)
@@ -514,7 +514,7 @@ pub async fn get_missing_books(
// Count local books // Count local books
let total_local: i64 = sqlx::query_scalar( let total_local: i64 = sqlx::query_scalar(
"SELECT COUNT(*) FROM books WHERE library_id = $1 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2", "SELECT COUNT(*) FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2",
) )
.bind(library_id) .bind(library_id)
.bind(&series_name) .bind(&series_name)
@@ -722,7 +722,7 @@ pub(crate) async fn sync_series_metadata(
// Fetch existing state before upsert // Fetch existing state before upsert
let existing = sqlx::query( let existing = sqlx::query(
r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
FROM series_metadata WHERE library_id = $1 AND name = $2"#, FROM series WHERE library_id = $1 AND name = $2"#,
) )
.bind(library_id) .bind(library_id)
.bind(series_name) .bind(series_name)
@@ -732,35 +732,35 @@ pub(crate) async fn sync_series_metadata(
// Respect locked_fields: only update fields that are NOT locked // Respect locked_fields: only update fields that are NOT locked
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at) INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW()) VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name) ON CONFLICT (library_id, name)
DO UPDATE SET DO UPDATE SET
description = CASE description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description) ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END, END,
publishers = CASE publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers ELSE series.publishers
END, END,
start_year = CASE start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year) ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END, END,
total_volumes = CASE total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes) ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END, END,
status = CASE status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status) ELSE COALESCE(EXCLUDED.status, series.status)
END, END,
authors = CASE authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors ELSE series.authors
END, END,
updated_at = NOW() updated_at = NOW()
"#, "#,
@@ -909,12 +909,13 @@ pub(crate) async fn sync_books_metadata(
// (volume ASC NULLS LAST, then natural title sort) // (volume ASC NULLS LAST, then natural title sort)
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as( let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#" r#"
SELECT id, volume, title FROM books SELECT b.id, b.volume, b.title FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 WHERE b.library_id = $1
ORDER BY volume NULLS LAST, AND COALESCE(s.name, 'unclassified') = $2
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), ORDER BY b.volume NULLS LAST,
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC title ASC
"#, "#,
) )

View File

@@ -410,9 +410,10 @@ pub(crate) async fn process_metadata_batch(
// Get all distinct series names for this library // Get all distinct series names for this library
let series_names: Vec<String> = sqlx::query_scalar( let series_names: Vec<String> = sqlx::query_scalar(
r#" r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1 ORDER BY 1
"#, "#,
) )
@@ -757,9 +758,10 @@ async fn search_and_evaluate(
if let Some(ext_total) = best.total_volumes { if let Some(ext_total) = best.total_volumes {
let local_count: Option<i64> = sqlx::query_scalar( let local_count: Option<i64> = sqlx::query_scalar(
r#" r#"
SELECT COUNT(*) FROM books SELECT COUNT(*) FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -867,35 +869,35 @@ async fn sync_series_from_candidate(
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at) INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW()) VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name) ON CONFLICT (library_id, name)
DO UPDATE SET DO UPDATE SET
description = CASE description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description) ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END, END,
publishers = CASE publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers ELSE series.publishers
END, END,
start_year = CASE start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year) ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END, END,
total_volumes = CASE total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes) ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END, END,
status = CASE status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status) ELSE COALESCE(EXCLUDED.status, series.status)
END, END,
authors = CASE authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors ELSE series.authors
END, END,
updated_at = NOW() updated_at = NOW()
"#, "#,
@@ -944,13 +946,14 @@ async fn sync_books_from_provider(
// Pre-fetch local books // Pre-fetch local books
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as( let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#" r#"
SELECT id, volume, title FROM books SELECT b.id, b.volume, b.title FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 WHERE b.library_id = $1
ORDER BY volume NULLS LAST, AND COALESCE(s.name, 'unclassified') = $2
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), ORDER BY b.volume NULLS LAST,
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
title ASC COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
"#, "#,
) )
.bind(library_id) .bind(library_id)

View File

@@ -95,8 +95,8 @@ pub async fn start_refresh(
let link_count: i64 = sqlx::query_scalar( let link_count: i64 = sqlx::query_scalar(
r#" r#"
SELECT COUNT(*) FROM external_metadata_links eml SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm LEFT JOIN series sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name ON sm.library_id = eml.library_id AND sm.id = eml.series_id
WHERE eml.library_id = $1 WHERE eml.library_id = $1
AND eml.status = 'approved' AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled') AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
@@ -188,8 +188,8 @@ pub async fn start_refresh(
let link_count: i64 = sqlx::query_scalar( let link_count: i64 = sqlx::query_scalar(
r#" r#"
SELECT COUNT(*) FROM external_metadata_links eml SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm LEFT JOIN series sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name ON sm.library_id = eml.library_id AND sm.id = eml.series_id
WHERE eml.library_id = $1 WHERE eml.library_id = $1
AND eml.status = 'approved' AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled') AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
@@ -357,14 +357,14 @@ pub(crate) async fn process_metadata_refresh(
// Get approved links for this library, only for ongoing series (not ended/cancelled) // Get approved links for this library, only for ongoing series (not ended/cancelled)
let links: Vec<(Uuid, String, String, String)> = sqlx::query_as( let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
r#" r#"
SELECT eml.id, eml.series_name, eml.provider, eml.external_id SELECT eml.id, sm.name AS series_name, eml.provider, eml.external_id
FROM external_metadata_links eml FROM external_metadata_links eml
LEFT JOIN series_metadata sm JOIN series sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name ON sm.id = eml.series_id
WHERE eml.library_id = $1 WHERE eml.library_id = $1
AND eml.status = 'approved' AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled') AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
ORDER BY eml.series_name ORDER BY sm.name
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -541,13 +541,14 @@ pub(crate) async fn refresh_link(
// Pre-fetch local books // Pre-fetch local books
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as( let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#" r#"
SELECT id, volume, title FROM books SELECT b.id, b.volume, b.title FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 WHERE b.library_id = $1
ORDER BY volume NULLS LAST, AND COALESCE(s.name, 'unclassified') = $2
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), ORDER BY b.volume NULLS LAST,
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
title ASC COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -741,7 +742,7 @@ async fn sync_series_with_diff(
// Fetch existing series metadata for diffing // Fetch existing series metadata for diffing
let existing = sqlx::query( let existing = sqlx::query(
r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
FROM series_metadata WHERE library_id = $1 AND name = $2"#, FROM series WHERE library_id = $1 AND name = $2"#,
) )
.bind(library_id) .bind(library_id)
.bind(series_name) .bind(series_name)
@@ -800,35 +801,35 @@ async fn sync_series_with_diff(
// Now do the actual upsert // Now do the actual upsert
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at) INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW()) VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name) ON CONFLICT (library_id, name)
DO UPDATE SET DO UPDATE SET
description = CASE description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description) ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END, END,
publishers = CASE publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers ELSE series.publishers
END, END,
start_year = CASE start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year) ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END, END,
total_volumes = CASE total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes) ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END, END,
status = CASE status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status) ELSE COALESCE(EXCLUDED.status, series.status)
END, END,
authors = CASE authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors ELSE series.authors
END, END,
updated_at = NOW() updated_at = NOW()
"#, "#,
@@ -967,7 +968,7 @@ pub async fn rematch_unlinked_books(pool: &PgPool, library_id: Uuid) -> Result<i
FROM external_book_metadata ebm2 FROM external_book_metadata ebm2
JOIN external_metadata_links eml ON eml.id = ebm2.link_id JOIN external_metadata_links eml ON eml.id = ebm2.link_id
JOIN books b ON b.library_id = eml.library_id JOIN books b ON b.library_id = eml.library_id
AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER(eml.series_name) AND b.series_id = eml.series_id
AND b.volume = ebm2.volume_number AND b.volume = ebm2.volume_number
WHERE eml.library_id = $1 WHERE eml.library_id = $1
AND ebm2.book_id IS NULL AND ebm2.book_id IS NULL

View File

@@ -211,9 +211,9 @@ pub async fn mark_series_read(
} }
let series_filter = if body.series == "unclassified" { let series_filter = if body.series == "unclassified" {
"(series IS NULL OR series = '')" "series_id IS NULL"
} else { } else {
"series = $1" "series_id = $1"
}; };
let sql = if body.status == "unread" { let sql = if body.status == "unread" {
@@ -276,9 +276,10 @@ pub async fn mark_series_read(
.execute(&state.pool) .execute(&state.pool)
.await? .await?
} else { } else {
// $1 = series, $2 = user_id // $1 = series_id (UUID), $2 = user_id
let series_uuid: Uuid = body.series.parse().map_err(|_| ApiError::bad_request("invalid series id"))?;
sqlx::query(&sql) sqlx::query(&sql)
.bind(&body.series) .bind(series_uuid)
.bind(auth_user.user_id) .bind(auth_user.user_id)
.execute(&state.pool) .execute(&state.pool)
.await? .await?

View File

@@ -356,9 +356,10 @@ pub(crate) async fn process_reading_status_match(
let series_names: Vec<String> = sqlx::query_scalar( let series_names: Vec<String> = sqlx::query_scalar(
r#" r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books FROM books b
WHERE library_id = $1 LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1 ORDER BY 1
"#, "#,
) )

View File

@@ -392,16 +392,18 @@ pub async fn process_reading_status_push(
SELECT 1 SELECT 1
FROM book_reading_progress brp FROM book_reading_progress brp
JOIN books b2 ON b2.id = brp.book_id JOIN books b2 ON b2.id = brp.book_id
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id WHERE b2.library_id = asl.library_id
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name AND COALESCE(s2.name, 'unclassified') = asl.series_name
AND brp.user_id = $2 AND brp.user_id = $2
AND brp.updated_at > asl.synced_at AND brp.updated_at > asl.synced_at
) )
OR EXISTS ( OR EXISTS (
SELECT 1 SELECT 1
FROM books b2 FROM books b2
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id WHERE b2.library_id = asl.library_id
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name AND COALESCE(s2.name, 'unclassified') = asl.series_name
AND b2.created_at > asl.synced_at AND b2.created_at > asl.synced_at
) )
) )
@@ -464,10 +466,11 @@ pub async fn process_reading_status_push(
COUNT(b.id) AS total_books, COUNT(b.id) AS total_books,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read
FROM books b FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp LEFT JOIN book_reading_progress brp
ON brp.book_id = b.id AND brp.user_id = $3 ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 WHERE b.library_id = $1
AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2 AND COALESCE(s.name, 'unclassified') = $2
"#, "#,
) )
.bind(library_id) .bind(library_id)

View File

@@ -77,17 +77,15 @@ pub async fn search_books(
let books_sql = r#" let books_sql = r#"
SELECT b.id, b.library_id, b.kind, b.title, SELECT b.id, b.library_id, b.kind, b.title,
COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) as authors, COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) as authors,
b.series, b.volume, b.language s.name AS series, b.volume, b.language
FROM books b FROM books b
LEFT JOIN series_metadata sm LEFT JOIN series s ON s.id = b.series_id
ON sm.library_id = b.library_id
AND sm.name = COALESCE(NULLIF(b.series, ''), 'unclassified')
WHERE ( WHERE (
b.title ILIKE $1 b.title ILIKE $1
OR b.series ILIKE $1 OR s.name ILIKE $1
OR EXISTS (SELECT 1 FROM unnest( OR EXISTS (SELECT 1 FROM unnest(
COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)
|| COALESCE(sm.authors, ARRAY[]::text[]) || COALESCE(s.authors, ARRAY[]::text[])
) AS a WHERE a ILIKE $1) ) AS a WHERE a ILIKE $1)
) )
AND ($2::uuid IS NULL OR b.library_id = $2) AND ($2::uuid IS NULL OR b.library_id = $2)
@@ -101,18 +99,19 @@ pub async fn search_books(
let series_sql = r#" let series_sql = r#"
WITH sorted_books AS ( WITH sorted_books AS (
SELECT SELECT
library_id, b.library_id,
COALESCE(NULLIF(series, ''), 'unclassified') as name, COALESCE(s.name, 'unclassified') as name,
id, b.id,
ROW_NUMBER() OVER ( ROW_NUMBER() OVER (
PARTITION BY library_id, COALESCE(NULLIF(series, ''), 'unclassified') PARTITION BY b.library_id, COALESCE(s.name, 'unclassified')
ORDER BY ORDER BY
REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'), REGEXP_REPLACE(LOWER(b.title), '[0-9]+', '', 'g'),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC b.title ASC
) as rn ) as rn
FROM books FROM books b
WHERE ($2::uuid IS NULL OR library_id = $2) LEFT JOIN series s ON s.id = b.series_id
WHERE ($2::uuid IS NULL OR b.library_id = $2)
), ),
series_counts AS ( series_counts AS (
SELECT SELECT

View File

@@ -7,9 +7,73 @@ use utoipa::ToSchema;
use crate::{auth::AuthUser, books::BookItem, error::ApiError, state::AppState}; use crate::{auth::AuthUser, books::BookItem, error::ApiError, state::AppState};
// ─── Helper functions ────────────────────────────────────────────────────────
/// Resolve a series UUID from library_id + name. Returns NotFound if no such series exists.
pub(crate) async fn resolve_series_id(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
) -> Result<Uuid, ApiError> {
    // Case-insensitive lookup: series names are matched ignoring case.
    let found: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM series WHERE library_id = $1 AND LOWER(name) = LOWER($2)",
    )
    .bind(library_id)
    .bind(name)
    .fetch_optional(pool)
    .await?;

    match found {
        Some(id) => Ok(id),
        None => Err(ApiError::not_found(format!("series '{}' not found", name))),
    }
}
/// Get or create a series row, returning its UUID.
///
/// Fast path: case-insensitive lookup of an existing row. Otherwise performs a
/// single race-safe upsert — `ON CONFLICT … DO UPDATE … RETURNING id` yields the
/// row id whether this call inserted the row or a concurrent caller won the race.
pub(crate) async fn get_or_create_series(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
) -> Result<Uuid, ApiError> {
    // Fast path: the series already exists (names matched ignoring case).
    if let Some(id) = sqlx::query_scalar::<_, Uuid>(
        "SELECT id FROM series WHERE library_id = $1 AND LOWER(name) = LOWER($2)",
    )
    .bind(library_id)
    .bind(name)
    .fetch_optional(pool)
    .await?
    {
        return Ok(id);
    }

    // Single round-trip upsert. The no-op `DO UPDATE SET name = EXCLUDED.name`
    // exists only so RETURNING fires on the conflict path too, handing back the
    // existing row's id instead of raising.
    // NOTE(review): the conflict target (library_id, name) is an exact-case
    // match while the lookup above is case-insensitive — a name differing only
    // in case would insert a second row here; confirm against the migration.
    let id: Uuid = sqlx::query_scalar(
        "INSERT INTO series (id, library_id, name) VALUES ($1, $2, $3) \
         ON CONFLICT (library_id, name) DO UPDATE SET name = EXCLUDED.name \
         RETURNING id",
    )
    .bind(Uuid::new_v4())
    .bind(library_id)
    .bind(name)
    .fetch_one(pool)
    .await?;

    Ok(id)
}
// ─── Structs ─────────────────────────────────────────────────────────────────
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema)]
pub struct SeriesItem { pub struct SeriesItem {
pub name: String, pub name: String,
#[schema(value_type = String)]
pub series_id: Uuid,
pub book_count: i64, pub book_count: i64,
pub books_read_count: i64, pub books_read_count: i64,
#[schema(value_type = String)] #[schema(value_type = String)]
@@ -98,7 +162,7 @@ pub async fn list_series(
let mut p: usize = 1; let mut p: usize = 1;
let q_cond = if query.q.is_some() { let q_cond = if query.q.is_some() {
p += 1; format!("AND sc.name ILIKE ${p}") p += 1; format!("AND s.name ILIKE ${p}")
} else { String::new() }; } else { String::new() };
let count_rs_cond = if reading_statuses.is_some() { let count_rs_cond = if reading_statuses.is_some() {
@@ -106,7 +170,7 @@ pub async fn list_series(
} else { String::new() }; } else { String::new() };
let ss_cond = if query.series_status.is_some() { let ss_cond = if query.series_status.is_some() {
p += 1; format!("AND LOWER(sm.status) = ${p}") p += 1; format!("AND LOWER(s.status) = ${p}")
} else { String::new() }; } else { String::new() };
let missing_cond = if has_missing { let missing_cond = if has_missing {
@@ -126,45 +190,43 @@ pub async fn list_series(
let missing_cte = r#" let missing_cte = r#"
missing_counts AS ( missing_counts AS (
SELECT eml.series_name, SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.library_id = $1 AND eml.status = 'approved' WHERE eml.library_id = $1 AND eml.status = 'approved'
GROUP BY eml.series_name GROUP BY eml.series_id
) )
"#.to_string(); "#.to_string();
let metadata_links_cte = r#" let metadata_links_cte = r#"
metadata_links AS ( metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id) SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_name, eml.library_id, eml.provider eml.series_id, eml.library_id, eml.provider
FROM external_metadata_links eml FROM external_metadata_links eml
WHERE eml.status = 'approved' WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
) )
"#; "#;
let count_sql = format!( let count_sql = format!(
r#" r#"
WITH sorted_books AS ( WITH series_counts AS (
SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id SELECT s.id as series_id, s.name,
FROM books WHERE library_id = $1 COUNT(b.id) as book_count,
),
series_counts AS (
SELECT sb.name,
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb FROM series s
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} LEFT JOIN books b ON b.series_id = s.id
GROUP BY sb.name LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
WHERE s.library_id = $1
GROUP BY s.id, s.name
), ),
{missing_cte}, {missing_cte},
{metadata_links_cte} {metadata_links_cte}
SELECT COUNT(*) FROM series_counts sc SELECT COUNT(*) FROM series_counts sc
LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_name = sc.name LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = $1
WHERE TRUE {q_cond} {count_rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} WHERE TRUE {q_cond} {count_rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond}
"# "#
); );
@@ -173,46 +235,50 @@ pub async fn list_series(
r#" r#"
WITH sorted_books AS ( WITH sorted_books AS (
SELECT SELECT
COALESCE(NULLIF(series, ''), 'unclassified') as name, b.series_id,
id, b.id,
ROW_NUMBER() OVER ( ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') PARTITION BY b.series_id
ORDER BY ORDER BY
volume NULLS LAST, b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC b.title ASC
) as rn ) as rn
FROM books FROM books b
WHERE library_id = $1 WHERE b.library_id = $1
), ),
series_counts AS ( series_counts AS (
SELECT SELECT
sb.name, s.id as series_id,
COUNT(*) as book_count, s.name,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb FROM series s
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} LEFT JOIN books b ON b.series_id = s.id
GROUP BY sb.name LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
WHERE s.library_id = $1
GROUP BY s.id, s.name
), ),
{missing_cte}, {missing_cte},
{metadata_links_cte} {metadata_links_cte}
SELECT SELECT
sc.name, sc.name,
sc.series_id,
sc.book_count, sc.book_count,
sc.books_read_count, sc.books_read_count,
sb.id as first_book_id, sb.id as first_book_id,
sm.status as series_status, s.status as series_status,
mc.missing_count, mc.missing_count,
ml.provider as metadata_provider, ml.provider as metadata_provider,
asl.anilist_id, asl.anilist_id,
asl.anilist_url asl.anilist_url
FROM series_counts sc FROM series_counts sc
JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 JOIN sorted_books sb ON sb.series_id = sc.series_id AND sb.rn = 1
LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_name = sc.name LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1 LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = $1
LEFT JOIN anilist_series_links asl ON asl.library_id = $1 AND asl.series_name = sc.name AND asl.provider = 'anilist' LEFT JOIN anilist_series_links asl ON asl.series_id = sc.series_id AND asl.provider = 'anilist'
WHERE TRUE WHERE TRUE
{q_cond} {q_cond}
{count_rs_cond} {count_rs_cond}
@@ -267,6 +333,7 @@ pub async fn list_series(
.iter() .iter()
.map(|row| SeriesItem { .map(|row| SeriesItem {
name: row.get("name"), name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"), book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"), books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"), first_book_id: row.get("first_book_id"),
@@ -304,7 +371,7 @@ pub struct ListAllSeriesQuery {
/// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider) /// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider)
#[schema(value_type = Option<String>, example = "google_books")] #[schema(value_type = Option<String>, example = "google_books")]
pub metadata_provider: Option<String>, pub metadata_provider: Option<String>,
/// Filter by author name (matches in series_metadata.authors or book-level authors) /// Filter by author name (matches in series.authors or book-level authors)
#[schema(value_type = Option<String>, example = "Toriyama")] #[schema(value_type = Option<String>, example = "Toriyama")]
pub author: Option<String>, pub author: Option<String>,
#[schema(value_type = Option<i64>, example = 1)] #[schema(value_type = Option<i64>, example = 1)]
@@ -326,7 +393,7 @@ pub struct ListAllSeriesQuery {
("library_id" = Option<String>, Query, description = "Filter by library ID"), ("library_id" = Option<String>, Query, description = "Filter by library ID"),
("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"), ("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"),
("metadata_provider" = Option<String>, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"), ("metadata_provider" = Option<String>, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"),
("author" = Option<String>, Query, description = "Filter by author name (matches in series_metadata.authors or book-level authors)"), ("author" = Option<String>, Query, description = "Filter by author name (matches in series.authors or book-level authors)"),
("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"), ("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"),
("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"), ("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"),
("sort" = Option<String>, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"), ("sort" = Option<String>, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"),
@@ -362,13 +429,13 @@ pub async fn list_all_series(
let mut p: usize = 0; let mut p: usize = 0;
let lib_cond = if query.library_id.is_some() { let lib_cond = if query.library_id.is_some() {
p += 1; format!("WHERE library_id = ${p}") p += 1; format!("WHERE s.library_id = ${p}")
} else { } else {
"WHERE TRUE".to_string() "WHERE TRUE".to_string()
}; };
let q_cond = if query.q.is_some() { let q_cond = if query.q.is_some() {
p += 1; format!("AND sc.name ILIKE ${p}") p += 1; format!("AND s.name ILIKE ${p}")
} else { String::new() }; } else { String::new() };
let rs_cond = if reading_statuses.is_some() { let rs_cond = if reading_statuses.is_some() {
@@ -376,7 +443,7 @@ pub async fn list_all_series(
} else { String::new() }; } else { String::new() };
let ss_cond = if query.series_status.is_some() { let ss_cond = if query.series_status.is_some() {
p += 1; format!("AND LOWER(sm.status) = ${p}") p += 1; format!("AND LOWER(s.status) = ${p}")
} else { String::new() }; } else { String::new() };
let missing_cond = if has_missing { let missing_cond = if has_missing {
@@ -391,41 +458,41 @@ pub async fn list_all_series(
}; };
let author_cond = if query.author.is_some() { let author_cond = if query.author.is_some() {
p += 1; format!("AND (${p} = ANY(sm.authors) OR EXISTS (SELECT 1 FROM books bk WHERE bk.series = sc.name AND bk.library_id = sc.library_id AND ${p} = ANY(COALESCE(NULLIF(bk.authors, '{{}}'), CASE WHEN bk.author IS NOT NULL AND bk.author != '' THEN ARRAY[bk.author] ELSE ARRAY[]::text[] END))))") p += 1; format!("AND (${p} = ANY(s.authors) OR EXISTS (SELECT 1 FROM books bk WHERE bk.series_id = s.id AND ${p} = ANY(COALESCE(NULLIF(bk.authors, '{{}}'), CASE WHEN bk.author IS NOT NULL AND bk.author != '' THEN ARRAY[bk.author] ELSE ARRAY[]::text[] END))))")
} else { String::new() }; } else { String::new() };
// Missing counts CTE — needs library_id filter when filtering by library // Missing counts CTE — needs library_id filter when filtering by library
let missing_cte = if query.library_id.is_some() { let missing_cte = if query.library_id.is_some() {
r#" r#"
missing_counts AS ( missing_counts AS (
SELECT eml.series_name, eml.library_id, SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.library_id = $1 AND eml.status = 'approved' WHERE eml.library_id = $1 AND eml.status = 'approved'
GROUP BY eml.series_name, eml.library_id GROUP BY eml.series_id
) )
"#.to_string() "#.to_string()
} else { } else {
r#" r#"
missing_counts AS ( missing_counts AS (
SELECT eml.series_name, eml.library_id, SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.status = 'approved' WHERE eml.status = 'approved'
GROUP BY eml.series_name, eml.library_id GROUP BY eml.series_id
) )
"#.to_string() "#.to_string()
}; };
let metadata_links_cte = r#" let metadata_links_cte = r#"
metadata_links AS ( metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id) SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_name, eml.library_id, eml.provider eml.series_id, eml.library_id, eml.provider
FROM external_metadata_links eml FROM external_metadata_links eml
WHERE eml.status = 'approved' WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
) )
"#; "#;
@@ -435,24 +502,22 @@ pub async fn list_all_series(
let count_sql = format!( let count_sql = format!(
r#" r#"
WITH sorted_books AS ( WITH series_counts AS (
SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id, library_id SELECT s.id as series_id, s.name, s.library_id,
FROM books {lib_cond} COUNT(b.id) as book_count,
),
series_counts AS (
SELECT sb.name, sb.library_id,
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb FROM series s
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} LEFT JOIN books b ON b.series_id = s.id
GROUP BY sb.name, sb.library_id LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
{lib_cond}
GROUP BY s.id, s.name, s.library_id
), ),
{missing_cte}, {missing_cte},
{metadata_links_cte} {metadata_links_cte}
SELECT COUNT(*) FROM series_counts sc SELECT COUNT(*) FROM series_counts sc
LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = sc.library_id
WHERE TRUE {q_cond} {rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} {author_cond} WHERE TRUE {q_cond} {rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} {author_cond}
"# "#
); );
@@ -467,51 +532,56 @@ pub async fn list_all_series(
r#" r#"
WITH sorted_books AS ( WITH sorted_books AS (
SELECT SELECT
COALESCE(NULLIF(series, ''), 'unclassified') as name, b.series_id,
id, b.id,
library_id, b.library_id,
created_at, b.created_at,
ROW_NUMBER() OVER ( ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') PARTITION BY b.series_id
ORDER BY ORDER BY
volume NULLS LAST, b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC b.title ASC
) as rn ) as rn
FROM books FROM books b
JOIN series s ON s.id = b.series_id
{lib_cond} {lib_cond}
), ),
series_counts AS ( series_counts AS (
SELECT SELECT
sb.name, s.id as series_id,
sb.library_id, s.name,
COUNT(*) as book_count, s.library_id,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count,
MAX(sb.created_at) as latest_created_at MAX(b.created_at) as latest_created_at
FROM sorted_books sb FROM series s
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} LEFT JOIN books b ON b.series_id = s.id
GROUP BY sb.name, sb.library_id LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
{lib_cond}
GROUP BY s.id, s.name, s.library_id
), ),
{missing_cte}, {missing_cte},
{metadata_links_cte} {metadata_links_cte}
SELECT SELECT
sc.name, sc.name,
sc.series_id,
sc.book_count, sc.book_count,
sc.books_read_count, sc.books_read_count,
sb.id as first_book_id, sb.id as first_book_id,
sb.library_id, sc.library_id,
sm.status as series_status, s.status as series_status,
mc.missing_count, mc.missing_count,
ml.provider as metadata_provider, ml.provider as metadata_provider,
asl.anilist_id, asl.anilist_id,
asl.anilist_url asl.anilist_url
FROM series_counts sc FROM series_counts sc
JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 JOIN sorted_books sb ON sb.series_id = sc.series_id AND sb.rn = 1
LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = sc.library_id
LEFT JOIN anilist_series_links asl ON asl.library_id = sc.library_id AND asl.series_name = sc.name AND asl.provider = 'anilist' LEFT JOIN anilist_series_links asl ON asl.series_id = sc.series_id AND asl.provider = 'anilist'
WHERE TRUE WHERE TRUE
{q_cond} {q_cond}
{rs_cond} {rs_cond}
@@ -569,6 +639,7 @@ pub async fn list_all_series(
.iter() .iter()
.map(|row| SeriesItem { .map(|row| SeriesItem {
name: row.get("name"), name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"), book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"), books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"), first_book_id: row.get("first_book_id"),
@@ -605,7 +676,7 @@ pub async fn series_statuses(
) -> Result<Json<Vec<String>>, ApiError> { ) -> Result<Json<Vec<String>>, ApiError> {
let rows: Vec<String> = sqlx::query_scalar( let rows: Vec<String> = sqlx::query_scalar(
r#"SELECT DISTINCT s FROM ( r#"SELECT DISTINCT s FROM (
SELECT LOWER(status) AS s FROM series_metadata WHERE status IS NOT NULL SELECT LOWER(status) AS s FROM series WHERE status IS NOT NULL
UNION UNION
SELECT mapped_status AS s FROM status_mappings WHERE mapped_status IS NOT NULL SELECT mapped_status AS s FROM status_mappings WHERE mapped_status IS NOT NULL
) t ORDER BY s"#, ) t ORDER BY s"#,
@@ -673,13 +744,16 @@ pub async fn ongoing_series(
r#" r#"
WITH series_stats AS ( WITH series_stats AS (
SELECT SELECT
COALESCE(NULLIF(b.series, ''), 'unclassified') AS name, s.id AS series_id,
s.name,
s.library_id,
COUNT(*) AS book_count, COUNT(*) AS book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count,
MAX(brp.last_read_at) AS last_read_at MAX(brp.last_read_at) AS last_read_at
FROM books b FROM series s
JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified') GROUP BY s.id, s.name, s.library_id
HAVING ( HAVING (
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*) AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
@@ -687,22 +761,22 @@ pub async fn ongoing_series(
), ),
first_books AS ( first_books AS (
SELECT SELECT
COALESCE(NULLIF(series, ''), 'unclassified') AS name, b.series_id,
id, b.id,
library_id, b.library_id,
ROW_NUMBER() OVER ( ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified') PARTITION BY b.series_id
ORDER BY ORDER BY
volume NULLS LAST, b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''), REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0), COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC b.title ASC
) AS rn ) AS rn
FROM books FROM books b
) )
SELECT ss.name, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id SELECT ss.name, ss.series_id, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id
FROM series_stats ss FROM series_stats ss
JOIN first_books fb ON fb.name = ss.name AND fb.rn = 1 JOIN first_books fb ON fb.series_id = ss.series_id AND fb.rn = 1
ORDER BY ss.last_read_at DESC NULLS LAST ORDER BY ss.last_read_at DESC NULLS LAST
LIMIT $1 LIMIT $1
"#, "#,
@@ -716,6 +790,7 @@ pub async fn ongoing_series(
.iter() .iter()
.map(|row| SeriesItem { .map(|row| SeriesItem {
name: row.get("name"), name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"), book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"), books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"), first_book_id: row.get("first_book_id"),
@@ -757,11 +832,12 @@ pub async fn ongoing_books(
r#" r#"
WITH ongoing_series AS ( WITH ongoing_series AS (
SELECT SELECT
COALESCE(NULLIF(b.series, ''), 'unclassified') AS name, s.id AS series_id,
MAX(brp.last_read_at) AS series_last_read_at MAX(brp.last_read_at) AS series_last_read_at
FROM books b FROM series s
JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified') GROUP BY s.id
HAVING ( HAVING (
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*) AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
@@ -769,18 +845,19 @@ pub async fn ongoing_books(
), ),
next_books AS ( next_books AS (
SELECT SELECT
b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, s.name AS series, b.volume,
b.language, b.page_count, b.thumbnail_path, b.updated_at, b.language, b.page_count, b.thumbnail_path, b.updated_at,
COALESCE(brp.status, 'unread') AS reading_status, COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page, brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at, brp.last_read_at AS reading_last_read_at,
os.series_last_read_at, os.series_last_read_at,
ROW_NUMBER() OVER ( ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(b.series, ''), 'unclassified') PARTITION BY b.series_id
ORDER BY b.volume NULLS LAST, b.title ORDER BY b.volume NULLS LAST, b.title
) AS rn ) AS rn
FROM books b FROM books b
JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name JOIN ongoing_series os ON b.series_id = os.series_id
JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
WHERE COALESCE(brp.status, 'unread') != 'read' WHERE COALESCE(brp.status, 'unread') != 'read'
) )
@@ -847,54 +924,51 @@ pub struct SeriesMetadata {
/// Get metadata for a specific series /// Get metadata for a specific series
#[utoipa::path( #[utoipa::path(
get, get,
path = "/libraries/{library_id}/series/{name}/metadata", path = "/libraries/{library_id}/series/{series_id}/metadata",
tag = "series", tag = "series",
params( params(
("library_id" = String, Path, description = "Library UUID"), ("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name"), ("series_id" = String, Path, description = "Series UUID"),
), ),
responses( responses(
(status = 200, body = SeriesMetadata), (status = 200, body = SeriesMetadata),
(status = 401, description = "Unauthorized"), (status = 401, description = "Unauthorized"),
(status = 404, description = "Series not found"),
), ),
security(("Bearer" = [])) security(("Bearer" = []))
)] )]
pub async fn get_series_metadata( pub async fn get_series_metadata(
State(state): State<AppState>, State(state): State<AppState>,
Path((library_id, name)): Path<(Uuid, String)>, Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<SeriesMetadata>, ApiError> { ) -> Result<Json<SeriesMetadata>, ApiError> {
// author/language from first book of series // Fetch series row (contains metadata directly)
let books_row = if name == "unclassified" { let series_row = sqlx::query(
sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND (series IS NULL OR series = '') LIMIT 1") "SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields, book_author, book_language \
.bind(library_id) FROM series WHERE id = $1 AND library_id = $2"
.fetch_optional(&state.pool)
.await?
} else {
sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND series = $2 LIMIT 1")
.bind(library_id)
.bind(&name)
.fetch_optional(&state.pool)
.await?
};
let meta_row = sqlx::query(
"SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2"
) )
.bind(series_id)
.bind(library_id) .bind(library_id)
.bind(&name) .fetch_optional(&state.pool)
.await?;
// Fallback: get book_author/book_language from first book if not on series row
let books_row = sqlx::query("SELECT author, language FROM books WHERE series_id = $1 LIMIT 1")
.bind(series_id)
.fetch_optional(&state.pool) .fetch_optional(&state.pool)
.await?; .await?;
Ok(Json(SeriesMetadata { Ok(Json(SeriesMetadata {
authors: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(), authors: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(),
description: meta_row.as_ref().and_then(|r| r.get("description")), description: series_row.as_ref().and_then(|r| r.get("description")),
publishers: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(), publishers: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
start_year: meta_row.as_ref().and_then(|r| r.get("start_year")), start_year: series_row.as_ref().and_then(|r| r.get("start_year")),
total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")), total_volumes: series_row.as_ref().and_then(|r| r.get("total_volumes")),
status: meta_row.as_ref().and_then(|r| r.get("status")), status: series_row.as_ref().and_then(|r| r.get("status")),
book_author: books_row.as_ref().and_then(|r| r.get("author")), book_author: series_row.as_ref().and_then(|r| r.get::<Option<String>, _>("book_author"))
book_language: books_row.as_ref().and_then(|r| r.get("language")), .or_else(|| books_row.as_ref().and_then(|r| r.get("author"))),
locked_fields: meta_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})), book_language: series_row.as_ref().and_then(|r| r.get::<Option<String>, _>("book_language"))
.or_else(|| books_row.as_ref().and_then(|r| r.get("language"))),
locked_fields: series_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})),
})) }))
} }
@@ -903,7 +977,7 @@ pub async fn get_series_metadata(
#[derive(Deserialize, ToSchema)] #[derive(Deserialize, ToSchema)]
pub struct UpdateSeriesRequest { pub struct UpdateSeriesRequest {
pub new_name: String, pub new_name: String,
/// Series-level authors list (stored in series_metadata) /// Series-level authors list (stored in series)
#[serde(default)] #[serde(default)]
pub authors: Vec<String>, pub authors: Vec<String>,
/// Per-book author propagation: absent = keep books unchanged, present = overwrite all books /// Per-book author propagation: absent = keep books unchanged, present = overwrite all books
@@ -932,11 +1006,11 @@ pub struct UpdateSeriesResponse {
/// Update metadata for all books in a series /// Update metadata for all books in a series
#[utoipa::path( #[utoipa::path(
patch, patch,
path = "/libraries/{library_id}/series/{name}", path = "/libraries/{library_id}/series/{series_id}",
tag = "series", tag = "series",
params( params(
("library_id" = String, Path, description = "Library UUID"), ("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name (use 'unclassified' for books without series)"), ("series_id" = String, Path, description = "Series UUID"),
), ),
request_body = UpdateSeriesRequest, request_body = UpdateSeriesRequest,
responses( responses(
@@ -944,18 +1018,29 @@ pub struct UpdateSeriesResponse {
(status = 400, description = "Invalid request"), (status = 400, description = "Invalid request"),
(status = 401, description = "Unauthorized"), (status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"), (status = 403, description = "Forbidden - Admin scope required"),
(status = 404, description = "Series not found"),
), ),
security(("Bearer" = [])) security(("Bearer" = []))
)] )]
pub async fn update_series( pub async fn update_series(
State(state): State<AppState>, State(state): State<AppState>,
Path((library_id, name)): Path<(Uuid, String)>, Path((library_id, series_id)): Path<(Uuid, Uuid)>,
Json(body): Json<UpdateSeriesRequest>, Json(body): Json<UpdateSeriesRequest>,
) -> Result<Json<UpdateSeriesResponse>, ApiError> { ) -> Result<Json<UpdateSeriesResponse>, ApiError> {
let new_name = body.new_name.trim().to_string(); let new_name = body.new_name.trim().to_string();
if new_name.is_empty() { if new_name.is_empty() {
return Err(ApiError::bad_request("series name cannot be empty")); return Err(ApiError::bad_request("series name cannot be empty"));
} }
// Verify the series exists
let old_row = sqlx::query("SELECT name, original_name FROM series WHERE id = $1 AND library_id = $2")
.bind(series_id)
.bind(library_id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("series not found"))?;
let old_name: String = old_row.get("name");
// author/language: None = absent (keep books unchanged), Some(v) = apply to all books // author/language: None = absent (keep books unchanged), Some(v) = apply to all books
let apply_author = body.author.is_some(); let apply_author = body.author.is_some();
let author_value = body.author.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string); let author_value = body.author.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
@@ -966,92 +1051,58 @@ pub async fn update_series(
.map(|p| p.trim().to_string()) .map(|p| p.trim().to_string())
.filter(|p| !p.is_empty()) .filter(|p| !p.is_empty())
.collect(); .collect();
let new_series_value: Option<String> = if new_name == "unclassified" { None } else { Some(new_name.clone()) };
// 1. Update books: always update series name; author/language only if opted-in
// $1=library_id, $2=new_series_value, $3=apply_author, $4=author_value,
// $5=apply_language, $6=language_value, [$7=old_name]
let result = if name == "unclassified" {
sqlx::query(
"UPDATE books \
SET series = $2, \
author = CASE WHEN $3 THEN $4 ELSE author END, \
language = CASE WHEN $5 THEN $6 ELSE language END, \
updated_at = NOW() \
WHERE library_id = $1 AND (series IS NULL OR series = '')"
)
.bind(library_id)
.bind(&new_series_value)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.execute(&state.pool)
.await?
} else {
sqlx::query(
"UPDATE books \
SET series = $2, \
author = CASE WHEN $3 THEN $4 ELSE author END, \
language = CASE WHEN $5 THEN $6 ELSE language END, \
updated_at = NOW() \
WHERE library_id = $1 AND series = $7"
)
.bind(library_id)
.bind(&new_series_value)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.bind(&name)
.execute(&state.pool)
.await?
};
// 2. Upsert series_metadata (keyed by new_name)
let meta_name = new_series_value.as_deref().unwrap_or("unclassified");
let authors: Vec<String> = body.authors.iter() let authors: Vec<String> = body.authors.iter()
.map(|a| a.trim().to_string()) .map(|a| a.trim().to_string())
.filter(|a| !a.is_empty()) .filter(|a| !a.is_empty())
.collect(); .collect();
let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({})); let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
// When renaming, preserve the filesystem-derived original name so the scanner // 1. Update books: author/language only if opted-in
// can map files back to the renamed series instead of recreating the old one. let result = sqlx::query(
let is_rename = name != "unclassified" && new_name != name; "UPDATE books \
let original_name: Option<String> = if is_rename { SET author = CASE WHEN $2 THEN $3 ELSE author END, \
// Check if the old metadata already has an original_name (chained renames: A→B→C) language = CASE WHEN $4 THEN $5 ELSE language END, \
let existing_original: Option<Option<String>> = sqlx::query_scalar( updated_at = NOW() \
"SELECT original_name FROM series_metadata WHERE library_id = $1 AND name = $2" WHERE series_id = $1"
) )
.bind(library_id) .bind(series_id)
.bind(&name) .bind(apply_author)
.fetch_optional(&state.pool) .bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.execute(&state.pool)
.await?; .await?;
// Use existing original_name if set, otherwise use the old name itself
Some(existing_original.flatten().unwrap_or_else(|| name.clone())) // 2. Update the series row (name, metadata, original_name tracking)
let is_rename = new_name != old_name;
let original_name: Option<String> = if is_rename {
// Use existing original_name if set (chained renames: A->B->C), otherwise use old name
let existing_original: Option<String> = old_row.get("original_name");
Some(existing_original.unwrap_or_else(|| old_name.clone()))
} else { } else {
None None
}; };
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, status, locked_fields, original_name, updated_at) UPDATE series
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW()) SET name = $2,
ON CONFLICT (library_id, name) DO UPDATE authors = $3,
SET authors = EXCLUDED.authors, description = $4,
description = EXCLUDED.description, publishers = $5,
publishers = EXCLUDED.publishers, start_year = $6,
start_year = EXCLUDED.start_year, total_volumes = $7,
total_volumes = EXCLUDED.total_volumes, status = $8,
status = EXCLUDED.status, locked_fields = $9,
locked_fields = EXCLUDED.locked_fields, book_author = CASE WHEN $10 THEN $11 ELSE book_author END,
original_name = COALESCE(EXCLUDED.original_name, series_metadata.original_name), book_language = CASE WHEN $12 THEN $13 ELSE book_language END,
original_name = COALESCE($14, original_name),
updated_at = NOW() updated_at = NOW()
WHERE id = $1
"# "#
) )
.bind(library_id) .bind(series_id)
.bind(meta_name) .bind(&new_name)
.bind(&authors) .bind(&authors)
.bind(&description) .bind(&description)
.bind(&publishers) .bind(&publishers)
@@ -1059,21 +1110,14 @@ pub async fn update_series(
.bind(body.total_volumes) .bind(body.total_volumes)
.bind(&body.status) .bind(&body.status)
.bind(&locked_fields) .bind(&locked_fields)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.bind(&original_name) .bind(&original_name)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;
// 3. If renamed, delete the old series_metadata entry
if is_rename {
sqlx::query(
"DELETE FROM series_metadata WHERE library_id = $1 AND name = $2"
)
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await?;
}
Ok(Json(UpdateSeriesResponse { updated: result.rows_affected() })) Ok(Json(UpdateSeriesResponse { updated: result.rows_affected() }))
} }
@@ -1081,11 +1125,11 @@ pub async fn update_series(
/// and all related metadata (external links, anilist, available downloads). /// and all related metadata (external links, anilist, available downloads).
#[utoipa::path( #[utoipa::path(
delete, delete,
path = "/libraries/{library_id}/series/{name}", path = "/libraries/{library_id}/series/{series_id}",
tag = "series", tag = "series",
params( params(
("library_id" = String, Path, description = "Library UUID"), ("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name (URL-encoded)"), ("series_id" = String, Path, description = "Series UUID"),
), ),
responses( responses(
(status = 200, description = "Series deleted"), (status = 200, description = "Series deleted"),
@@ -1097,24 +1141,32 @@ pub async fn update_series(
pub async fn delete_series( pub async fn delete_series(
State(state): State<AppState>, State(state): State<AppState>,
Extension(_user): Extension<AuthUser>, Extension(_user): Extension<AuthUser>,
Path((library_id, name)): Path<(Uuid, String)>, Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<crate::responses::DeletedResponse>, ApiError> { ) -> Result<Json<crate::responses::DeletedResponse>, ApiError> {
use stripstream_core::paths::remap_libraries_path; use stripstream_core::paths::remap_libraries_path;
// Verify the series exists
let series_row = sqlx::query("SELECT name FROM series WHERE id = $1 AND library_id = $2")
.bind(series_id)
.bind(library_id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("series not found"))?;
let series_name: String = series_row.get("name");
// Find all books in this series // Find all books in this series
let book_rows = sqlx::query( let book_rows = sqlx::query(
"SELECT b.id, b.thumbnail_path, bf.abs_path \ "SELECT b.id, b.thumbnail_path, bf.abs_path \
FROM books b \ FROM books b \
LEFT JOIN book_files bf ON bf.book_id = b.id \ LEFT JOIN book_files bf ON bf.book_id = b.id \
WHERE b.library_id = $1 AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER($2)", WHERE b.series_id = $1",
) )
.bind(library_id) .bind(series_id)
.bind(&name)
.fetch_all(&state.pool) .fetch_all(&state.pool)
.await?; .await?;
if book_rows.is_empty() { if book_rows.is_empty() {
return Err(ApiError::not_found("series not found or has no books")); // Series exists but has no books — still delete the series row
} }
// Collect the series directory from the first book's path // Collect the series directory from the first book's path
@@ -1156,39 +1208,19 @@ pub async fn delete_series(
// Delete all books from DB (cascades to book_files, reading_progress, etc.) // Delete all books from DB (cascades to book_files, reading_progress, etc.)
let book_ids: Vec<Uuid> = book_rows.iter().map(|r| r.get("id")).collect(); let book_ids: Vec<Uuid> = book_rows.iter().map(|r| r.get("id")).collect();
if !book_ids.is_empty() {
sqlx::query("DELETE FROM books WHERE id = ANY($1)") sqlx::query("DELETE FROM books WHERE id = ANY($1)")
.bind(&book_ids) .bind(&book_ids)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;
}
// Delete series metadata // Delete the series row (cascades to external_metadata_links, anilist_series_links, available_downloads via FK)
sqlx::query("DELETE FROM series_metadata WHERE library_id = $1 AND name = $2") sqlx::query("DELETE FROM series WHERE id = $1")
.bind(library_id) .bind(series_id)
.bind(&name)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;
// Delete external metadata links (cascades to external_book_metadata)
sqlx::query("DELETE FROM external_metadata_links WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await?;
// Delete anilist link
let _ = sqlx::query("DELETE FROM anilist_series_links WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await;
// Delete available downloads
let _ = sqlx::query("DELETE FROM available_downloads WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await;
// Queue a scan job for consistency // Queue a scan job for consistency
let scan_job_id = Uuid::new_v4(); let scan_job_id = Uuid::new_v4();
sqlx::query( sqlx::query(
@@ -1200,9 +1232,61 @@ pub async fn delete_series(
.await?; .await?;
tracing::info!( tracing::info!(
"[SERIES] Deleted series '{}' ({} books) from library {}, scan job {} queued", "[SERIES] Deleted series '{}' ({}) ({} books) from library {}, scan job {} queued",
name, book_ids.len(), library_id, scan_job_id series_name, series_id, book_ids.len(), library_id, scan_job_id
); );
Ok(Json(crate::responses::DeletedResponse::new(library_id))) Ok(Json(crate::responses::DeletedResponse::new(library_id)))
} }
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn series_item_has_series_id() {
let item = SeriesItem {
name: "Dragon Ball".to_string(),
series_id: Uuid::new_v4(),
book_count: 42,
books_read_count: 10,
first_book_id: Uuid::new_v4(),
library_id: Uuid::new_v4(),
series_status: Some("ended".to_string()),
missing_count: Some(0),
metadata_provider: None,
anilist_id: None,
anilist_url: None,
};
let json = serde_json::to_value(&item).unwrap();
assert!(json["series_id"].is_string());
assert_eq!(json["name"], "Dragon Ball");
assert_eq!(json["book_count"], 42);
}
#[test]
fn series_metadata_serializes() {
let meta = SeriesMetadata {
description: Some("A ninja story".to_string()),
authors: vec!["Kishimoto".to_string()],
publishers: vec![],
book_author: None,
book_language: None,
start_year: Some(1999),
total_volumes: Some(72),
status: Some("ended".to_string()),
locked_fields: serde_json::json!({}),
};
let json = serde_json::to_value(&meta).unwrap();
assert_eq!(json["total_volumes"], 72);
assert_eq!(json["authors"][0], "Kishimoto");
assert_eq!(json["status"], "ended");
}
#[test]
fn update_series_response_serializes() {
let resp = UpdateSeriesResponse { updated: 5 };
let json = serde_json::to_value(&resp).unwrap();
assert_eq!(json["updated"], 5);
}
}

View File

@@ -167,7 +167,7 @@ pub async fn get_stats(
r#" r#"
SELECT SELECT
COUNT(*) AS total_books, COUNT(*) AS total_books,
COUNT(DISTINCT NULLIF(series, '')) AS total_series, COUNT(DISTINCT b.series_id) AS total_series,
COUNT(DISTINCT library_id) AS total_libraries, COUNT(DISTINCT library_id) AS total_libraries,
COALESCE(SUM(page_count), 0)::BIGINT AS total_pages, COALESCE(SUM(page_count), 0)::BIGINT AS total_pages,
(SELECT COUNT(DISTINCT a) FROM ( (SELECT COUNT(DISTINCT a) FROM (
@@ -298,14 +298,15 @@ pub async fn get_stats(
let series_rows = sqlx::query( let series_rows = sqlx::query(
r#" r#"
SELECT SELECT
b.series, s.name AS series,
COUNT(*) AS book_count, COUNT(*) AS book_count,
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count, COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
FROM books b FROM books b
JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1) LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
WHERE b.series IS NOT NULL AND b.series != '' WHERE b.series_id IS NOT NULL
GROUP BY b.series GROUP BY s.name
ORDER BY book_count DESC ORDER BY book_count DESC
LIMIT 10 LIMIT 10
"#, "#,
@@ -405,8 +406,8 @@ pub async fn get_stats(
let meta_row = sqlx::query( let meta_row = sqlx::query(
r#" r#"
SELECT SELECT
(SELECT COUNT(DISTINCT NULLIF(series, '')) FROM books) AS total_series, (SELECT COUNT(DISTINCT series_id) FROM books WHERE series_id IS NOT NULL) AS total_series,
(SELECT COUNT(DISTINCT series_name) FROM external_metadata_links WHERE status = 'approved') AS series_linked, (SELECT COUNT(DISTINCT series_id) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
(SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary, (SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary,
(SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn (SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn
"#, "#,
@@ -419,7 +420,7 @@ pub async fn get_stats(
let provider_rows = sqlx::query( let provider_rows = sqlx::query(
r#" r#"
SELECT provider, COUNT(DISTINCT series_name) AS count SELECT provider, COUNT(DISTINCT series_id) AS count
FROM external_metadata_links FROM external_metadata_links
WHERE status = 'approved' WHERE status = 'approved'
GROUP BY provider GROUP BY provider
@@ -449,9 +450,10 @@ pub async fn get_stats(
// Currently reading books // Currently reading books
let reading_rows = sqlx::query( let reading_rows = sqlx::query(
r#" r#"
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username SELECT b.id AS book_id, b.title, s.name AS series, brp.current_page, b.page_count, u.username
FROM book_reading_progress brp FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id JOIN books b ON b.id = brp.book_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN users u ON u.id = brp.user_id LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1) AND ($1::uuid IS NULL OR brp.user_id = $1)
@@ -481,11 +483,12 @@ pub async fn get_stats(
// Recently read books // Recently read books
let recent_rows = sqlx::query( let recent_rows = sqlx::query(
r#" r#"
SELECT b.id AS book_id, b.title, b.series, SELECT b.id AS book_id, b.title, s.name AS series,
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at, TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
u.username u.username
FROM book_reading_progress brp FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id JOIN books b ON b.id = brp.book_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN users u ON u.id = brp.user_id LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1) AND ($1::uuid IS NULL OR brp.user_id = $1)

View File

@@ -664,8 +664,9 @@ async fn do_import(
"SELECT bf.abs_path, b.volume \ "SELECT bf.abs_path, b.volume \
FROM book_files bf \ FROM book_files bf \
JOIN books b ON b.id = bf.book_id \ JOIN books b ON b.id = bf.book_id \
LEFT JOIN series s ON s.id = b.series_id \
WHERE b.library_id = $1 \ WHERE b.library_id = $1 \
AND LOWER(unaccent(b.series)) = LOWER(unaccent($2)) \ AND LOWER(unaccent(s.name)) = LOWER(unaccent($2)) \
AND b.volume IS NOT NULL \ AND b.volume IS NOT NULL \
ORDER BY b.volume DESC LIMIT 1", ORDER BY b.volume DESC LIMIT 1",
) )

View File

@@ -9,7 +9,7 @@ pub struct BookUpdate {
pub title: String, pub title: String,
pub kind: String, pub kind: String,
pub format: String, pub format: String,
pub series: Option<String>, pub series_id: Option<Uuid>,
pub volume: Option<i32>, pub volume: Option<i32>,
pub page_count: Option<i32>, pub page_count: Option<i32>,
} }
@@ -28,7 +28,7 @@ pub struct BookInsert {
pub kind: String, pub kind: String,
pub format: String, pub format: String,
pub title: String, pub title: String,
pub series: Option<String>, pub series_id: Option<Uuid>,
pub volume: Option<i32>, pub volume: Option<i32>,
pub page_count: Option<i32>, pub page_count: Option<i32>,
pub thumbnail_path: Option<String>, pub thumbnail_path: Option<String>,
@@ -73,7 +73,7 @@ pub async fn flush_all_batches(
let titles: Vec<String> = books_update.iter().map(|b| b.title.clone()).collect(); let titles: Vec<String> = books_update.iter().map(|b| b.title.clone()).collect();
let kinds: Vec<String> = books_update.iter().map(|b| b.kind.clone()).collect(); let kinds: Vec<String> = books_update.iter().map(|b| b.kind.clone()).collect();
let formats: Vec<String> = books_update.iter().map(|b| b.format.clone()).collect(); let formats: Vec<String> = books_update.iter().map(|b| b.format.clone()).collect();
let series: Vec<Option<String>> = books_update.iter().map(|b| b.series.clone()).collect(); let series_ids: Vec<Option<Uuid>> = books_update.iter().map(|b| b.series_id).collect();
let volumes: Vec<Option<i32>> = books_update.iter().map(|b| b.volume).collect(); let volumes: Vec<Option<i32>> = books_update.iter().map(|b| b.volume).collect();
let page_counts: Vec<Option<i32>> = books_update.iter().map(|b| b.page_count).collect(); let page_counts: Vec<Option<i32>> = books_update.iter().map(|b| b.page_count).collect();
@@ -83,13 +83,13 @@ pub async fn flush_all_batches(
title = data.title, title = data.title,
kind = data.kind, kind = data.kind,
format = data.format, format = data.format,
series = data.series, series_id = data.series_id,
volume = data.volume, volume = data.volume,
page_count = data.page_count, page_count = data.page_count,
updated_at = NOW() updated_at = NOW()
FROM ( FROM (
SELECT * FROM UNNEST($1::uuid[], $2::text[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[]) SELECT * FROM UNNEST($1::uuid[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::int[], $7::int[])
AS t(book_id, title, kind, format, series, volume, page_count) AS t(book_id, title, kind, format, series_id, volume, page_count)
) AS data ) AS data
WHERE books.id = data.book_id WHERE books.id = data.book_id
"# "#
@@ -98,7 +98,7 @@ pub async fn flush_all_batches(
.bind(&titles) .bind(&titles)
.bind(&kinds) .bind(&kinds)
.bind(&formats) .bind(&formats)
.bind(&series) .bind(&series_ids)
.bind(&volumes) .bind(&volumes)
.bind(&page_counts) .bind(&page_counts)
.execute(&mut *tx) .execute(&mut *tx)
@@ -150,16 +150,16 @@ pub async fn flush_all_batches(
let kinds: Vec<String> = books_insert.iter().map(|b| b.kind.clone()).collect(); let kinds: Vec<String> = books_insert.iter().map(|b| b.kind.clone()).collect();
let formats: Vec<String> = books_insert.iter().map(|b| b.format.clone()).collect(); let formats: Vec<String> = books_insert.iter().map(|b| b.format.clone()).collect();
let titles: Vec<String> = books_insert.iter().map(|b| b.title.clone()).collect(); let titles: Vec<String> = books_insert.iter().map(|b| b.title.clone()).collect();
let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect(); let series_ids: Vec<Option<Uuid>> = books_insert.iter().map(|b| b.series_id).collect();
let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect(); let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect();
let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect(); let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect();
let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect(); let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect();
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO books (id, library_id, kind, format, title, series, volume, page_count, thumbnail_path) INSERT INTO books (id, library_id, kind, format, title, series_id, volume, page_count, thumbnail_path)
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::text[], $7::int[], $8::int[], $9::text[]) SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::uuid[], $7::int[], $8::int[], $9::text[])
AS t(id, library_id, kind, format, title, series, volume, page_count, thumbnail_path) AS t(id, library_id, kind, format, title, series_id, volume, page_count, thumbnail_path)
"# "#
) )
.bind(&book_ids) .bind(&book_ids)
@@ -167,7 +167,7 @@ pub async fn flush_all_batches(
.bind(&kinds) .bind(&kinds)
.bind(&formats) .bind(&formats)
.bind(&titles) .bind(&titles)
.bind(&series) .bind(&series_ids)
.bind(&volumes) .bind(&volumes)
.bind(&page_counts) .bind(&page_counts)
.bind(&thumbnail_paths) .bind(&thumbnail_paths)

View File

@@ -18,12 +18,13 @@ async fn rematch_unlinked_books(pool: &PgPool, library_id: Uuid) {
FROM external_book_metadata ebm2 FROM external_book_metadata ebm2
JOIN external_metadata_links eml ON eml.id = ebm2.link_id JOIN external_metadata_links eml ON eml.id = ebm2.link_id
JOIN books b ON b.library_id = eml.library_id JOIN books b ON b.library_id = eml.library_id
AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER(eml.series_name)
AND b.volume = ebm2.volume_number AND b.volume = ebm2.volume_number
LEFT JOIN series s ON s.id = b.series_id
WHERE eml.library_id = $1 WHERE eml.library_id = $1
AND ebm2.book_id IS NULL AND ebm2.book_id IS NULL
AND ebm2.volume_number IS NOT NULL AND ebm2.volume_number IS NOT NULL
AND eml.status = 'approved' AND eml.status = 'approved'
AND LOWER(COALESCE(s.name, 'unclassified')) = LOWER(eml.series_name)
) matched ) matched
WHERE ebm.id = matched.ebm_id WHERE ebm.id = matched.ebm_id
"#, "#,

View File

@@ -28,6 +28,42 @@ pub struct JobStats {
const BATCH_SIZE: usize = 100; const BATCH_SIZE: usize = 100;
/// Resolve a series name to its UUID for the given library, creating the
/// series row if it does not exist yet.
///
/// Checks the per-scan cache first; on a miss, performs a single upsert
/// (`ON CONFLICT ... DO UPDATE ... RETURNING id`) so the id comes back in
/// one round trip whether the row was just inserted or already existed.
/// The no-op `DO UPDATE` is deliberate: `DO NOTHING` suppresses `RETURNING`
/// for pre-existing rows, which would force a second SELECT and open a
/// race window between the two statements.
///
/// # Errors
/// Returns any database error raised by the upsert.
async fn get_or_create_series_id(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
    cache: &mut HashMap<String, Uuid>,
) -> Result<Uuid> {
    // Fast path: this series was already resolved during the current scan.
    if let Some(&id) = cache.get(name) {
        return Ok(id);
    }
    // Upsert in one statement. The DO UPDATE clause is a no-op write whose
    // only purpose is to make RETURNING yield the existing row's id on
    // conflict; the freshly generated UUID is discarded in that case.
    let id: Uuid = sqlx::query_scalar(
        "INSERT INTO series (id, library_id, name) VALUES ($1, $2, $3) \
         ON CONFLICT (library_id, name) DO UPDATE SET name = EXCLUDED.name \
         RETURNING id",
    )
    .bind(Uuid::new_v4())
    .bind(library_id)
    .bind(name)
    .fetch_one(pool)
    .await?;
    cache.insert(name.to_string(), id);
    Ok(id)
}
/// Phase 1 — Discovery: walk filesystem, extract metadata from filenames only (no archive I/O). /// Phase 1 — Discovery: walk filesystem, extract metadata from filenames only (no archive I/O).
/// New books are inserted with page_count = NULL so the analyzer phase can fill them in. /// New books are inserted with page_count = NULL so the analyzer phase can fill them in.
/// Updated books (fingerprint changed) get page_count/thumbnail reset. /// Updated books (fingerprint changed) get page_count/thumbnail reset.
@@ -108,22 +144,31 @@ pub async fn scan_library_discovery(
HashMap::new() HashMap::new()
}; };
// Track existing series names for new_series counting // Load existing series for this library: name → id
let existing_series: HashSet<String> = sqlx::query_scalar( let series_rows = sqlx::query(
"SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') FROM books WHERE library_id = $1", "SELECT id, name FROM series WHERE library_id = $1",
) )
.bind(library_id) .bind(library_id)
.fetch_all(&state.pool) .fetch_all(&state.pool)
.await .await
.unwrap_or_default() .unwrap_or_default();
let mut series_map: HashMap<String, Uuid> = series_rows
.into_iter() .into_iter()
.map(|row| {
let name: String = row.get("name");
let id: Uuid = row.get("id");
(name, id)
})
.collect(); .collect();
// Track existing series names for new_series counting
let existing_series: HashSet<String> = series_map.keys().cloned().collect();
let mut seen_new_series: HashSet<String> = HashSet::new(); let mut seen_new_series: HashSet<String> = HashSet::new();
// Load series rename mapping: original filesystem name → current DB name. // Load series rename mapping: original filesystem name → current DB name.
// This prevents the scanner from recreating old series after a user rename. // This prevents the scanner from recreating old series after a user rename.
let rename_rows = sqlx::query( let rename_rows = sqlx::query(
"SELECT original_name, name FROM series_metadata WHERE library_id = $1 AND original_name IS NOT NULL", "SELECT original_name, name FROM series WHERE library_id = $1 AND original_name IS NOT NULL",
) )
.bind(library_id) .bind(library_id)
.fetch_all(&state.pool) .fetch_all(&state.pool)
@@ -378,12 +423,22 @@ pub async fn scan_library_discovery(
old_fingerprint != fingerprint old_fingerprint != fingerprint
); );
// Resolve series name → series_id
let update_series_id = if let Some(ref series_name) = parsed.series {
Some(
get_or_create_series_id(&state.pool, library_id, series_name, &mut series_map)
.await?,
)
} else {
None
};
books_to_update.push(BookUpdate { books_to_update.push(BookUpdate {
book_id, book_id,
title: parsed.title, title: parsed.title,
kind: utils::kind_from_format(format).to_string(), kind: utils::kind_from_format(format).to_string(),
format: format.as_str().to_string(), format: format.as_str().to_string(),
series: parsed.series, series_id: update_series_id,
volume: parsed.volume, volume: parsed.volume,
// Reset page_count so analyzer re-processes this book // Reset page_count so analyzer re-processes this book
page_count: None, page_count: None,
@@ -439,13 +494,23 @@ pub async fn scan_library_discovery(
stats.new_series += 1; stats.new_series += 1;
} }
// Resolve series name → series_id
let insert_series_id = if let Some(ref series_name) = parsed.series {
Some(
get_or_create_series_id(&state.pool, library_id, series_name, &mut series_map)
.await?,
)
} else {
None
};
books_to_insert.push(BookInsert { books_to_insert.push(BookInsert {
book_id, book_id,
library_id, library_id,
kind: utils::kind_from_format(format).to_string(), kind: utils::kind_from_format(format).to_string(),
format: format.as_str().to_string(), format: format.as_str().to_string(),
title: parsed.title, title: parsed.title,
series: parsed.series, series_id: insert_series_id,
volume: parsed.volume, volume: parsed.volume,
page_count: None, page_count: None,
thumbnail_path: None, thumbnail_path: None,
@@ -642,4 +707,34 @@ mod tests {
// No existing files in DB — nothing to delete anyway // No existing files in DB — nothing to delete anyway
assert!(!should_skip_deletions(true, 10, 0, 0)); assert!(!should_skip_deletions(true, 10, 0, 0));
} }
#[test]
fn batch_structs_use_series_id() {
    use crate::batch::{BookInsert, BookUpdate};

    // An insert carrying a resolved series link round-trips the id.
    let linked_series = Uuid::new_v4();
    let insert = BookInsert {
        book_id: Uuid::new_v4(),
        library_id: Uuid::new_v4(),
        kind: "comic".to_string(),
        format: "cbz".to_string(),
        title: "Test".to_string(),
        series_id: Some(linked_series),
        volume: Some(1),
        page_count: None,
        thumbnail_path: None,
    };
    assert_eq!(insert.series_id, Some(linked_series));

    // An update with no series carries a None series_id (NULL in the DB).
    let unlinked = BookUpdate {
        book_id: Uuid::new_v4(),
        title: "Test".to_string(),
        kind: "comic".to_string(),
        format: "cbz".to_string(),
        series_id: None,
        volume: None,
        page_count: None,
    };
    assert_eq!(unlinked.series_id, None);
}
} }