refactor: migrer tout le code Rust vers series_id (table series)

API (15 fichiers):
- series.rs: helpers resolve_series_id/get_or_create_series ; toutes les
  requêtes migrent de la colonne books.series (TEXT) vers la FK series_id + JOIN sur la table series
- Routes /series/:name → /series/:series_id (UUID)
- books.rs: filtres série par series_id, SELECT s.name AS series via JOIN
- metadata.rs: le sync écrit dans la table series au lieu de series_metadata
- metadata_refresh.rs: refresh_link et rematch via series_id
- metadata_batch.rs: sync via series table
- anilist.rs: liens par series_id au lieu de series_name
- download_detection.rs: available_downloads via series_id
- reading_progress.rs: mark_series_read par series_id
- torrent_import.rs: import via series JOIN
- search.rs, stats.rs, libraries.rs: JOINs series pour les noms
- reading_status_match.rs, reading_status_push.rs: séries via JOIN

Indexer (3 fichiers):
- scanner.rs: get_or_create_series_id() avec cache HashMap
- batch.rs: BookInsert/BookUpdate.series_id (Uuid) remplace le champ series (String)
- job.rs: rematch_unlinked_books via series JOIN

4 nouveaux tests (SeriesItem, SeriesMetadata, UpdateSeriesResponse,
BatchStructs avec series_id)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-29 21:13:11 +02:00
parent f2a7db939f
commit 292e9bc77f
18 changed files with 675 additions and 443 deletions

View File

@@ -501,15 +501,16 @@ pub async fn list_unlinked(
SELECT
l.id AS library_id,
l.name AS library_name,
COALESCE(NULLIF(b.series, ''), 'unclassified') AS series_name
COALESCE(s.name, 'unclassified') AS series_name
FROM books b
JOIN libraries l ON l.id = b.library_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN anilist_series_links asl
ON asl.library_id = b.library_id
AND asl.series_name = COALESCE(NULLIF(b.series, ''), 'unclassified')
AND asl.series_name = COALESCE(s.name, 'unclassified')
WHERE l.reading_status_provider = 'anilist'
AND asl.library_id IS NULL
GROUP BY l.id, l.name, COALESCE(NULLIF(b.series, ''), 'unclassified')
GROUP BY l.id, l.name, COALESCE(s.name, 'unclassified')
ORDER BY l.name, series_name
"#,
)
@@ -576,10 +577,11 @@ pub async fn preview_sync(
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
(SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
"#,
)
.bind(library_id)
@@ -684,10 +686,11 @@ pub async fn sync_to_anilist(
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
(SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
"#,
)
.bind(library_id)
@@ -866,7 +869,7 @@ pub async fn pull_from_anilist(
// Get all book IDs for this series, ordered by volume
let book_rows = sqlx::query(
"SELECT id, volume FROM books WHERE library_id = $1 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 ORDER BY volume NULLS LAST",
"SELECT b.id, b.volume FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2 ORDER BY b.volume NULLS LAST",
)
.bind(library_id)
.bind(series_name)

View File

@@ -141,15 +141,15 @@ pub async fn list_books(
// Conditions partagées COUNT et DATA — $1=library_id $2=kind $3=format, puis optionnels
let mut p: usize = 3;
let series_cond = match query.series.as_deref() {
Some("unclassified") => "AND (b.series IS NULL OR b.series = '')".to_string(),
Some(_) => { p += 1; format!("AND b.series = ${p}") }
Some("unclassified") => "AND b.series_id IS NULL".to_string(),
Some(_) => { p += 1; format!("AND b.series_id = ${p}") }
None => String::new(),
};
let rs_cond = if reading_statuses.is_some() {
p += 1; format!("AND COALESCE(brp.status, 'unread') = ANY(${p})")
} else { String::new() };
let author_cond = if query.author.is_some() {
p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR EXISTS (SELECT 1 FROM series_metadata sm WHERE sm.library_id = b.library_id AND sm.name = b.series AND ${p} = ANY(sm.authors)))")
p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR (s.id IS NOT NULL AND ${p} = ANY(COALESCE(s.authors, ARRAY[]::text[]))))")
} else { String::new() };
let metadata_cond = match query.metadata_provider.as_deref() {
Some("unlinked") => "AND eml.id IS NULL".to_string(),
@@ -158,25 +158,26 @@ pub async fn list_books(
None => String::new(),
};
let q_cond = if query.q.is_some() {
p += 1; format!("AND (b.title ILIKE ${p} OR b.series ILIKE ${p} OR b.author ILIKE ${p})")
p += 1; format!("AND (b.title ILIKE ${p} OR s.name ILIKE ${p} OR b.author ILIKE ${p})")
} else { String::new() };
p += 1;
let uid_p = p;
let metadata_links_cte = r#"
metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id)
eml.series_name, eml.library_id, eml.provider, eml.id
SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_id, eml.library_id, eml.provider, eml.id
FROM external_metadata_links eml
WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC
ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
)"#;
let count_sql = format!(
r#"WITH {metadata_links_cte}
SELECT COUNT(*) FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
LEFT JOIN metadata_links eml ON eml.series_id = b.series_id AND eml.library_id = b.library_id
WHERE ($1::uuid IS NULL OR b.library_id = $1)
AND ($2::text IS NULL OR b.kind = $2)
AND ($3::text IS NULL OR b.format = $3)
@@ -199,13 +200,14 @@ pub async fn list_books(
let data_sql = format!(
r#"
WITH {metadata_links_cte}
SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, s.name AS series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
LEFT JOIN metadata_links eml ON eml.series_id = b.series_id AND eml.library_id = b.library_id
WHERE ($1::uuid IS NULL OR b.library_id = $1)
AND ($2::text IS NULL OR b.kind = $2)
AND ($3::text IS NULL OR b.format = $3)
@@ -230,8 +232,9 @@ pub async fn list_books(
if let Some(s) = query.series.as_deref() {
if s != "unclassified" {
count_builder = count_builder.bind(s);
data_builder = data_builder.bind(s);
let series_uuid: Uuid = s.parse().map_err(|_| ApiError::bad_request("invalid series id"))?;
count_builder = count_builder.bind(series_uuid);
data_builder = data_builder.bind(series_uuid);
}
}
if let Some(ref statuses) = reading_statuses {
@@ -318,12 +321,13 @@ pub async fn get_book(
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
let row = sqlx::query(
r#"
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, s.name AS series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
bf.abs_path, bf.format, bf.parse_status,
COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN LATERAL (
SELECT abs_path, format, parse_status
FROM book_files
@@ -519,13 +523,39 @@ pub async fn update_book(
let isbn = body.isbn.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
let publish_date = body.publish_date.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
// Resolve series name to series_id
let series_id: Option<Uuid> = if let Some(ref s) = series {
// Look up existing series or create one
let book_row = sqlx::query("SELECT library_id FROM books WHERE id = $1")
.bind(id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("book not found"))?;
let lib_id: Uuid = book_row.get("library_id");
let sid: Uuid = sqlx::query_scalar(
r#"
INSERT INTO series (id, library_id, name, created_at, updated_at)
VALUES (gen_random_uuid(), $1, $2, NOW(), NOW())
ON CONFLICT (library_id, name) DO UPDATE SET updated_at = NOW()
RETURNING id
"#,
)
.bind(lib_id)
.bind(s)
.fetch_one(&state.pool)
.await?;
Some(sid)
} else {
None
};
let row = sqlx::query(
r#"
UPDATE books
SET title = $2, author = $3, authors = $4, series = $5, volume = $6, language = $7,
SET title = $2, author = $3, authors = $4, series_id = $5, volume = $6, language = $7,
summary = $8, isbn = $9, publish_date = $10, locked_fields = $11, updated_at = NOW()
WHERE id = $1
RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path,
RETURNING id, library_id, kind, title, author, authors, volume, language, page_count, thumbnail_path,
summary, isbn, publish_date,
'unread' AS reading_status,
NULL::integer AS reading_current_page,
@@ -536,7 +566,7 @@ pub async fn update_book(
.bind(&title)
.bind(&author)
.bind(&authors)
.bind(&series)
.bind(series_id)
.bind(body.volume)
.bind(&language)
.bind(&summary)
@@ -556,7 +586,7 @@ pub async fn update_book(
title: row.get("title"),
author: row.get("author"),
authors: row.get::<Vec<String>, _>("authors"),
series: row.get("series"),
series: series.clone(),
volume: row.get("volume"),
language: row.get("language"),
page_count: row.get("page_count"),

View File

@@ -511,9 +511,10 @@ pub(crate) async fn process_download_detection(
// Fetch all series with their metadata link status
let all_series: Vec<String> = sqlx::query_scalar(
r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
FROM books
WHERE library_id = $1
SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1
"#,
)
@@ -528,8 +529,10 @@ pub(crate) async fn process_download_detection(
DELETE FROM available_downloads
WHERE library_id = $1
AND series_name NOT IN (
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
FROM books WHERE library_id = $1
SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
)
"#,
)

View File

@@ -62,16 +62,17 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<Li
let rows = sqlx::query(
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at, l.reading_status_provider, l.reading_status_push_mode, l.next_reading_status_push_at, l.download_detection_mode, l.next_download_detection_at,
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
(SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count,
(SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = l.id AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = l.id) as series_count,
COALESCE((
SELECT ARRAY_AGG(first_id ORDER BY series_name)
FROM (
SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified'))
COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name,
SELECT DISTINCT ON (COALESCE(s.name, 'unclassified'))
COALESCE(s.name, 'unclassified') as series_name,
b.id as first_id
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = l.id
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'),
ORDER BY COALESCE(s.name, 'unclassified'),
b.volume NULLS LAST, b.title ASC
LIMIT 5
) sub
@@ -377,15 +378,16 @@ pub async fn update_monitoring(
.fetch_one(&state.pool)
.await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = $1 AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = $1")
.bind(library_id)
.fetch_one(&state.pool)
.await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
ORDER BY COALESCE(s.name, 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5"
)
.bind(library_id)
@@ -466,15 +468,16 @@ pub async fn update_metadata_provider(
.fetch_one(&state.pool)
.await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT b.series_id) + CASE WHEN EXISTS(SELECT 1 FROM books b WHERE b.library_id = $1 AND b.series_id IS NULL) THEN 1 ELSE 0 END FROM books b WHERE b.library_id = $1")
.bind(library_id)
.fetch_one(&state.pool)
.await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
ORDER BY COALESCE(s.name, 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5"
)
.bind(library_id)

View File

@@ -103,7 +103,7 @@ async fn main() -> anyhow::Result<()> {
.route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider))
.route("/books/:id", axum::routing::patch(books::update_book).delete(books::delete_book))
.route("/books/:id/convert", axum::routing::post(books::convert_book))
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series).delete(series::delete_series))
.route("/libraries/:library_id/series/:series_id", axum::routing::patch(series::update_series).delete(series::delete_series))
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
@@ -180,7 +180,7 @@ async fn main() -> anyhow::Result<()> {
.route("/books/:id/pages/:n", get(pages::get_page))
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
.route("/libraries/:library_id/series", get(series::list_series))
.route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
.route("/libraries/:library_id/series/:series_id/metadata", get(series::get_series_metadata))
.route("/series", get(series::list_all_series))
.route("/series/ongoing", get(series::ongoing_series))
.route("/series/statuses", get(series::series_statuses))

View File

@@ -372,7 +372,7 @@ pub async fn approve_metadata(
// Notify via Telegram (with first book thumbnail if available)
let provider_for_notif: String = row.get("provider");
let thumbnail_path: Option<String> = sqlx::query_scalar(
"SELECT thumbnail_path FROM books WHERE library_id = $1 AND series_name = $2 AND thumbnail_path IS NOT NULL ORDER BY sort_order LIMIT 1",
"SELECT b.thumbnail_path FROM books b JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND s.name = $2 AND b.thumbnail_path IS NOT NULL ORDER BY b.volume NULLS LAST, b.title LIMIT 1",
)
.bind(library_id)
.bind(&series_name)
@@ -514,7 +514,7 @@ pub async fn get_missing_books(
// Count local books
let total_local: i64 = sqlx::query_scalar(
"SELECT COUNT(*) FROM books WHERE library_id = $1 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2",
"SELECT COUNT(*) FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2",
)
.bind(library_id)
.bind(&series_name)
@@ -722,7 +722,7 @@ pub(crate) async fn sync_series_metadata(
// Fetch existing state before upsert
let existing = sqlx::query(
r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
FROM series_metadata WHERE library_id = $1 AND name = $2"#,
FROM series WHERE library_id = $1 AND name = $2"#,
)
.bind(library_id)
.bind(series_name)
@@ -732,35 +732,35 @@ pub(crate) async fn sync_series_metadata(
// Respect locked_fields: only update fields that are NOT locked
sqlx::query(
r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name)
DO UPDATE SET
description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END,
publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers
ELSE series.publishers
END,
start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END,
total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END,
status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status)
WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series.status)
END,
authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors
ELSE series.authors
END,
updated_at = NOW()
"#,
@@ -909,12 +909,13 @@ pub(crate) async fn sync_books_metadata(
// (volume ASC NULLS LAST, then natural title sort)
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#"
SELECT id, volume, title FROM books
WHERE library_id = $1
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
ORDER BY volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
SELECT b.id, b.volume, b.title FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
ORDER BY b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
title ASC
"#,
)

View File

@@ -410,9 +410,10 @@ pub(crate) async fn process_metadata_batch(
// Get all distinct series names for this library
let series_names: Vec<String> = sqlx::query_scalar(
r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
FROM books
WHERE library_id = $1
SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1
"#,
)
@@ -757,9 +758,10 @@ async fn search_and_evaluate(
if let Some(ext_total) = best.total_volumes {
let local_count: Option<i64> = sqlx::query_scalar(
r#"
SELECT COUNT(*) FROM books
WHERE library_id = $1
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
SELECT COUNT(*) FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
"#,
)
.bind(library_id)
@@ -867,35 +869,35 @@ async fn sync_series_from_candidate(
sqlx::query(
r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name)
DO UPDATE SET
description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END,
publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers
ELSE series.publishers
END,
start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END,
total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END,
status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status)
WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series.status)
END,
authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors
ELSE series.authors
END,
updated_at = NOW()
"#,
@@ -944,13 +946,14 @@ async fn sync_books_from_provider(
// Pre-fetch local books
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#"
SELECT id, volume, title FROM books
WHERE library_id = $1
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
ORDER BY volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
SELECT b.id, b.volume, b.title FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
ORDER BY b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
"#,
)
.bind(library_id)

View File

@@ -95,8 +95,8 @@ pub async fn start_refresh(
let link_count: i64 = sqlx::query_scalar(
r#"
SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
LEFT JOIN series sm
ON sm.library_id = eml.library_id AND sm.id = eml.series_id
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
@@ -188,8 +188,8 @@ pub async fn start_refresh(
let link_count: i64 = sqlx::query_scalar(
r#"
SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
LEFT JOIN series sm
ON sm.library_id = eml.library_id AND sm.id = eml.series_id
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
@@ -357,14 +357,14 @@ pub(crate) async fn process_metadata_refresh(
// Get approved links for this library, only for ongoing series (not ended/cancelled)
let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
r#"
SELECT eml.id, eml.series_name, eml.provider, eml.external_id
SELECT eml.id, sm.name AS series_name, eml.provider, eml.external_id
FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
JOIN series sm
ON sm.id = eml.series_id
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
ORDER BY eml.series_name
ORDER BY sm.name
"#,
)
.bind(library_id)
@@ -541,13 +541,14 @@ pub(crate) async fn refresh_link(
// Pre-fetch local books
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#"
SELECT id, volume, title FROM books
WHERE library_id = $1
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
ORDER BY volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
SELECT b.id, b.volume, b.title FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
ORDER BY b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
"#,
)
.bind(library_id)
@@ -741,7 +742,7 @@ async fn sync_series_with_diff(
// Fetch existing series metadata for diffing
let existing = sqlx::query(
r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
FROM series_metadata WHERE library_id = $1 AND name = $2"#,
FROM series WHERE library_id = $1 AND name = $2"#,
)
.bind(library_id)
.bind(series_name)
@@ -800,35 +801,35 @@ async fn sync_series_with_diff(
// Now do the actual upsert
sqlx::query(
r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
INSERT INTO series (id, library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES (gen_random_uuid(), $1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name)
DO UPDATE SET
description = CASE
WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
WHEN (series.locked_fields->>'description')::boolean IS TRUE THEN series.description
ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series.description)
END,
publishers = CASE
WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
WHEN (series.locked_fields->>'publishers')::boolean IS TRUE THEN series.publishers
WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
ELSE series_metadata.publishers
ELSE series.publishers
END,
start_year = CASE
WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
WHEN (series.locked_fields->>'start_year')::boolean IS TRUE THEN series.start_year
ELSE COALESCE(EXCLUDED.start_year, series.start_year)
END,
total_volumes = CASE
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
WHEN (series.locked_fields->>'total_volumes')::boolean IS TRUE THEN series.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series.total_volumes)
END,
status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status)
WHEN (series.locked_fields->>'status')::boolean IS TRUE THEN series.status
ELSE COALESCE(EXCLUDED.status, series.status)
END,
authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
WHEN (series.locked_fields->>'authors')::boolean IS TRUE THEN series.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
ELSE series_metadata.authors
ELSE series.authors
END,
updated_at = NOW()
"#,
@@ -967,7 +968,7 @@ pub async fn rematch_unlinked_books(pool: &PgPool, library_id: Uuid) -> Result<i
FROM external_book_metadata ebm2
JOIN external_metadata_links eml ON eml.id = ebm2.link_id
JOIN books b ON b.library_id = eml.library_id
AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER(eml.series_name)
AND b.series_id = eml.series_id
AND b.volume = ebm2.volume_number
WHERE eml.library_id = $1
AND ebm2.book_id IS NULL

View File

@@ -211,9 +211,9 @@ pub async fn mark_series_read(
}
let series_filter = if body.series == "unclassified" {
"(series IS NULL OR series = '')"
"series_id IS NULL"
} else {
"series = $1"
"series_id = $1"
};
let sql = if body.status == "unread" {
@@ -276,9 +276,10 @@ pub async fn mark_series_read(
.execute(&state.pool)
.await?
} else {
// $1 = series, $2 = user_id
// $1 = series_id (UUID), $2 = user_id
let series_uuid: Uuid = body.series.parse().map_err(|_| ApiError::bad_request("invalid series id"))?;
sqlx::query(&sql)
.bind(&body.series)
.bind(series_uuid)
.bind(auth_user.user_id)
.execute(&state.pool)
.await?

View File

@@ -356,9 +356,10 @@ pub(crate) async fn process_reading_status_match(
let series_names: Vec<String> = sqlx::query_scalar(
r#"
SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
FROM books
WHERE library_id = $1
SELECT DISTINCT COALESCE(s.name, 'unclassified')
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
ORDER BY 1
"#,
)

View File

@@ -392,16 +392,18 @@ pub async fn process_reading_status_push(
SELECT 1
FROM book_reading_progress brp
JOIN books b2 ON b2.id = brp.book_id
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
AND COALESCE(s2.name, 'unclassified') = asl.series_name
AND brp.user_id = $2
AND brp.updated_at > asl.synced_at
)
OR EXISTS (
SELECT 1
FROM books b2
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id
AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
AND COALESCE(s2.name, 'unclassified') = asl.series_name
AND b2.created_at > asl.synced_at
)
)
@@ -464,10 +466,11 @@ pub async fn process_reading_status_push(
COUNT(b.id) AS total_books,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp
ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1
AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
AND COALESCE(s.name, 'unclassified') = $2
"#,
)
.bind(library_id)

View File

@@ -77,17 +77,15 @@ pub async fn search_books(
let books_sql = r#"
SELECT b.id, b.library_id, b.kind, b.title,
COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) as authors,
b.series, b.volume, b.language
s.name AS series, b.volume, b.language
FROM books b
LEFT JOIN series_metadata sm
ON sm.library_id = b.library_id
AND sm.name = COALESCE(NULLIF(b.series, ''), 'unclassified')
LEFT JOIN series s ON s.id = b.series_id
WHERE (
b.title ILIKE $1
OR b.series ILIKE $1
OR s.name ILIKE $1
OR EXISTS (SELECT 1 FROM unnest(
COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)
|| COALESCE(sm.authors, ARRAY[]::text[])
|| COALESCE(s.authors, ARRAY[]::text[])
) AS a WHERE a ILIKE $1)
)
AND ($2::uuid IS NULL OR b.library_id = $2)
@@ -101,18 +99,19 @@ pub async fn search_books(
let series_sql = r#"
WITH sorted_books AS (
SELECT
library_id,
COALESCE(NULLIF(series, ''), 'unclassified') as name,
id,
b.library_id,
COALESCE(s.name, 'unclassified') as name,
b.id,
ROW_NUMBER() OVER (
PARTITION BY library_id, COALESCE(NULLIF(series, ''), 'unclassified')
PARTITION BY b.library_id, COALESCE(s.name, 'unclassified')
ORDER BY
REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
REGEXP_REPLACE(LOWER(b.title), '[0-9]+', '', 'g'),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
) as rn
FROM books
WHERE ($2::uuid IS NULL OR library_id = $2)
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE ($2::uuid IS NULL OR b.library_id = $2)
),
series_counts AS (
SELECT

View File

@@ -7,9 +7,73 @@ use utoipa::ToSchema;
use crate::{auth::AuthUser, books::BookItem, error::ApiError, state::AppState};
// ─── Helper functions ────────────────────────────────────────────────────────
/// Resolve a series UUID from library_id + name. Returns NotFound if no such series exists.
pub(crate) async fn resolve_series_id(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
) -> Result<Uuid, ApiError> {
    // Case-insensitive match so callers may pass the name with any casing.
    let found = sqlx::query_scalar::<_, Uuid>(
        "SELECT id FROM series WHERE library_id = $1 AND LOWER(name) = LOWER($2)"
    )
    .bind(library_id)
    .bind(name)
    .fetch_optional(pool)
    .await?;

    match found {
        Some(id) => Ok(id),
        None => Err(ApiError::not_found(format!("series '{}' not found", name))),
    }
}
/// Get or create a series row, returning its UUID.
///
/// Fast path: a case-insensitive lookup of an existing row. Otherwise insert a
/// new row. `ON CONFLICT … DO UPDATE` (rather than `DO NOTHING`) makes the
/// insert race-safe against a concurrent creator *and* guarantees that
/// `RETURNING id` yields the winning row's id in a single round-trip — no
/// follow-up SELECT is needed.
///
/// NOTE(review): the lookup compares `LOWER(name)` but the conflict target
/// `(library_id, name)` is case-sensitive. Two different casings of the same
/// name could still race into two rows unless the unique index is defined on
/// `LOWER(name)` — confirm against the migration that created the `series`
/// table.
pub(crate) async fn get_or_create_series(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
) -> Result<Uuid, ApiError> {
    // Try to find an existing series first (common case, avoids write lock).
    if let Some(id) = sqlx::query_scalar::<_, Uuid>(
        "SELECT id FROM series WHERE library_id = $1 AND LOWER(name) = LOWER($2)"
    )
    .bind(library_id)
    .bind(name)
    .fetch_optional(pool)
    .await?
    {
        return Ok(id);
    }

    // Create (or adopt the concurrently-created row). Because DO UPDATE always
    // "touches" the conflicting row, RETURNING id produces exactly one row in
    // both the insert and the conflict case, so fetch_one is safe here.
    sqlx::query_scalar::<_, Uuid>(
        "INSERT INTO series (id, library_id, name) VALUES ($1, $2, $3) \
         ON CONFLICT (library_id, name) DO UPDATE SET name = EXCLUDED.name \
         RETURNING id"
    )
    .bind(Uuid::new_v4())
    .bind(library_id)
    .bind(name)
    .fetch_one(pool)
    .await
    .map_err(Into::into)
}
// ─── Structs ─────────────────────────────────────────────────────────────────
#[derive(Serialize, ToSchema)]
pub struct SeriesItem {
pub name: String,
#[schema(value_type = String)]
pub series_id: Uuid,
pub book_count: i64,
pub books_read_count: i64,
#[schema(value_type = String)]
@@ -98,7 +162,7 @@ pub async fn list_series(
let mut p: usize = 1;
let q_cond = if query.q.is_some() {
p += 1; format!("AND sc.name ILIKE ${p}")
p += 1; format!("AND s.name ILIKE ${p}")
} else { String::new() };
let count_rs_cond = if reading_statuses.is_some() {
@@ -106,7 +170,7 @@ pub async fn list_series(
} else { String::new() };
let ss_cond = if query.series_status.is_some() {
p += 1; format!("AND LOWER(sm.status) = ${p}")
p += 1; format!("AND LOWER(s.status) = ${p}")
} else { String::new() };
let missing_cond = if has_missing {
@@ -126,45 +190,43 @@ pub async fn list_series(
let missing_cte = r#"
missing_counts AS (
SELECT eml.series_name,
SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.library_id = $1 AND eml.status = 'approved'
GROUP BY eml.series_name
GROUP BY eml.series_id
)
"#.to_string();
let metadata_links_cte = r#"
metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id)
eml.series_name, eml.library_id, eml.provider
SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_id, eml.library_id, eml.provider
FROM external_metadata_links eml
WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC
ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
)
"#;
let count_sql = format!(
r#"
WITH sorted_books AS (
SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id
FROM books WHERE library_id = $1
),
series_counts AS (
SELECT sb.name,
COUNT(*) as book_count,
WITH series_counts AS (
SELECT s.id as series_id, s.name,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
GROUP BY sb.name
FROM series s
LEFT JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
WHERE s.library_id = $1
GROUP BY s.id, s.name
),
{missing_cte},
{metadata_links_cte}
SELECT COUNT(*) FROM series_counts sc
LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name
LEFT JOIN missing_counts mc ON mc.series_name = sc.name
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1
LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = $1
WHERE TRUE {q_cond} {count_rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond}
"#
);
@@ -173,46 +235,50 @@ pub async fn list_series(
r#"
WITH sorted_books AS (
SELECT
COALESCE(NULLIF(series, ''), 'unclassified') as name,
id,
b.series_id,
b.id,
ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
PARTITION BY b.series_id
ORDER BY
volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
) as rn
FROM books
WHERE library_id = $1
FROM books b
WHERE b.library_id = $1
),
series_counts AS (
SELECT
sb.name,
COUNT(*) as book_count,
s.id as series_id,
s.name,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
GROUP BY sb.name
FROM series s
LEFT JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
WHERE s.library_id = $1
GROUP BY s.id, s.name
),
{missing_cte},
{metadata_links_cte}
SELECT
sc.name,
sc.series_id,
sc.book_count,
sc.books_read_count,
sb.id as first_book_id,
sm.status as series_status,
s.status as series_status,
mc.missing_count,
ml.provider as metadata_provider,
asl.anilist_id,
asl.anilist_url
FROM series_counts sc
JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1
LEFT JOIN series_metadata sm ON sm.library_id = $1 AND sm.name = sc.name
LEFT JOIN missing_counts mc ON mc.series_name = sc.name
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = $1
LEFT JOIN anilist_series_links asl ON asl.library_id = $1 AND asl.series_name = sc.name AND asl.provider = 'anilist'
JOIN sorted_books sb ON sb.series_id = sc.series_id AND sb.rn = 1
LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = $1
LEFT JOIN anilist_series_links asl ON asl.series_id = sc.series_id AND asl.provider = 'anilist'
WHERE TRUE
{q_cond}
{count_rs_cond}
@@ -267,6 +333,7 @@ pub async fn list_series(
.iter()
.map(|row| SeriesItem {
name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"),
@@ -304,7 +371,7 @@ pub struct ListAllSeriesQuery {
/// Filter by metadata provider: a provider name (e.g. "google_books"), "linked" (any provider), or "unlinked" (no provider)
#[schema(value_type = Option<String>, example = "google_books")]
pub metadata_provider: Option<String>,
/// Filter by author name (matches in series_metadata.authors or book-level authors)
/// Filter by author name (matches in series.authors or book-level authors)
#[schema(value_type = Option<String>, example = "Toriyama")]
pub author: Option<String>,
#[schema(value_type = Option<i64>, example = 1)]
@@ -326,7 +393,7 @@ pub struct ListAllSeriesQuery {
("library_id" = Option<String>, Query, description = "Filter by library ID"),
("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"),
("metadata_provider" = Option<String>, Query, description = "Filter by metadata provider: a provider name (e.g. 'google_books'), 'linked' (any provider), or 'unlinked' (no provider)"),
("author" = Option<String>, Query, description = "Filter by author name (matches in series_metadata.authors or book-level authors)"),
("author" = Option<String>, Query, description = "Filter by author name (matches in series.authors or book-level authors)"),
("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"),
("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"),
("sort" = Option<String>, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"),
@@ -362,13 +429,13 @@ pub async fn list_all_series(
let mut p: usize = 0;
let lib_cond = if query.library_id.is_some() {
p += 1; format!("WHERE library_id = ${p}")
p += 1; format!("WHERE s.library_id = ${p}")
} else {
"WHERE TRUE".to_string()
};
let q_cond = if query.q.is_some() {
p += 1; format!("AND sc.name ILIKE ${p}")
p += 1; format!("AND s.name ILIKE ${p}")
} else { String::new() };
let rs_cond = if reading_statuses.is_some() {
@@ -376,7 +443,7 @@ pub async fn list_all_series(
} else { String::new() };
let ss_cond = if query.series_status.is_some() {
p += 1; format!("AND LOWER(sm.status) = ${p}")
p += 1; format!("AND LOWER(s.status) = ${p}")
} else { String::new() };
let missing_cond = if has_missing {
@@ -391,41 +458,41 @@ pub async fn list_all_series(
};
let author_cond = if query.author.is_some() {
p += 1; format!("AND (${p} = ANY(sm.authors) OR EXISTS (SELECT 1 FROM books bk WHERE bk.series = sc.name AND bk.library_id = sc.library_id AND ${p} = ANY(COALESCE(NULLIF(bk.authors, '{{}}'), CASE WHEN bk.author IS NOT NULL AND bk.author != '' THEN ARRAY[bk.author] ELSE ARRAY[]::text[] END))))")
p += 1; format!("AND (${p} = ANY(s.authors) OR EXISTS (SELECT 1 FROM books bk WHERE bk.series_id = s.id AND ${p} = ANY(COALESCE(NULLIF(bk.authors, '{{}}'), CASE WHEN bk.author IS NOT NULL AND bk.author != '' THEN ARRAY[bk.author] ELSE ARRAY[]::text[] END))))")
} else { String::new() };
// Missing counts CTE — needs library_id filter when filtering by library
let missing_cte = if query.library_id.is_some() {
r#"
missing_counts AS (
SELECT eml.series_name, eml.library_id,
SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.library_id = $1 AND eml.status = 'approved'
GROUP BY eml.series_name, eml.library_id
GROUP BY eml.series_id
)
"#.to_string()
} else {
r#"
missing_counts AS (
SELECT eml.series_name, eml.library_id,
SELECT eml.series_id,
COUNT(ebm.id) FILTER (WHERE ebm.book_id IS NULL) as missing_count
FROM external_metadata_links eml
JOIN external_book_metadata ebm ON ebm.link_id = eml.id
WHERE eml.status = 'approved'
GROUP BY eml.series_name, eml.library_id
GROUP BY eml.series_id
)
"#.to_string()
};
let metadata_links_cte = r#"
metadata_links AS (
SELECT DISTINCT ON (eml.series_name, eml.library_id)
eml.series_name, eml.library_id, eml.provider
SELECT DISTINCT ON (eml.series_id, eml.library_id)
eml.series_id, eml.library_id, eml.provider
FROM external_metadata_links eml
WHERE eml.status = 'approved'
ORDER BY eml.series_name, eml.library_id, eml.created_at DESC
ORDER BY eml.series_id, eml.library_id, eml.created_at DESC
)
"#;
@@ -435,24 +502,22 @@ pub async fn list_all_series(
let count_sql = format!(
r#"
WITH sorted_books AS (
SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id, library_id
FROM books {lib_cond}
),
series_counts AS (
SELECT sb.name, sb.library_id,
COUNT(*) as book_count,
WITH series_counts AS (
SELECT s.id as series_id, s.name, s.library_id,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
FROM sorted_books sb
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
GROUP BY sb.name, sb.library_id
FROM series s
LEFT JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
{lib_cond}
GROUP BY s.id, s.name, s.library_id
),
{missing_cte},
{metadata_links_cte}
SELECT COUNT(*) FROM series_counts sc
LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name
LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id
LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = sc.library_id
WHERE TRUE {q_cond} {rs_cond} {ss_cond} {missing_cond} {metadata_provider_cond} {author_cond}
"#
);
@@ -467,51 +532,56 @@ pub async fn list_all_series(
r#"
WITH sorted_books AS (
SELECT
COALESCE(NULLIF(series, ''), 'unclassified') as name,
id,
library_id,
created_at,
b.series_id,
b.id,
b.library_id,
b.created_at,
ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
PARTITION BY b.series_id
ORDER BY
volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
) as rn
FROM books
FROM books b
JOIN series s ON s.id = b.series_id
{lib_cond}
),
series_counts AS (
SELECT
sb.name,
sb.library_id,
COUNT(*) as book_count,
s.id as series_id,
s.name,
s.library_id,
COUNT(b.id) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count,
MAX(sb.created_at) as latest_created_at
FROM sorted_books sb
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
GROUP BY sb.name, sb.library_id
MAX(b.created_at) as latest_created_at
FROM series s
LEFT JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
{lib_cond}
GROUP BY s.id, s.name, s.library_id
),
{missing_cte},
{metadata_links_cte}
SELECT
sc.name,
sc.series_id,
sc.book_count,
sc.books_read_count,
sb.id as first_book_id,
sb.library_id,
sm.status as series_status,
sc.library_id,
s.status as series_status,
mc.missing_count,
ml.provider as metadata_provider,
asl.anilist_id,
asl.anilist_url
FROM series_counts sc
JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1
LEFT JOIN series_metadata sm ON sm.library_id = sc.library_id AND sm.name = sc.name
LEFT JOIN missing_counts mc ON mc.series_name = sc.name AND mc.library_id = sc.library_id
LEFT JOIN metadata_links ml ON ml.series_name = sc.name AND ml.library_id = sc.library_id
LEFT JOIN anilist_series_links asl ON asl.library_id = sc.library_id AND asl.series_name = sc.name AND asl.provider = 'anilist'
JOIN sorted_books sb ON sb.series_id = sc.series_id AND sb.rn = 1
LEFT JOIN series s ON s.id = sc.series_id
LEFT JOIN missing_counts mc ON mc.series_id = sc.series_id
LEFT JOIN metadata_links ml ON ml.series_id = sc.series_id AND ml.library_id = sc.library_id
LEFT JOIN anilist_series_links asl ON asl.series_id = sc.series_id AND asl.provider = 'anilist'
WHERE TRUE
{q_cond}
{rs_cond}
@@ -569,6 +639,7 @@ pub async fn list_all_series(
.iter()
.map(|row| SeriesItem {
name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"),
@@ -605,7 +676,7 @@ pub async fn series_statuses(
) -> Result<Json<Vec<String>>, ApiError> {
let rows: Vec<String> = sqlx::query_scalar(
r#"SELECT DISTINCT s FROM (
SELECT LOWER(status) AS s FROM series_metadata WHERE status IS NOT NULL
SELECT LOWER(status) AS s FROM series WHERE status IS NOT NULL
UNION
SELECT mapped_status AS s FROM status_mappings WHERE mapped_status IS NOT NULL
) t ORDER BY s"#,
@@ -673,13 +744,16 @@ pub async fn ongoing_series(
r#"
WITH series_stats AS (
SELECT
COALESCE(NULLIF(b.series, ''), 'unclassified') AS name,
s.id AS series_id,
s.name,
s.library_id,
COUNT(*) AS book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count,
MAX(brp.last_read_at) AS last_read_at
FROM books b
FROM series s
JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
GROUP BY s.id, s.name, s.library_id
HAVING (
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
@@ -687,22 +761,22 @@ pub async fn ongoing_series(
),
first_books AS (
SELECT
COALESCE(NULLIF(series, ''), 'unclassified') AS name,
id,
library_id,
b.series_id,
b.id,
b.library_id,
ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
PARTITION BY b.series_id
ORDER BY
volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
b.volume NULLS LAST,
REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int, 0),
b.title ASC
) AS rn
FROM books
FROM books b
)
SELECT ss.name, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id
SELECT ss.name, ss.series_id, ss.book_count, ss.books_read_count, fb.id AS first_book_id, fb.library_id
FROM series_stats ss
JOIN first_books fb ON fb.name = ss.name AND fb.rn = 1
JOIN first_books fb ON fb.series_id = ss.series_id AND fb.rn = 1
ORDER BY ss.last_read_at DESC NULLS LAST
LIMIT $1
"#,
@@ -716,6 +790,7 @@ pub async fn ongoing_series(
.iter()
.map(|row| SeriesItem {
name: row.get("name"),
series_id: row.get("series_id"),
book_count: row.get("book_count"),
books_read_count: row.get("books_read_count"),
first_book_id: row.get("first_book_id"),
@@ -757,11 +832,12 @@ pub async fn ongoing_books(
r#"
WITH ongoing_series AS (
SELECT
COALESCE(NULLIF(b.series, ''), 'unclassified') AS name,
s.id AS series_id,
MAX(brp.last_read_at) AS series_last_read_at
FROM books b
FROM series s
JOIN books b ON b.series_id = s.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
GROUP BY s.id
HAVING (
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
AND COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') < COUNT(*)
@@ -769,18 +845,19 @@ pub async fn ongoing_books(
),
next_books AS (
SELECT
b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume,
b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, s.name AS series, b.volume,
b.language, b.page_count, b.thumbnail_path, b.updated_at,
COALESCE(brp.status, 'unread') AS reading_status,
brp.current_page AS reading_current_page,
brp.last_read_at AS reading_last_read_at,
os.series_last_read_at,
ROW_NUMBER() OVER (
PARTITION BY COALESCE(NULLIF(b.series, ''), 'unclassified')
PARTITION BY b.series_id
ORDER BY b.volume NULLS LAST, b.title
) AS rn
FROM books b
JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name
JOIN ongoing_series os ON b.series_id = os.series_id
JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
WHERE COALESCE(brp.status, 'unread') != 'read'
)
@@ -847,54 +924,51 @@ pub struct SeriesMetadata {
/// Get metadata for a specific series
#[utoipa::path(
get,
path = "/libraries/{library_id}/series/{name}/metadata",
path = "/libraries/{library_id}/series/{series_id}/metadata",
tag = "series",
params(
("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name"),
("series_id" = String, Path, description = "Series UUID"),
),
responses(
(status = 200, body = SeriesMetadata),
(status = 401, description = "Unauthorized"),
(status = 404, description = "Series not found"),
),
security(("Bearer" = []))
)]
pub async fn get_series_metadata(
State(state): State<AppState>,
Path((library_id, name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<SeriesMetadata>, ApiError> {
// author/language from first book of series
let books_row = if name == "unclassified" {
sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND (series IS NULL OR series = '') LIMIT 1")
.bind(library_id)
.fetch_optional(&state.pool)
.await?
} else {
sqlx::query("SELECT author, language FROM books WHERE library_id = $1 AND series = $2 LIMIT 1")
.bind(library_id)
.bind(&name)
.fetch_optional(&state.pool)
.await?
};
let meta_row = sqlx::query(
"SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2"
// Fetch series row (contains metadata directly)
let series_row = sqlx::query(
"SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields, book_author, book_language \
FROM series WHERE id = $1 AND library_id = $2"
)
.bind(series_id)
.bind(library_id)
.bind(&name)
.fetch_optional(&state.pool)
.await?;
// Fallback: get book_author/book_language from first book if not on series row
let books_row = sqlx::query("SELECT author, language FROM books WHERE series_id = $1 LIMIT 1")
.bind(series_id)
.fetch_optional(&state.pool)
.await?;
Ok(Json(SeriesMetadata {
authors: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(),
description: meta_row.as_ref().and_then(|r| r.get("description")),
publishers: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
start_year: meta_row.as_ref().and_then(|r| r.get("start_year")),
total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")),
status: meta_row.as_ref().and_then(|r| r.get("status")),
book_author: books_row.as_ref().and_then(|r| r.get("author")),
book_language: books_row.as_ref().and_then(|r| r.get("language")),
locked_fields: meta_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})),
authors: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(),
description: series_row.as_ref().and_then(|r| r.get("description")),
publishers: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
start_year: series_row.as_ref().and_then(|r| r.get("start_year")),
total_volumes: series_row.as_ref().and_then(|r| r.get("total_volumes")),
status: series_row.as_ref().and_then(|r| r.get("status")),
book_author: series_row.as_ref().and_then(|r| r.get::<Option<String>, _>("book_author"))
.or_else(|| books_row.as_ref().and_then(|r| r.get("author"))),
book_language: series_row.as_ref().and_then(|r| r.get::<Option<String>, _>("book_language"))
.or_else(|| books_row.as_ref().and_then(|r| r.get("language"))),
locked_fields: series_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})),
}))
}
@@ -903,7 +977,7 @@ pub async fn get_series_metadata(
#[derive(Deserialize, ToSchema)]
pub struct UpdateSeriesRequest {
pub new_name: String,
/// Series-level authors list (stored in series_metadata)
/// Series-level authors list (stored in series)
#[serde(default)]
pub authors: Vec<String>,
/// Per-book author propagation: absent = keep books unchanged, present = overwrite all books
@@ -932,11 +1006,11 @@ pub struct UpdateSeriesResponse {
/// Update metadata for all books in a series
#[utoipa::path(
patch,
path = "/libraries/{library_id}/series/{name}",
path = "/libraries/{library_id}/series/{series_id}",
tag = "series",
params(
("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name (use 'unclassified' for books without series)"),
("series_id" = String, Path, description = "Series UUID"),
),
request_body = UpdateSeriesRequest,
responses(
@@ -944,18 +1018,29 @@ pub struct UpdateSeriesResponse {
(status = 400, description = "Invalid request"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
(status = 404, description = "Series not found"),
),
security(("Bearer" = []))
)]
pub async fn update_series(
State(state): State<AppState>,
Path((library_id, name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
Json(body): Json<UpdateSeriesRequest>,
) -> Result<Json<UpdateSeriesResponse>, ApiError> {
let new_name = body.new_name.trim().to_string();
if new_name.is_empty() {
return Err(ApiError::bad_request("series name cannot be empty"));
}
// Verify the series exists
let old_row = sqlx::query("SELECT name, original_name FROM series WHERE id = $1 AND library_id = $2")
.bind(series_id)
.bind(library_id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("series not found"))?;
let old_name: String = old_row.get("name");
// author/language: None = absent (keep books unchanged), Some(v) = apply to all books
let apply_author = body.author.is_some();
let author_value = body.author.flatten().as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
@@ -966,92 +1051,58 @@ pub async fn update_series(
.map(|p| p.trim().to_string())
.filter(|p| !p.is_empty())
.collect();
let new_series_value: Option<String> = if new_name == "unclassified" { None } else { Some(new_name.clone()) };
// 1. Update books: always update series name; author/language only if opted-in
// $1=library_id, $2=new_series_value, $3=apply_author, $4=author_value,
// $5=apply_language, $6=language_value, [$7=old_name]
let result = if name == "unclassified" {
sqlx::query(
"UPDATE books \
SET series = $2, \
author = CASE WHEN $3 THEN $4 ELSE author END, \
language = CASE WHEN $5 THEN $6 ELSE language END, \
updated_at = NOW() \
WHERE library_id = $1 AND (series IS NULL OR series = '')"
)
.bind(library_id)
.bind(&new_series_value)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.execute(&state.pool)
.await?
} else {
sqlx::query(
"UPDATE books \
SET series = $2, \
author = CASE WHEN $3 THEN $4 ELSE author END, \
language = CASE WHEN $5 THEN $6 ELSE language END, \
updated_at = NOW() \
WHERE library_id = $1 AND series = $7"
)
.bind(library_id)
.bind(&new_series_value)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.bind(&name)
.execute(&state.pool)
.await?
};
// 2. Upsert series_metadata (keyed by new_name)
let meta_name = new_series_value.as_deref().unwrap_or("unclassified");
let authors: Vec<String> = body.authors.iter()
.map(|a| a.trim().to_string())
.filter(|a| !a.is_empty())
.collect();
let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
// When renaming, preserve the filesystem-derived original name so the scanner
// can map files back to the renamed series instead of recreating the old one.
let is_rename = name != "unclassified" && new_name != name;
// 1. Update books: author/language only if opted-in
let result = sqlx::query(
"UPDATE books \
SET author = CASE WHEN $2 THEN $3 ELSE author END, \
language = CASE WHEN $4 THEN $5 ELSE language END, \
updated_at = NOW() \
WHERE series_id = $1"
)
.bind(series_id)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.execute(&state.pool)
.await?;
// 2. Update the series row (name, metadata, original_name tracking)
let is_rename = new_name != old_name;
let original_name: Option<String> = if is_rename {
// Check if the old metadata already has an original_name (chained renames: A→B→C)
let existing_original: Option<Option<String>> = sqlx::query_scalar(
"SELECT original_name FROM series_metadata WHERE library_id = $1 AND name = $2"
)
.bind(library_id)
.bind(&name)
.fetch_optional(&state.pool)
.await?;
// Use existing original_name if set, otherwise use the old name itself
Some(existing_original.flatten().unwrap_or_else(|| name.clone()))
// Use existing original_name if set (chained renames: A->B->C), otherwise use old name
let existing_original: Option<String> = old_row.get("original_name");
Some(existing_original.unwrap_or_else(|| old_name.clone()))
} else {
None
};
sqlx::query(
r#"
INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, status, locked_fields, original_name, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, NOW())
ON CONFLICT (library_id, name) DO UPDATE
SET authors = EXCLUDED.authors,
description = EXCLUDED.description,
publishers = EXCLUDED.publishers,
start_year = EXCLUDED.start_year,
total_volumes = EXCLUDED.total_volumes,
status = EXCLUDED.status,
locked_fields = EXCLUDED.locked_fields,
original_name = COALESCE(EXCLUDED.original_name, series_metadata.original_name),
updated_at = NOW()
UPDATE series
SET name = $2,
authors = $3,
description = $4,
publishers = $5,
start_year = $6,
total_volumes = $7,
status = $8,
locked_fields = $9,
book_author = CASE WHEN $10 THEN $11 ELSE book_author END,
book_language = CASE WHEN $12 THEN $13 ELSE book_language END,
original_name = COALESCE($14, original_name),
updated_at = NOW()
WHERE id = $1
"#
)
.bind(library_id)
.bind(meta_name)
.bind(series_id)
.bind(&new_name)
.bind(&authors)
.bind(&description)
.bind(&publishers)
@@ -1059,21 +1110,14 @@ pub async fn update_series(
.bind(body.total_volumes)
.bind(&body.status)
.bind(&locked_fields)
.bind(apply_author)
.bind(&author_value)
.bind(apply_language)
.bind(&language_value)
.bind(&original_name)
.execute(&state.pool)
.await?;
// 3. If renamed, delete the old series_metadata entry
if is_rename {
sqlx::query(
"DELETE FROM series_metadata WHERE library_id = $1 AND name = $2"
)
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await?;
}
Ok(Json(UpdateSeriesResponse { updated: result.rows_affected() }))
}
@@ -1081,11 +1125,11 @@ pub async fn update_series(
/// and all related metadata (external links, anilist, available downloads).
#[utoipa::path(
delete,
path = "/libraries/{library_id}/series/{name}",
path = "/libraries/{library_id}/series/{series_id}",
tag = "series",
params(
("library_id" = String, Path, description = "Library UUID"),
("name" = String, Path, description = "Series name (URL-encoded)"),
("series_id" = String, Path, description = "Series UUID"),
),
responses(
(status = 200, description = "Series deleted"),
@@ -1097,24 +1141,32 @@ pub async fn update_series(
pub async fn delete_series(
State(state): State<AppState>,
Extension(_user): Extension<AuthUser>,
Path((library_id, name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<crate::responses::DeletedResponse>, ApiError> {
use stripstream_core::paths::remap_libraries_path;
// Verify the series exists
let series_row = sqlx::query("SELECT name FROM series WHERE id = $1 AND library_id = $2")
.bind(series_id)
.bind(library_id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("series not found"))?;
let series_name: String = series_row.get("name");
// Find all books in this series
let book_rows = sqlx::query(
"SELECT b.id, b.thumbnail_path, bf.abs_path \
FROM books b \
LEFT JOIN book_files bf ON bf.book_id = b.id \
WHERE b.library_id = $1 AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER($2)",
WHERE b.series_id = $1",
)
.bind(library_id)
.bind(&name)
.bind(series_id)
.fetch_all(&state.pool)
.await?;
if book_rows.is_empty() {
return Err(ApiError::not_found("series not found or has no books"));
// Series exists but has no books — still delete the series row
}
// Collect the series directory from the first book's path
@@ -1156,39 +1208,19 @@ pub async fn delete_series(
// Delete all books from DB (cascades to book_files, reading_progress, etc.)
let book_ids: Vec<Uuid> = book_rows.iter().map(|r| r.get("id")).collect();
sqlx::query("DELETE FROM books WHERE id = ANY($1)")
.bind(&book_ids)
if !book_ids.is_empty() {
sqlx::query("DELETE FROM books WHERE id = ANY($1)")
.bind(&book_ids)
.execute(&state.pool)
.await?;
}
// Delete the series row (cascades to external_metadata_links, anilist_series_links, available_downloads via FK)
sqlx::query("DELETE FROM series WHERE id = $1")
.bind(series_id)
.execute(&state.pool)
.await?;
// Delete series metadata
sqlx::query("DELETE FROM series_metadata WHERE library_id = $1 AND name = $2")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await?;
// Delete external metadata links (cascades to external_book_metadata)
sqlx::query("DELETE FROM external_metadata_links WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await?;
// Delete anilist link
let _ = sqlx::query("DELETE FROM anilist_series_links WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await;
// Delete available downloads
let _ = sqlx::query("DELETE FROM available_downloads WHERE library_id = $1 AND LOWER(series_name) = LOWER($2)")
.bind(library_id)
.bind(&name)
.execute(&state.pool)
.await;
// Queue a scan job for consistency
let scan_job_id = Uuid::new_v4();
sqlx::query(
@@ -1200,9 +1232,61 @@ pub async fn delete_series(
.await?;
tracing::info!(
"[SERIES] Deleted series '{}' ({} books) from library {}, scan job {} queued",
name, book_ids.len(), library_id, scan_job_id
"[SERIES] Deleted series '{}' ({}) ({} books) from library {}, scan job {} queued",
series_name, series_id, book_ids.len(), library_id, scan_job_id
);
Ok(Json(crate::responses::DeletedResponse::new(library_id)))
}
#[cfg(test)]
mod tests {
    use super::*;

    /// The series list payload must carry the new `series_id` UUID alongside
    /// the display name and counters.
    #[test]
    fn series_item_has_series_id() {
        let sample = SeriesItem {
            name: "Dragon Ball".to_string(),
            series_id: Uuid::new_v4(),
            book_count: 42,
            books_read_count: 10,
            first_book_id: Uuid::new_v4(),
            library_id: Uuid::new_v4(),
            series_status: Some("ended".to_string()),
            missing_count: Some(0),
            metadata_provider: None,
            anilist_id: None,
            anilist_url: None,
        };

        let encoded = serde_json::to_value(&sample).unwrap();
        assert_eq!(encoded["name"], "Dragon Ball");
        assert_eq!(encoded["book_count"], 42);
        assert!(encoded["series_id"].is_string());
    }

    /// Series-level metadata round-trips its scalar and list fields to JSON.
    #[test]
    fn series_metadata_serializes() {
        let metadata = SeriesMetadata {
            description: Some("A ninja story".to_string()),
            authors: vec!["Kishimoto".to_string()],
            publishers: vec![],
            book_author: None,
            book_language: None,
            start_year: Some(1999),
            total_volumes: Some(72),
            status: Some("ended".to_string()),
            locked_fields: serde_json::json!({}),
        };

        let encoded = serde_json::to_value(&metadata).unwrap();
        assert_eq!(encoded["status"], "ended");
        assert_eq!(encoded["authors"][0], "Kishimoto");
        assert_eq!(encoded["total_volumes"], 72);
    }

    /// The update endpoint's response exposes the affected-row count as `updated`.
    #[test]
    fn update_series_response_serializes() {
        let response = UpdateSeriesResponse { updated: 5 };
        assert_eq!(serde_json::to_value(&response).unwrap()["updated"], 5);
    }
}

View File

@@ -167,7 +167,7 @@ pub async fn get_stats(
r#"
SELECT
COUNT(*) AS total_books,
COUNT(DISTINCT NULLIF(series, '')) AS total_series,
COUNT(DISTINCT b.series_id) AS total_series,
COUNT(DISTINCT library_id) AS total_libraries,
COALESCE(SUM(page_count), 0)::BIGINT AS total_pages,
(SELECT COUNT(DISTINCT a) FROM (
@@ -298,14 +298,15 @@ pub async fn get_stats(
let series_rows = sqlx::query(
r#"
SELECT
b.series,
s.name AS series,
COUNT(*) AS book_count,
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
FROM books b
JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
WHERE b.series IS NOT NULL AND b.series != ''
GROUP BY b.series
WHERE b.series_id IS NOT NULL
GROUP BY s.name
ORDER BY book_count DESC
LIMIT 10
"#,
@@ -405,8 +406,8 @@ pub async fn get_stats(
let meta_row = sqlx::query(
r#"
SELECT
(SELECT COUNT(DISTINCT NULLIF(series, '')) FROM books) AS total_series,
(SELECT COUNT(DISTINCT series_name) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
(SELECT COUNT(DISTINCT series_id) FROM books WHERE series_id IS NOT NULL) AS total_series,
(SELECT COUNT(DISTINCT series_id) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
(SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary,
(SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn
"#,
@@ -419,7 +420,7 @@ pub async fn get_stats(
let provider_rows = sqlx::query(
r#"
SELECT provider, COUNT(DISTINCT series_name) AS count
SELECT provider, COUNT(DISTINCT series_id) AS count
FROM external_metadata_links
WHERE status = 'approved'
GROUP BY provider
@@ -449,9 +450,10 @@ pub async fn get_stats(
// Currently reading books
let reading_rows = sqlx::query(
r#"
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username
SELECT b.id AS book_id, b.title, s.name AS series, brp.current_page, b.page_count, u.username
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1)
@@ -481,11 +483,12 @@ pub async fn get_stats(
// Recently read books
let recent_rows = sqlx::query(
r#"
SELECT b.id AS book_id, b.title, b.series,
SELECT b.id AS book_id, b.title, s.name AS series,
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
u.username
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1)

View File

@@ -664,8 +664,9 @@ async fn do_import(
"SELECT bf.abs_path, b.volume \
FROM book_files bf \
JOIN books b ON b.id = bf.book_id \
LEFT JOIN series s ON s.id = b.series_id \
WHERE b.library_id = $1 \
AND LOWER(unaccent(b.series)) = LOWER(unaccent($2)) \
AND LOWER(unaccent(s.name)) = LOWER(unaccent($2)) \
AND b.volume IS NOT NULL \
ORDER BY b.volume DESC LIMIT 1",
)

View File

@@ -9,7 +9,7 @@ pub struct BookUpdate {
pub title: String,
pub kind: String,
pub format: String,
pub series: Option<String>,
pub series_id: Option<Uuid>,
pub volume: Option<i32>,
pub page_count: Option<i32>,
}
@@ -28,7 +28,7 @@ pub struct BookInsert {
pub kind: String,
pub format: String,
pub title: String,
pub series: Option<String>,
pub series_id: Option<Uuid>,
pub volume: Option<i32>,
pub page_count: Option<i32>,
pub thumbnail_path: Option<String>,
@@ -73,7 +73,7 @@ pub async fn flush_all_batches(
let titles: Vec<String> = books_update.iter().map(|b| b.title.clone()).collect();
let kinds: Vec<String> = books_update.iter().map(|b| b.kind.clone()).collect();
let formats: Vec<String> = books_update.iter().map(|b| b.format.clone()).collect();
let series: Vec<Option<String>> = books_update.iter().map(|b| b.series.clone()).collect();
let series_ids: Vec<Option<Uuid>> = books_update.iter().map(|b| b.series_id).collect();
let volumes: Vec<Option<i32>> = books_update.iter().map(|b| b.volume).collect();
let page_counts: Vec<Option<i32>> = books_update.iter().map(|b| b.page_count).collect();
@@ -83,13 +83,13 @@ pub async fn flush_all_batches(
title = data.title,
kind = data.kind,
format = data.format,
series = data.series,
series_id = data.series_id,
volume = data.volume,
page_count = data.page_count,
updated_at = NOW()
FROM (
SELECT * FROM UNNEST($1::uuid[], $2::text[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[])
AS t(book_id, title, kind, format, series, volume, page_count)
SELECT * FROM UNNEST($1::uuid[], $2::text[], $3::text[], $4::text[], $5::uuid[], $6::int[], $7::int[])
AS t(book_id, title, kind, format, series_id, volume, page_count)
) AS data
WHERE books.id = data.book_id
"#
@@ -98,7 +98,7 @@ pub async fn flush_all_batches(
.bind(&titles)
.bind(&kinds)
.bind(&formats)
.bind(&series)
.bind(&series_ids)
.bind(&volumes)
.bind(&page_counts)
.execute(&mut *tx)
@@ -150,16 +150,16 @@ pub async fn flush_all_batches(
let kinds: Vec<String> = books_insert.iter().map(|b| b.kind.clone()).collect();
let formats: Vec<String> = books_insert.iter().map(|b| b.format.clone()).collect();
let titles: Vec<String> = books_insert.iter().map(|b| b.title.clone()).collect();
let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect();
let series_ids: Vec<Option<Uuid>> = books_insert.iter().map(|b| b.series_id).collect();
let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect();
let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect();
let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect();
sqlx::query(
r#"
INSERT INTO books (id, library_id, kind, format, title, series, volume, page_count, thumbnail_path)
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::text[], $7::int[], $8::int[], $9::text[])
AS t(id, library_id, kind, format, title, series, volume, page_count, thumbnail_path)
INSERT INTO books (id, library_id, kind, format, title, series_id, volume, page_count, thumbnail_path)
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::uuid[], $7::int[], $8::int[], $9::text[])
AS t(id, library_id, kind, format, title, series_id, volume, page_count, thumbnail_path)
"#
)
.bind(&book_ids)
@@ -167,7 +167,7 @@ pub async fn flush_all_batches(
.bind(&kinds)
.bind(&formats)
.bind(&titles)
.bind(&series)
.bind(&series_ids)
.bind(&volumes)
.bind(&page_counts)
.bind(&thumbnail_paths)

View File

@@ -18,12 +18,13 @@ async fn rematch_unlinked_books(pool: &PgPool, library_id: Uuid) {
FROM external_book_metadata ebm2
JOIN external_metadata_links eml ON eml.id = ebm2.link_id
JOIN books b ON b.library_id = eml.library_id
AND LOWER(COALESCE(NULLIF(b.series, ''), 'unclassified')) = LOWER(eml.series_name)
AND b.volume = ebm2.volume_number
LEFT JOIN series s ON s.id = b.series_id
WHERE eml.library_id = $1
AND ebm2.book_id IS NULL
AND ebm2.volume_number IS NOT NULL
AND eml.status = 'approved'
AND LOWER(COALESCE(s.name, 'unclassified')) = LOWER(eml.series_name)
) matched
WHERE ebm.id = matched.ebm_id
"#,

View File

@@ -28,6 +28,42 @@ pub struct JobStats {
const BATCH_SIZE: usize = 100;
/// Resolve a series name to its UUID within `library_id`, creating the
/// `series` row if it does not exist yet.
///
/// Results are memoized in `cache`, keyed by name only (the caller is expected
/// to use one cache per library scan, so the key omits `library_id` —
/// NOTE(review): confirm the caller never shares one cache across libraries).
///
/// # Errors
/// Propagates any database error from the INSERT or the fallback SELECT.
async fn get_or_create_series_id(
    pool: &sqlx::PgPool,
    library_id: Uuid,
    name: &str,
    cache: &mut HashMap<String, Uuid>,
) -> Result<Uuid> {
    // Fast path: already resolved earlier in this scan.
    if let Some(&id) = cache.get(name) {
        return Ok(id);
    }

    // Optimistic insert. `RETURNING id` hands back the fresh id in a single
    // round-trip for the common new-series case; on conflict (row already
    // present, possibly created by a concurrent worker) DO NOTHING yields no
    // row and `fetch_optional` returns None.
    let inserted: Option<Uuid> = sqlx::query_scalar(
        "INSERT INTO series (id, library_id, name) VALUES ($1, $2, $3) \
         ON CONFLICT (library_id, name) DO NOTHING RETURNING id",
    )
    .bind(Uuid::new_v4())
    .bind(library_id)
    .bind(name)
    .fetch_optional(pool)
    .await?;

    let id = match inserted {
        Some(id) => id,
        // Conflict path: the row already existed — fetch its id.
        None => {
            sqlx::query_scalar("SELECT id FROM series WHERE library_id = $1 AND name = $2")
                .bind(library_id)
                .bind(name)
                .fetch_one(pool)
                .await?
        }
    };

    cache.insert(name.to_string(), id);
    Ok(id)
}
/// Phase 1 — Discovery: walk filesystem, extract metadata from filenames only (no archive I/O).
/// New books are inserted with page_count = NULL so the analyzer phase can fill them in.
/// Updated books (fingerprint changed) get page_count/thumbnail reset.
@@ -108,22 +144,31 @@ pub async fn scan_library_discovery(
HashMap::new()
};
// Track existing series names for new_series counting
let existing_series: HashSet<String> = sqlx::query_scalar(
"SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') FROM books WHERE library_id = $1",
// Load existing series for this library: name → id
let series_rows = sqlx::query(
"SELECT id, name FROM series WHERE library_id = $1",
)
.bind(library_id)
.fetch_all(&state.pool)
.await
.unwrap_or_default()
.into_iter()
.collect();
.unwrap_or_default();
let mut series_map: HashMap<String, Uuid> = series_rows
.into_iter()
.map(|row| {
let name: String = row.get("name");
let id: Uuid = row.get("id");
(name, id)
})
.collect();
// Track existing series names for new_series counting
let existing_series: HashSet<String> = series_map.keys().cloned().collect();
let mut seen_new_series: HashSet<String> = HashSet::new();
// Load series rename mapping: original filesystem name → current DB name.
// This prevents the scanner from recreating old series after a user rename.
let rename_rows = sqlx::query(
"SELECT original_name, name FROM series_metadata WHERE library_id = $1 AND original_name IS NOT NULL",
"SELECT original_name, name FROM series WHERE library_id = $1 AND original_name IS NOT NULL",
)
.bind(library_id)
.fetch_all(&state.pool)
@@ -378,12 +423,22 @@ pub async fn scan_library_discovery(
old_fingerprint != fingerprint
);
// Resolve series name → series_id
let update_series_id = if let Some(ref series_name) = parsed.series {
Some(
get_or_create_series_id(&state.pool, library_id, series_name, &mut series_map)
.await?,
)
} else {
None
};
books_to_update.push(BookUpdate {
book_id,
title: parsed.title,
kind: utils::kind_from_format(format).to_string(),
format: format.as_str().to_string(),
series: parsed.series,
series_id: update_series_id,
volume: parsed.volume,
// Reset page_count so analyzer re-processes this book
page_count: None,
@@ -439,13 +494,23 @@ pub async fn scan_library_discovery(
stats.new_series += 1;
}
// Resolve series name → series_id
let insert_series_id = if let Some(ref series_name) = parsed.series {
Some(
get_or_create_series_id(&state.pool, library_id, series_name, &mut series_map)
.await?,
)
} else {
None
};
books_to_insert.push(BookInsert {
book_id,
library_id,
kind: utils::kind_from_format(format).to_string(),
format: format.as_str().to_string(),
title: parsed.title,
series: parsed.series,
series_id: insert_series_id,
volume: parsed.volume,
page_count: None,
thumbnail_path: None,
@@ -642,4 +707,34 @@ mod tests {
// No existing files in DB — nothing to delete anyway
assert!(!should_skip_deletions(true, 10, 0, 0));
}
/// The batch structs carry a `series_id: Option<Uuid>` FK instead of the old
/// free-text `series` name.
#[test]
fn batch_structs_use_series_id() {
    use crate::batch::{BookInsert, BookUpdate};

    let sid = Uuid::new_v4();

    let inserted = BookInsert {
        book_id: Uuid::new_v4(),
        library_id: Uuid::new_v4(),
        kind: "comic".to_string(),
        format: "cbz".to_string(),
        title: "Test".to_string(),
        series_id: Some(sid),
        volume: Some(1),
        page_count: None,
        thumbnail_path: None,
    };
    assert_eq!(inserted.series_id, Some(sid));

    let updated = BookUpdate {
        book_id: Uuid::new_v4(),
        title: "Test".to_string(),
        kind: "comic".to_string(),
        format: "cbz".to_string(),
        series_id: None,
        volume: None,
        page_count: None,
    };
    assert!(updated.series_id.is_none());
}
}