feat: table series avec UUID PK — migration complète backend + frontend

Migration DB (0070 + 0071):
- Backup automatique de book_reading_progress avant migration
- Crée table series (fusion de series_metadata) avec UUID PK
- Ajoute series_id FK à books, external_metadata_links, anilist_series_links,
  available_downloads, download_detection_results
- Supprime les colonnes TEXT legacy et la table series_metadata

Backend API + Indexer:
- Toutes les queries SQL migrées vers series_id FK + JOIN series
- Routes /series/:name → /series/:series_id (UUID)
- Nouvel endpoint GET /series/by-name/:name pour lookup par nom
- match_title_volumes() factorisé entre prowlarr.rs et download_detection.rs
- Fix scheduler.rs: settings → app_settings
- OpenAPI mis à jour avec les nouveaux endpoints

Frontend:
- Routes /libraries/[id]/series/[name] → /series/[seriesId]
- Tous les composants (Edit, Delete, MarkRead, Prowlarr, Metadata,
  ReadingStatus) utilisent seriesId
- compressVolumes() pour afficher T1→3 au lieu de T1 T2 T3
- Titre release en entier (plus de truncate) dans available downloads

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-29 22:51:00 +02:00
parent 292e9bc77f
commit ccc7f375f6
38 changed files with 463 additions and 286 deletions

View File

@@ -287,11 +287,11 @@ pub async fn search_manga(
/// Get AniList link for a specific series
#[utoipa::path(
get,
path = "/anilist/series/{library_id}/{series_name}",
path = "/anilist/series/{library_id}/{series_id}",
tag = "anilist",
params(
("library_id" = String, Path, description = "Library UUID"),
("series_name" = String, Path, description = "Series name"),
("series_id" = String, Path, description = "Series UUID"),
),
responses(
(status = 200, body = AnilistSeriesLinkResponse),
@@ -302,15 +302,16 @@ pub async fn search_manga(
)]
pub async fn get_series_link(
State(state): State<AppState>,
Path((library_id, series_name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<AnilistSeriesLinkResponse>, ApiError> {
let row = sqlx::query(
"SELECT library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
FROM anilist_series_links
WHERE library_id = $1 AND series_name = $2",
"SELECT asl.library_id, s.name AS series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url, asl.status, asl.linked_at, asl.synced_at
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
WHERE asl.library_id = $1 AND asl.series_id = $2",
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.fetch_optional(&state.pool)
.await?;
@@ -331,11 +332,11 @@ pub async fn get_series_link(
/// Link a series to an AniList media ID
#[utoipa::path(
post,
path = "/anilist/series/{library_id}/{series_name}/link",
path = "/anilist/series/{library_id}/{series_id}/link",
tag = "anilist",
params(
("library_id" = String, Path, description = "Library UUID"),
("series_name" = String, Path, description = "Series name"),
("series_id" = String, Path, description = "Series UUID"),
),
request_body = AnilistLinkRequest,
responses(
@@ -346,7 +347,7 @@ pub async fn get_series_link(
)]
pub async fn link_series(
State(state): State<AppState>,
Path((library_id, series_name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
Json(body): Json<AnilistLinkRequest>,
) -> Result<Json<AnilistSeriesLinkResponse>, ApiError> {
// Try to fetch title/url from AniList if not provided
@@ -382,29 +383,36 @@ pub async fn link_series(
let row = sqlx::query(
r#"
INSERT INTO anilist_series_links (library_id, series_name, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
INSERT INTO anilist_series_links (library_id, series_id, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
VALUES ($1, $2, 'anilist', $3, $4, $5, 'linked', NOW())
ON CONFLICT (library_id, series_name, provider) DO UPDATE
ON CONFLICT (series_id, provider) DO UPDATE
SET anilist_id = EXCLUDED.anilist_id,
anilist_title = EXCLUDED.anilist_title,
anilist_url = EXCLUDED.anilist_url,
status = 'linked',
linked_at = NOW(),
synced_at = NULL
RETURNING library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
RETURNING library_id, series_id, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
"#,
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.bind(body.anilist_id)
.bind(&anilist_title)
.bind(&anilist_url)
.fetch_one(&state.pool)
.await?;
// Fetch series name for the response
let series_name: String = sqlx::query_scalar("SELECT name FROM series WHERE id = $1")
.bind(series_id)
.fetch_one(&state.pool)
.await
.unwrap_or_else(|_| "unknown".to_string());
Ok(Json(AnilistSeriesLinkResponse {
library_id: row.get("library_id"),
series_name: row.get("series_name"),
series_name,
anilist_id: row.get("anilist_id"),
anilist_title: row.get("anilist_title"),
anilist_url: row.get("anilist_url"),
@@ -417,11 +425,11 @@ pub async fn link_series(
/// Remove the AniList link for a series
#[utoipa::path(
delete,
path = "/anilist/series/{library_id}/{series_name}/unlink",
path = "/anilist/series/{library_id}/{series_id}/unlink",
tag = "anilist",
params(
("library_id" = String, Path, description = "Library UUID"),
("series_name" = String, Path, description = "Series name"),
("series_id" = String, Path, description = "Series UUID"),
),
responses(
(status = 200, description = "Unlinked"),
@@ -432,13 +440,13 @@ pub async fn link_series(
)]
pub async fn unlink_series(
State(state): State<AppState>,
Path((library_id, series_name)): Path<(Uuid, String)>,
Path((library_id, series_id)): Path<(Uuid, Uuid)>,
) -> Result<Json<crate::responses::UnlinkedResponse>, ApiError> {
let result = sqlx::query(
"DELETE FROM anilist_series_links WHERE library_id = $1 AND series_name = $2",
"DELETE FROM anilist_series_links WHERE library_id = $1 AND series_id = $2",
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.execute(&state.pool)
.await?;
@@ -506,10 +514,10 @@ pub async fn list_unlinked(
JOIN libraries l ON l.id = b.library_id
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN anilist_series_links asl
ON asl.library_id = b.library_id
AND asl.series_name = COALESCE(s.name, 'unclassified')
ON asl.series_id = b.series_id
WHERE l.reading_status_provider = 'anilist'
AND asl.library_id IS NULL
AND asl.series_id IS NULL
AND b.series_id IS NOT NULL
GROUP BY l.id, l.name, COALESCE(s.name, 'unclassified')
ORDER BY l.name, series_name
"#,
@@ -553,11 +561,12 @@ pub async fn preview_sync(
let links = sqlx::query(
r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
SELECT asl.library_id, asl.series_id, s.name AS series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
ORDER BY l.name, asl.series_name
ORDER BY l.name, s.name
"#,
)
.fetch_all(&state.pool)
@@ -566,7 +575,7 @@ pub async fn preview_sync(
let mut items: Vec<AnilistSyncPreviewItem> = Vec::new();
for link in &links {
let library_id: Uuid = link.get("library_id");
let series_id: Uuid = link.get("series_id");
let series_name: String = link.get("series_name");
let anilist_id: i32 = link.get("anilist_id");
let anilist_title: Option<String> = link.get("anilist_title");
@@ -577,15 +586,13 @@ pub async fn preview_sync(
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
(SELECT sm.total_volumes FROM series sm WHERE sm.id = $1 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $2
WHERE b.series_id = $1
"#,
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.bind(local_user_id)
.fetch_one(&state.pool)
.await;
@@ -649,8 +656,9 @@ pub async fn sync_to_anilist(
// Get all series that have AniList links in enabled libraries
let links = sqlx::query(
r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
SELECT asl.library_id, asl.series_id, s.name AS series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
"#,
@@ -674,7 +682,7 @@ pub async fn sync_to_anilist(
"#;
for link in &links {
let library_id: Uuid = link.get("library_id");
let series_id: Uuid = link.get("series_id");
let series_name: String = link.get("series_name");
let anilist_id: i32 = link.get("anilist_id");
let anilist_title: Option<String> = link.get("anilist_title");
@@ -686,15 +694,13 @@ pub async fn sync_to_anilist(
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
(SELECT sm.total_volumes FROM series sm WHERE sm.id = $1 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $2
WHERE b.series_id = $1
"#,
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.bind(local_user_id)
.fetch_one(&state.pool)
.await;
@@ -735,10 +741,10 @@ pub async fn sync_to_anilist(
Ok(_) => {
// Update synced_at
let _ = sqlx::query(
"UPDATE anilist_series_links SET status = 'synced', synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
"UPDATE anilist_series_links SET status = 'synced', synced_at = NOW() WHERE library_id = $1 AND series_id = $2",
)
.bind(library_id)
.bind(&series_name)
.bind(link.get::<Uuid, _>("library_id"))
.bind(series_id)
.execute(&state.pool)
.await;
items.push(AnilistSyncItem {
@@ -752,10 +758,10 @@ pub async fn sync_to_anilist(
}
Err(e) => {
let _ = sqlx::query(
"UPDATE anilist_series_links SET status = 'error' WHERE library_id = $1 AND series_name = $2",
"UPDATE anilist_series_links SET status = 'error' WHERE library_id = $1 AND series_id = $2",
)
.bind(library_id)
.bind(&series_name)
.bind(link.get::<Uuid, _>("library_id"))
.bind(series_id)
.execute(&state.pool)
.await;
errors.push(format!("{series_name}: {}", e.message));
@@ -824,8 +830,9 @@ pub async fn pull_from_anilist(
// Find local series linked to these anilist IDs (in enabled libraries)
let link_rows = sqlx::query(
r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
SELECT asl.library_id, asl.series_id, s.name AS series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
"#,
@@ -833,16 +840,16 @@ pub async fn pull_from_anilist(
.fetch_all(&state.pool)
.await?;
// Build map: anilist_id → (library_id, series_name, anilist_title, anilist_url)
// Build map: anilist_id → (series_id, series_name, anilist_title, anilist_url)
let mut link_map: std::collections::HashMap<i32, (Uuid, String, Option<String>, Option<String>)> =
std::collections::HashMap::new();
for row in &link_rows {
let aid: i32 = row.get("anilist_id");
let lib: Uuid = row.get("library_id");
let sid: Uuid = row.get("series_id");
let name: String = row.get("series_name");
let title: Option<String> = row.get("anilist_title");
let url: Option<String> = row.get("anilist_url");
link_map.insert(aid, (lib, name, title, url));
link_map.insert(aid, (sid, name, title, url));
}
let mut updated = 0i32;
@@ -851,7 +858,7 @@ pub async fn pull_from_anilist(
let mut items: Vec<AnilistPullItem> = Vec::new();
for (anilist_id, anilist_status, progress_volumes) in &entries {
let Some((library_id, series_name, anilist_title, anilist_url)) = link_map.get(anilist_id) else {
let Some((series_id, series_name, anilist_title, anilist_url)) = link_map.get(anilist_id) else {
skipped += 1;
continue;
};
@@ -869,10 +876,9 @@ pub async fn pull_from_anilist(
// Get all book IDs for this series, ordered by volume
let book_rows = sqlx::query(
"SELECT b.id, b.volume FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2 ORDER BY b.volume NULLS LAST",
"SELECT b.id, b.volume FROM books b WHERE b.series_id = $1 ORDER BY b.volume NULLS LAST",
)
.bind(library_id)
.bind(series_name)
.bind(series_id)
.fetch_all(&state.pool)
.await;
@@ -946,9 +952,10 @@ pub async fn list_links(
State(state): State<AppState>,
) -> Result<Json<Vec<AnilistSeriesLinkResponse>>, ApiError> {
let rows = sqlx::query(
"SELECT library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
FROM anilist_series_links
ORDER BY linked_at DESC",
"SELECT asl.library_id, s.name AS series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url, asl.status, asl.linked_at, asl.synced_at
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
ORDER BY asl.linked_at DESC",
)
.fetch_all(&state.pool)
.await?;

View File

@@ -77,14 +77,14 @@ pub async fn list_authors(
NULLIF(authors, '{{}}'),
CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
)
) AS author_name, id AS book_id, library_id, series
) AS author_name, id AS book_id, series_id
FROM books
),
author_agg AS (
SELECT
author_name AS name,
COUNT(DISTINCT book_id) AS book_count,
COUNT(DISTINCT (library_id, series)) AS series_count
COUNT(DISTINCT series_id) AS series_count
FROM author_books
WHERE ($1::text IS NULL OR author_name ILIKE $1)
GROUP BY author_name

View File

@@ -303,10 +303,11 @@ pub async fn get_detection_results(
) -> Result<Json<Vec<DownloadDetectionResultDto>>, ApiError> {
let rows = if let Some(status_filter) = &query.status {
sqlx::query(
"SELECT id, series_name, status, missing_count, available_releases, error_message
FROM download_detection_results
WHERE job_id = $1 AND status = $2
ORDER BY series_name",
"SELECT ddr.id, COALESCE(s.name, 'unknown') AS series_name, ddr.status, ddr.missing_count, ddr.available_releases, ddr.error_message
FROM download_detection_results ddr
LEFT JOIN series s ON s.id = ddr.series_id
WHERE ddr.job_id = $1 AND ddr.status = $2
ORDER BY s.name",
)
.bind(job_id)
.bind(status_filter)
@@ -314,10 +315,11 @@ pub async fn get_detection_results(
.await?
} else {
sqlx::query(
"SELECT id, series_name, status, missing_count, available_releases, error_message
FROM download_detection_results
WHERE job_id = $1
ORDER BY status, series_name",
"SELECT ddr.id, COALESCE(s.name, 'unknown') AS series_name, ddr.status, ddr.missing_count, ddr.available_releases, ddr.error_message
FROM download_detection_results ddr
LEFT JOIN series s ON s.id = ddr.series_id
WHERE ddr.job_id = $1
ORDER BY ddr.status, s.name",
)
.bind(job_id)
.fetch_all(&state.pool)
@@ -381,11 +383,12 @@ pub async fn get_latest_found(
State(state): State<AppState>,
) -> Result<Json<Vec<LatestFoundPerLibraryDto>>, ApiError> {
let rows = sqlx::query(
"SELECT ad.id, ad.library_id, ad.series_name, ad.missing_count, ad.available_releases, ad.updated_at, \
"SELECT ad.id, ad.library_id, s.name AS series_name, ad.series_id, ad.missing_count, ad.available_releases, ad.updated_at, \
l.name as library_name \
FROM available_downloads ad \
JOIN libraries l ON l.id = ad.library_id \
ORDER BY l.name, ad.series_name",
JOIN series s ON s.id = ad.series_id \
ORDER BY l.name, s.name",
)
.fetch_all(&state.pool)
.await?;
@@ -509,9 +512,9 @@ pub(crate) async fn process_download_detection(
.map_err(|e| e.message)?;
// Fetch all series with their metadata link status
let all_series: Vec<String> = sqlx::query_scalar(
let all_series_rows: Vec<(String, Option<uuid::Uuid>)> = sqlx::query_as(
r#"
SELECT DISTINCT COALESCE(s.name, 'unclassified')
SELECT DISTINCT COALESCE(s.name, 'unclassified') AS name, b.series_id
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
@@ -528,11 +531,10 @@ pub(crate) async fn process_download_detection(
r#"
DELETE FROM available_downloads
WHERE library_id = $1
AND series_name NOT IN (
SELECT DISTINCT COALESCE(s.name, 'unclassified')
AND series_id NOT IN (
SELECT DISTINCT b.series_id
FROM books b
LEFT JOIN series s ON s.id = b.series_id
WHERE b.library_id = $1
WHERE b.library_id = $1 AND b.series_id IS NOT NULL
)
"#,
)
@@ -541,6 +543,10 @@ pub(crate) async fn process_download_detection(
.await
.map_err(|e| e.to_string())?;
let all_series: Vec<String> = all_series_rows.iter().map(|(name, _)| name.clone()).collect();
let series_id_map: std::collections::HashMap<String, Uuid> = all_series_rows.iter()
.filter_map(|(name, id)| id.map(|id| (name.clone(), id)))
.collect();
let total = all_series.len() as i32;
sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
.bind(job_id)
@@ -551,7 +557,9 @@ pub(crate) async fn process_download_detection(
// Fetch approved metadata links for this library (series_name -> link_id)
let links: Vec<(String, Uuid)> = sqlx::query(
"SELECT series_name, id FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
"SELECT s.name AS series_name, eml.id FROM external_metadata_links eml \
JOIN series s ON s.id = eml.series_id \
WHERE eml.library_id = $1 AND eml.status = 'approved'",
)
.bind(library_id)
.fetch_all(pool)
@@ -602,7 +610,7 @@ pub(crate) async fn process_download_detection(
// Skip unclassified
if series_name == "unclassified" {
insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
insert_result(pool, job_id, library_id, series_id_map.get(series_name).copied(), "no_metadata", 0, None, None).await;
continue;
}
@@ -610,7 +618,7 @@ pub(crate) async fn process_download_detection(
let link_id = match link_map.get(series_name) {
Some(id) => *id,
None => {
insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
insert_result(pool, job_id, library_id, series_id_map.get(series_name).copied(), "no_metadata", 0, None, None).await;
continue;
}
};
@@ -625,10 +633,12 @@ pub(crate) async fn process_download_detection(
.map_err(|e| e.to_string())?;
if missing_rows.is_empty() {
insert_result(pool, job_id, library_id, series_name, "no_missing", 0, None, None).await;
insert_result(pool, job_id, library_id, series_id_map.get(series_name).copied(), "no_missing", 0, None, None).await;
// Series is complete, remove from available_downloads
let _ = sqlx::query("DELETE FROM available_downloads WHERE library_id = $1 AND series_name = $2")
.bind(library_id).bind(series_name).execute(pool).await;
if let Some(&sid) = series_id_map.get(series_name) {
let _ = sqlx::query("DELETE FROM available_downloads WHERE series_id = $1")
.bind(sid).execute(pool).await;
}
continue;
}
@@ -655,7 +665,7 @@ pub(crate) async fn process_download_detection(
pool,
job_id,
library_id,
series_name,
series_id_map.get(series_name).copied(),
"found",
missing_count,
releases_json.clone(),
@@ -663,17 +673,17 @@ pub(crate) async fn process_download_detection(
)
.await;
// UPSERT into available_downloads
if let Some(ref rj) = releases_json {
if let (Some(ref rj), Some(&sid)) = (&releases_json, series_id_map.get(series_name)) {
let _ = sqlx::query(
"INSERT INTO available_downloads (library_id, series_name, missing_count, available_releases, updated_at) \
"INSERT INTO available_downloads (library_id, series_id, missing_count, available_releases, updated_at) \
VALUES ($1, $2, $3, $4, NOW()) \
ON CONFLICT (library_id, series_name) DO UPDATE SET \
ON CONFLICT (series_id) DO UPDATE SET \
missing_count = EXCLUDED.missing_count, \
available_releases = EXCLUDED.available_releases, \
updated_at = NOW()",
)
.bind(library_id)
.bind(series_name)
.bind(sid)
.bind(missing_count)
.bind(rj)
.execute(pool)
@@ -681,19 +691,20 @@ pub(crate) async fn process_download_detection(
}
}
Ok(_) => {
insert_result(pool, job_id, library_id, series_name, "not_found", missing_count, None, None).await;
insert_result(pool, job_id, library_id, series_id_map.get(series_name).copied(), "not_found", missing_count, None, None).await;
// Remove from available_downloads if previously found
if let Some(&sid) = series_id_map.get(series_name) {
let _ = sqlx::query(
"DELETE FROM available_downloads WHERE library_id = $1 AND series_name = $2",
"DELETE FROM available_downloads WHERE series_id = $1",
)
.bind(library_id)
.bind(series_name)
.bind(sid)
.execute(pool)
.await;
}
}
Err(e) => {
warn!("[DOWNLOAD_DETECTION] series '{series_name}': {e}");
insert_result(pool, job_id, library_id, series_name, "error", missing_count, None, Some(&e)).await;
insert_result(pool, job_id, library_id, series_id_map.get(series_name).copied(), "error", missing_count, None, Some(&e)).await;
}
}
}
@@ -810,20 +821,7 @@ async fn search_prowlarr_for_series(
let matched: Vec<AvailableReleaseDto> = raw_releases
.into_iter()
.filter_map(|r| {
let title_volumes = prowlarr::extract_volumes_from_title_pub(&r.title);
// "Intégrale" / "Complet" releases match ALL missing volumes
let is_integral = prowlarr::is_integral_release(&r.title);
let matched_vols: Vec<i32> = if is_integral && !missing_volumes.is_empty() {
missing_volumes.to_vec()
} else {
title_volumes
.iter()
.copied()
.filter(|v| missing_volumes.contains(v))
.collect()
};
let (matched_vols, all_volumes) = prowlarr::match_title_volumes(&r.title, missing_volumes);
if matched_vols.is_empty() {
None
@@ -835,7 +833,7 @@ async fn search_prowlarr_for_series(
indexer: r.indexer,
seeders: r.seeders,
matched_missing_volumes: matched_vols,
all_volumes: if is_integral { vec![] } else { title_volumes },
all_volumes,
})
}
})
@@ -849,7 +847,7 @@ async fn insert_result(
pool: &PgPool,
job_id: Uuid,
library_id: Uuid,
series_name: &str,
series_id: Option<Uuid>,
status: &str,
missing_count: i32,
available_releases: Option<serde_json::Value>,
@@ -858,13 +856,13 @@ async fn insert_result(
let _ = sqlx::query(
r#"
INSERT INTO download_detection_results
(job_id, library_id, series_name, status, missing_count, available_releases, error_message)
(job_id, library_id, series_id, status, missing_count, available_releases, error_message)
VALUES ($1, $2, $3, $4, $5, $6, $7)
"#,
)
.bind(job_id)
.bind(library_id)
.bind(series_name)
.bind(series_id)
.bind(status)
.bind(missing_count)
.bind(&available_releases)

View File

@@ -180,6 +180,7 @@ async fn main() -> anyhow::Result<()> {
.route("/books/:id/pages/:n", get(pages::get_page))
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
.route("/libraries/:library_id/series", get(series::list_series))
.route("/libraries/:library_id/series/by-name/:name", get(series::get_series_by_name))
.route("/libraries/:library_id/series/:series_id/metadata", get(series::get_series_metadata))
.route("/series", get(series::list_all_series))
.route("/series/ongoing", get(series::ongoing_series))

View File

@@ -137,7 +137,7 @@ pub struct MissingBookItem {
#[derive(Deserialize)]
pub struct MetadataLinkQuery {
pub library_id: Option<String>,
pub series_name: Option<String>,
pub series_id: Option<String>,
}
// ---------------------------------------------------------------------------
@@ -234,12 +234,14 @@ pub async fn create_metadata_match(
.parse()
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
let series_id = crate::series::get_or_create_series(&state.pool, library_id, &body.series_name).await?;
let row = sqlx::query(
r#"
INSERT INTO external_metadata_links
(library_id, series_name, provider, external_id, external_url, status, confidence, metadata_json, total_volumes_external)
(library_id, series_id, provider, external_id, external_url, status, confidence, metadata_json, total_volumes_external)
VALUES ($1, $2, $3, $4, $5, 'pending', $6, $7, $8)
ON CONFLICT (library_id, series_name, provider)
ON CONFLICT (series_id, provider)
DO UPDATE SET
external_id = EXCLUDED.external_id,
external_url = EXCLUDED.external_url,
@@ -251,12 +253,11 @@ pub async fn create_metadata_match(
updated_at = NOW(),
approved_at = NULL,
synced_at = NULL
RETURNING id, library_id, series_name, provider, external_id, external_url, status, confidence,
metadata_json, total_volumes_external, matched_at, approved_at, synced_at
RETURNING id
"#,
)
.bind(library_id)
.bind(&body.series_name)
.bind(series_id)
.bind(&body.provider)
.bind(&body.external_id)
.bind(&body.external_url)
@@ -266,7 +267,22 @@ pub async fn create_metadata_match(
.fetch_one(&state.pool)
.await?;
Ok(Json(row_to_link_dto(&row)))
let link_id: Uuid = row.get("id");
// Re-fetch with JOIN to get series_name for the DTO
let full_row = sqlx::query(
r#"
SELECT eml.id, eml.library_id, s.name AS series_name, eml.series_id, eml.provider, eml.external_id, eml.external_url, eml.status, eml.confidence,
eml.metadata_json, eml.total_volumes_external, eml.matched_at, eml.approved_at, eml.synced_at
FROM external_metadata_links eml
JOIN series s ON s.id = eml.series_id
WHERE eml.id = $1
"#,
)
.bind(link_id)
.fetch_one(&state.pool)
.await?;
Ok(Json(row_to_link_dto(&full_row)))
}
// ---------------------------------------------------------------------------
@@ -296,7 +312,7 @@ pub async fn approve_metadata(
UPDATE external_metadata_links
SET status = 'approved', approved_at = NOW(), updated_at = NOW()
WHERE id = $1
RETURNING library_id, series_name, provider, external_id, metadata_json, total_volumes_external
RETURNING library_id, series_id, provider, external_id, metadata_json, total_volumes_external
"#,
)
.bind(id)
@@ -306,7 +322,9 @@ pub async fn approve_metadata(
let row = result.ok_or_else(|| ApiError::not_found("link not found"))?;
let library_id: Uuid = row.get("library_id");
let series_name: String = row.get("series_name");
let series_id: Uuid = row.get("series_id");
let series_name: String = sqlx::query_scalar("SELECT name FROM series WHERE id = $1")
.bind(series_id).fetch_one(&state.pool).await?;
// Reject any other approved links for the same series (only one active link per series)
// Also clean up their external_book_metadata
@@ -314,12 +332,11 @@ pub async fn approve_metadata(
r#"
UPDATE external_metadata_links
SET status = 'rejected', updated_at = NOW()
WHERE library_id = $1 AND series_name = $2 AND id != $3 AND status = 'approved'
WHERE series_id = $1 AND id != $2 AND status = 'approved'
RETURNING id
"#,
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.bind(id)
.fetch_all(&state.pool)
.await?;
@@ -438,7 +455,7 @@ pub async fn reject_metadata(
tag = "metadata",
params(
("library_id" = Option<String>, Query, description = "Library UUID"),
("series_name" = Option<String>, Query, description = "Series name"),
("series_id" = Option<String>, Query, description = "Series UUID"),
),
responses(
(status = 200, body = Vec<ExternalMetadataLinkDto>),
@@ -454,18 +471,21 @@ pub async fn get_metadata_links(
.as_deref()
.and_then(|s| s.parse().ok());
let series_id: Option<Uuid> = query.series_id.as_deref().and_then(|s| s.parse().ok());
let rows = sqlx::query(
r#"
SELECT id, library_id, series_name, provider, external_id, external_url, status, confidence,
metadata_json, total_volumes_external, matched_at, approved_at, synced_at
FROM external_metadata_links
WHERE ($1::uuid IS NULL OR library_id = $1)
AND ($2::text IS NULL OR series_name = $2)
ORDER BY updated_at DESC
SELECT eml.id, eml.library_id, s.name AS series_name, eml.series_id, eml.provider, eml.external_id, eml.external_url, eml.status, eml.confidence,
eml.metadata_json, eml.total_volumes_external, eml.matched_at, eml.approved_at, eml.synced_at
FROM external_metadata_links eml
JOIN series s ON s.id = eml.series_id
WHERE ($1::uuid IS NULL OR eml.library_id = $1)
AND ($2::uuid IS NULL OR eml.series_id = $2)
ORDER BY eml.updated_at DESC
"#,
)
.bind(library_id)
.bind(query.series_name.as_deref())
.bind(series_id)
.fetch_all(&state.pool)
.await?;
@@ -495,7 +515,10 @@ pub async fn get_missing_books(
) -> Result<Json<MissingBooksDto>, ApiError> {
// Verify link exists
let link = sqlx::query(
"SELECT library_id, series_name FROM external_metadata_links WHERE id = $1",
"SELECT eml.library_id, eml.series_id, s.name AS series_name \
FROM external_metadata_links eml \
JOIN series s ON s.id = eml.series_id \
WHERE eml.id = $1",
)
.bind(id)
.fetch_optional(&state.pool)
@@ -503,6 +526,7 @@ pub async fn get_missing_books(
.ok_or_else(|| ApiError::not_found("link not found"))?;
let library_id: Uuid = link.get("library_id");
let series_id: Uuid = link.get("series_id");
let series_name: String = link.get("series_name");
// Count external books
@@ -514,10 +538,9 @@ pub async fn get_missing_books(
// Count local books
let total_local: i64 = sqlx::query_scalar(
"SELECT COUNT(*) FROM books b LEFT JOIN series s ON s.id = b.series_id WHERE b.library_id = $1 AND COALESCE(s.name, 'unclassified') = $2",
"SELECT COUNT(*) FROM books WHERE series_id = $1",
)
.bind(library_id)
.bind(&series_name)
.bind(series_id)
.fetch_one(&state.pool)
.await?;

View File

@@ -432,7 +432,7 @@ pub(crate) async fn process_metadata_batch(
// Get series that already have an approved link (skip them)
let already_linked: std::collections::HashSet<String> = sqlx::query_scalar(
"SELECT series_name FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
"SELECT s.name FROM external_metadata_links eml JOIN series s ON s.id = eml.series_id WHERE eml.library_id = $1 AND eml.status = 'approved'",
)
.bind(library_id)
.fetch_all(pool)
@@ -797,14 +797,25 @@ async fn auto_apply(
provider_name: &str,
candidate: &metadata_providers::SeriesCandidate,
) -> Result<Uuid, String> {
// Resolve series_id from series name
let series_id: Uuid = sqlx::query_scalar(
"SELECT id FROM series WHERE library_id = $1 AND name = $2",
)
.bind(library_id)
.bind(series_name)
.fetch_optional(pool)
.await
.map_err(|e| e.to_string())?
.ok_or_else(|| format!("Series '{}' not found in library", series_name))?;
// Create the external_metadata_link
let metadata_json = &candidate.metadata_json;
let row = sqlx::query(
r#"
INSERT INTO external_metadata_links
(library_id, series_name, provider, external_id, external_url, status, confidence, metadata_json, total_volumes_external)
(library_id, series_id, provider, external_id, external_url, status, confidence, metadata_json, total_volumes_external)
VALUES ($1, $2, $3, $4, $5, 'approved', $6, $7, $8)
ON CONFLICT (library_id, series_name, provider)
ON CONFLICT (series_id, provider)
DO UPDATE SET
external_id = EXCLUDED.external_id,
external_url = EXCLUDED.external_url,
@@ -819,7 +830,7 @@ async fn auto_apply(
"#,
)
.bind(library_id)
.bind(series_name)
.bind(series_id)
.bind(provider_name)
.bind(&candidate.external_id)
.bind(&candidate.external_url)

View File

@@ -309,8 +309,10 @@ pub async fn refresh_single_link(
AxumPath(link_id): AxumPath<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
let row = sqlx::query(
"SELECT library_id, series_name, provider, external_id, status \
FROM external_metadata_links WHERE id = $1",
"SELECT eml.library_id, s.name AS series_name, eml.provider, eml.external_id, eml.status \
FROM external_metadata_links eml \
JOIN series s ON s.id = eml.series_id \
WHERE eml.id = $1",
)
.bind(link_id)
.fetch_optional(&state.pool)

View File

@@ -17,7 +17,9 @@ use utoipa::OpenApi;
crate::books::convert_book,
crate::books::update_book,
crate::series::get_series_metadata,
crate::series::get_series_by_name,
crate::series::update_series,
crate::series::delete_series,
crate::pages::get_page,
crate::search::search_books,
crate::index_jobs::enqueue_rebuild,
@@ -112,6 +114,7 @@ use utoipa::OpenApi;
crate::series::OngoingQuery,
crate::books::UpdateBookRequest,
crate::series::SeriesMetadata,
crate::series::SeriesLookup,
crate::series::UpdateSeriesRequest,
crate::series::UpdateSeriesResponse,
crate::pages::PageQuery,

View File

@@ -104,6 +104,27 @@ pub(crate) fn extract_volumes_from_title_pub(title: &str) -> Vec<i32> {
/// Returns true if the title indicates a complete/integral edition
/// (e.g., "intégrale", "complet", "complete", "integral").
/// Match a release title against a list of missing volumes.
/// Returns (matched_volumes, all_volumes_in_title).
/// For integral releases, matched_volumes = all missing volumes, all_volumes = empty.
pub(crate) fn match_title_volumes(title: &str, missing_volumes: &[i32]) -> (Vec<i32>, Vec<i32>) {
    let volumes_in_title = extract_volumes_from_title(title);

    if is_integral_release(title) {
        // A complete/integral edition implicitly covers every missing volume, so
        // report the missing set as matched; the per-title volume list is emptied
        // because its markers do not denote individual tomes here.
        let matched = if missing_volumes.is_empty() {
            Vec::new()
        } else {
            missing_volumes.to_vec()
        };
        return (matched, Vec::new());
    }

    // Regular release: keep only the title volumes that are actually missing.
    let matched: Vec<i32> = volumes_in_title
        .iter()
        .copied()
        .filter(|v| missing_volumes.contains(v))
        .collect();
    (matched, volumes_in_title)
}
pub(crate) fn is_integral_release(title: &str) -> bool {
let lower = title.to_lowercase();
// Strip accents for matching: "intégrale" → "integrale"
@@ -403,21 +424,8 @@ fn match_missing_volumes(
releases
.into_iter()
.map(|r| {
let title_volumes = extract_volumes_from_title(&r.title);
let matched = if missing_numbers.is_empty() {
None
} else {
let matched: Vec<i32> = title_volumes
.iter()
.copied()
.filter(|v| missing_numbers.contains(v))
.collect();
if matched.is_empty() {
None
} else {
Some(matched)
}
};
let (matched_vols, all_volumes) = match_title_volumes(&r.title, &missing_numbers);
let matched = if matched_vols.is_empty() { None } else { Some(matched_vols) };
ProwlarrRelease {
guid: r.guid,
@@ -432,7 +440,7 @@ fn match_missing_volumes(
info_url: r.info_url,
categories: r.categories,
matched_missing_volumes: matched,
all_volumes: title_volumes,
all_volumes,
}
})
.collect()

View File

@@ -377,7 +377,7 @@ pub(crate) async fn process_reading_status_match(
.map_err(|e| e.to_string())?;
let already_linked: std::collections::HashSet<String> = sqlx::query_scalar(
"SELECT series_name FROM anilist_series_links WHERE library_id = $1",
"SELECT s.name FROM anilist_series_links asl JOIN series s ON s.id = asl.series_id WHERE asl.library_id = $1",
)
.bind(library_id)
.fetch_all(pool)
@@ -636,15 +636,24 @@ async fn search_and_link(
.map(String::from);
let anilist_url = candidate["siteUrl"].as_str().map(String::from);
sqlx::query(
r#"
INSERT INTO anilist_series_links (library_id, series_name, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
VALUES ($1, $2, 'anilist', $3, $4, $5, 'linked', NOW())
ON CONFLICT (library_id, series_name, provider) DO NOTHING
"#,
let series_id: Uuid = sqlx::query_scalar(
"SELECT id FROM series WHERE library_id = $1 AND name = $2",
)
.bind(library_id)
.bind(series_name)
.fetch_one(pool)
.await
.map_err(|e| format!("series lookup failed for '{}': {}", series_name, e))?;
sqlx::query(
r#"
INSERT INTO anilist_series_links (library_id, series_id, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
VALUES ($1, $2, 'anilist', $3, $4, $5, 'linked', NOW())
ON CONFLICT (series_id, provider) DO NOTHING
"#,
)
.bind(library_id)
.bind(series_id)
.bind(anilist_id)
.bind(&anilist_title)
.bind(&anilist_url)

View File

@@ -357,6 +357,7 @@ pub async fn get_push_results(
// ---------------------------------------------------------------------------
struct SeriesInfo {
series_id: Uuid,
series_name: String,
anilist_id: i32,
anilist_title: Option<String>,
@@ -379,11 +380,13 @@ pub async fn process_reading_status_push(
let series_to_push: Vec<SeriesInfo> = sqlx::query(
r#"
SELECT
asl.series_name,
asl.series_id,
s.name AS series_name,
asl.anilist_id,
asl.anilist_title,
asl.anilist_url
FROM anilist_series_links asl
JOIN series s ON s.id = asl.series_id
WHERE asl.library_id = $1
AND asl.anilist_id IS NOT NULL
AND (
@@ -392,22 +395,18 @@ pub async fn process_reading_status_push(
SELECT 1
FROM book_reading_progress brp
JOIN books b2 ON b2.id = brp.book_id
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id
AND COALESCE(s2.name, 'unclassified') = asl.series_name
WHERE b2.series_id = asl.series_id
AND brp.user_id = $2
AND brp.updated_at > asl.synced_at
)
OR EXISTS (
SELECT 1
FROM books b2
LEFT JOIN series s2 ON s2.id = b2.series_id
WHERE b2.library_id = asl.library_id
AND COALESCE(s2.name, 'unclassified') = asl.series_name
WHERE b2.series_id = asl.series_id
AND b2.created_at > asl.synced_at
)
)
ORDER BY asl.series_name
ORDER BY s.name
"#,
)
.bind(library_id)
@@ -417,6 +416,7 @@ pub async fn process_reading_status_push(
.map_err(|e| e.to_string())?
.into_iter()
.map(|row| SeriesInfo {
series_id: row.get("series_id"),
series_name: row.get("series_name"),
anilist_id: row.get("anilist_id"),
anilist_title: row.get("anilist_title"),
@@ -466,15 +466,12 @@ pub async fn process_reading_status_push(
COUNT(b.id) AS total_books,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read
FROM books b
LEFT JOIN series s ON s.id = b.series_id
LEFT JOIN book_reading_progress brp
ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1
AND COALESCE(s.name, 'unclassified') = $2
ON brp.book_id = b.id AND brp.user_id = $2
WHERE b.series_id = $1
"#,
)
.bind(library_id)
.bind(&series.series_name)
.bind(series.series_id)
.bind(local_user_id)
.fetch_one(pool)
.await
@@ -513,10 +510,9 @@ pub async fn process_reading_status_push(
Ok(()) => {
// Update synced_at
let _ = sqlx::query(
"UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
"UPDATE anilist_series_links SET synced_at = NOW() WHERE series_id = $1",
)
.bind(library_id)
.bind(&series.series_name)
.bind(series.series_id)
.execute(pool)
.await;
@@ -532,10 +528,9 @@ pub async fn process_reading_status_push(
match push_to_anilist(&token, series.anilist_id, anilist_status, progress_volumes).await {
Ok(()) => {
let _ = sqlx::query(
"UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
"UPDATE anilist_series_links SET synced_at = NOW() WHERE series_id = $1",
)
.bind(library_id)
.bind(&series.series_name)
.bind(series.series_id)
.execute(pool)
.await;

View File

@@ -67,6 +67,53 @@ pub(crate) async fn get_or_create_series(
.map_err(Into::into)
}
// ─── Lookup by name ──────────────────────────────────────────────────────────
/// Minimal series identity returned by the lookup-by-name endpoint:
/// just enough for the caller to redirect to the UUID-based routes.
#[derive(Serialize, ToSchema)]
pub struct SeriesLookup {
    // Series UUID (primary key of the `series` table); exposed as a string in the schema.
    #[schema(value_type = String)]
    pub id: Uuid,
    // UUID of the library this series belongs to.
    #[schema(value_type = String)]
    pub library_id: Uuid,
    // Series display name as stored in `series.name`.
    pub name: String,
}
/// Look up a series by name within a library. Returns its UUID and name.
#[utoipa::path(
    get,
    path = "/libraries/{library_id}/series/by-name/{name}",
    tag = "series",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("name" = String, Path, description = "Series name (URL-encoded)"),
    ),
    responses(
        (status = 200, body = SeriesLookup),
        (status = 404, description = "Series not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_series_by_name(
    State(state): State<AppState>,
    Path((library_id, name)): Path<(Uuid, String)>,
) -> Result<Json<SeriesLookup>, ApiError> {
    // Case-insensitive comparison so URL-decoded names match regardless of casing.
    let found = sqlx::query(
        "SELECT id, library_id, name FROM series WHERE library_id = $1 AND LOWER(name) = LOWER($2)"
    )
    .bind(library_id)
    .bind(&name)
    .fetch_optional(&state.pool)
    .await?;

    // Translate "no row" into a 404 with the requested name in the message.
    let Some(row) = found else {
        return Err(ApiError::not_found(format!("series '{}' not found", name)));
    };

    Ok(Json(SeriesLookup {
        id: row.get("id"),
        library_id: row.get("library_id"),
        name: row.get("name"),
    }))
}
// ─── Structs ─────────────────────────────────────────────────────────────────
#[derive(Serialize, ToSchema)]
@@ -906,6 +953,8 @@ pub async fn ongoing_books(
#[derive(Serialize, ToSchema)]
pub struct SeriesMetadata {
/// Name of the series
pub series_name: String,
/// Authors of the series (series-level metadata, distinct from per-book author field)
pub authors: Vec<String>,
pub description: Option<String>,
@@ -943,7 +992,7 @@ pub async fn get_series_metadata(
) -> Result<Json<SeriesMetadata>, ApiError> {
// Fetch series row (contains metadata directly)
let series_row = sqlx::query(
"SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields, book_author, book_language \
"SELECT name, authors, description, publishers, start_year, total_volumes, status, locked_fields, book_author, book_language \
FROM series WHERE id = $1 AND library_id = $2"
)
.bind(series_id)
@@ -958,6 +1007,7 @@ pub async fn get_series_metadata(
.await?;
Ok(Json(SeriesMetadata {
series_name: series_row.as_ref().map(|r| r.get::<String, _>("name")).unwrap_or_default(),
authors: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("authors")).unwrap_or_default(),
description: series_row.as_ref().and_then(|r| r.get("description")),
publishers: series_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
@@ -1267,6 +1317,7 @@ mod tests {
#[test]
fn series_metadata_serializes() {
let meta = SeriesMetadata {
series_name: "Naruto".to_string(),
description: Some("A ninja story".to_string()),
authors: vec!["Kishimoto".to_string()],
publishers: vec![],