Compare commits

...

6 Commits

Author SHA1 Message Date
52b9b0e00e feat: add series status, improve providers & e2e tests
- Add series status concept (ongoing/ended/hiatus/cancelled/upcoming)
  with normalization across all providers
- Add status field to series_metadata table (migration 0033)
- AniList: use chapters as fallback for volume count on ongoing series,
  add books_message when both volumes and chapters are null
- Bedetheque: extract description from meta tag, genres, parution status,
  origin/language; rewrite book parsing with itemprop microdata for
  clean ISBN, dates, page counts, covers; filter placeholder authors
- Add comprehensive e2e provider tests with field coverage reporting
- Wire status into EditSeriesForm, MetadataSearchModal, and series page

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-18 16:10:45 +01:00
51ef2fa725 chore: bump version to 1.5.4 2026-03-18 15:27:29 +01:00
7d53babc84 chore: bump version to 1.5.3 2026-03-18 15:23:54 +01:00
00f4445924 fix: use sort-order position as fallback volume for book matching
When books have no volume number, use their 1-based position in the
backoffice sort order (volume ASC NULLS LAST, natural title sort) as
effective volume for matching against external provider books.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-18 15:21:32 +01:00
1a91c051b5 chore: bump version to 1.5.2 2026-03-18 15:16:21 +01:00
48ca9d0a8b chore: bump version to 1.5.1 2026-03-18 15:12:44 +01:00
13 changed files with 595 additions and 174 deletions

8
Cargo.lock generated
View File

@@ -64,7 +64,7 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]] [[package]]
name = "api" name = "api"
version = "1.5.0" version = "1.5.4"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"argon2", "argon2",
@@ -1232,7 +1232,7 @@ dependencies = [
[[package]] [[package]]
name = "indexer" name = "indexer"
version = "1.5.0" version = "1.5.4"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"axum", "axum",
@@ -1771,7 +1771,7 @@ dependencies = [
[[package]] [[package]]
name = "parsers" name = "parsers"
version = "1.5.0" version = "1.5.4"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"flate2", "flate2",
@@ -2906,7 +2906,7 @@ dependencies = [
[[package]] [[package]]
name = "stripstream-core" name = "stripstream-core"
version = "1.5.0" version = "1.5.4"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"serde", "serde",

View File

@@ -9,7 +9,7 @@ resolver = "2"
[workspace.package] [workspace.package]
edition = "2021" edition = "2021"
version = "1.5.0" version = "1.5.4"
license = "MIT" license = "MIT"
[workspace.dependencies] [workspace.dependencies]

View File

@@ -1079,6 +1079,8 @@ pub struct SeriesMetadata {
pub publishers: Vec<String>, pub publishers: Vec<String>,
pub start_year: Option<i32>, pub start_year: Option<i32>,
pub total_volumes: Option<i32>, pub total_volumes: Option<i32>,
/// Series status: "ongoing", "ended", "hiatus", "cancelled", "upcoming", or null
pub status: Option<String>,
/// Convenience: author from first book (for pre-filling the per-book apply section) /// Convenience: author from first book (for pre-filling the per-book apply section)
pub book_author: Option<String>, pub book_author: Option<String>,
pub book_language: Option<String>, pub book_language: Option<String>,
@@ -1120,7 +1122,7 @@ pub async fn get_series_metadata(
}; };
let meta_row = sqlx::query( let meta_row = sqlx::query(
"SELECT authors, description, publishers, start_year, total_volumes, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2" "SELECT authors, description, publishers, start_year, total_volumes, status, locked_fields FROM series_metadata WHERE library_id = $1 AND name = $2"
) )
.bind(library_id) .bind(library_id)
.bind(&name) .bind(&name)
@@ -1133,6 +1135,7 @@ pub async fn get_series_metadata(
publishers: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(), publishers: meta_row.as_ref().map(|r| r.get::<Vec<String>, _>("publishers")).unwrap_or_default(),
start_year: meta_row.as_ref().and_then(|r| r.get("start_year")), start_year: meta_row.as_ref().and_then(|r| r.get("start_year")),
total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")), total_volumes: meta_row.as_ref().and_then(|r| r.get("total_volumes")),
status: meta_row.as_ref().and_then(|r| r.get("status")),
book_author: books_row.as_ref().and_then(|r| r.get("author")), book_author: books_row.as_ref().and_then(|r| r.get("author")),
book_language: books_row.as_ref().and_then(|r| r.get("language")), book_language: books_row.as_ref().and_then(|r| r.get("language")),
locked_fields: meta_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})), locked_fields: meta_row.as_ref().map(|r| r.get::<serde_json::Value, _>("locked_fields")).unwrap_or(serde_json::json!({})),
@@ -1158,6 +1161,8 @@ pub struct UpdateSeriesRequest {
pub publishers: Vec<String>, pub publishers: Vec<String>,
pub start_year: Option<i32>, pub start_year: Option<i32>,
pub total_volumes: Option<i32>, pub total_volumes: Option<i32>,
/// Series status: "ongoing", "ended", "hiatus", "cancelled", "upcoming", or null
pub status: Option<String>,
/// Fields locked from external metadata sync /// Fields locked from external metadata sync
#[serde(default)] #[serde(default)]
pub locked_fields: Option<serde_json::Value>, pub locked_fields: Option<serde_json::Value>,
@@ -1256,14 +1261,15 @@ pub async fn update_series(
let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({})); let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, locked_fields, updated_at) INSERT INTO series_metadata (library_id, name, authors, description, publishers, start_year, total_volumes, status, locked_fields, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW()) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, NOW())
ON CONFLICT (library_id, name) DO UPDATE ON CONFLICT (library_id, name) DO UPDATE
SET authors = EXCLUDED.authors, SET authors = EXCLUDED.authors,
description = EXCLUDED.description, description = EXCLUDED.description,
publishers = EXCLUDED.publishers, publishers = EXCLUDED.publishers,
start_year = EXCLUDED.start_year, start_year = EXCLUDED.start_year,
total_volumes = EXCLUDED.total_volumes, total_volumes = EXCLUDED.total_volumes,
status = EXCLUDED.status,
locked_fields = EXCLUDED.locked_fields, locked_fields = EXCLUDED.locked_fields,
updated_at = NOW() updated_at = NOW()
"# "#
@@ -1275,6 +1281,7 @@ pub async fn update_series(
.bind(&publishers) .bind(&publishers)
.bind(body.start_year) .bind(body.start_year)
.bind(body.total_volumes) .bind(body.total_volumes)
.bind(&body.status)
.bind(&locked_fields) .bind(&locked_fields)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;

View File

@@ -109,6 +109,8 @@ pub struct SyncReport {
pub books: Vec<BookSyncReport>, pub books: Vec<BookSyncReport>,
pub books_matched: i64, pub books_matched: i64,
pub books_unmatched: i64, pub books_unmatched: i64,
#[serde(skip_serializing_if = "Option::is_none")]
pub books_message: Option<String>,
} }
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema)]
@@ -352,6 +354,14 @@ pub async fn approve_metadata(
report.books = book_reports; report.books = book_reports;
report.books_unmatched = unmatched; report.books_unmatched = unmatched;
if matched == 0 && unmatched == 0 {
report.books_message = Some(
"This provider does not have volume-level data for this series. \
Series metadata was synced, but book matching is not available."
.to_string(),
);
}
// Update synced_at // Update synced_at
sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1") sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1")
.bind(id) .bind(id)
@@ -683,10 +693,14 @@ async fn sync_series_metadata(
.get("start_year") .get("start_year")
.and_then(|y| y.as_i64()) .and_then(|y| y.as_i64())
.map(|y| y as i32); .map(|y| y as i32);
let status = metadata_json
.get("status")
.and_then(|s| s.as_str())
.map(normalize_series_status);
// Fetch existing state before upsert // Fetch existing state before upsert
let existing = sqlx::query( let existing = sqlx::query(
r#"SELECT description, publishers, start_year, total_volumes, authors, locked_fields r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
FROM series_metadata WHERE library_id = $1 AND name = $2"#, FROM series_metadata WHERE library_id = $1 AND name = $2"#,
) )
.bind(library_id) .bind(library_id)
@@ -697,8 +711,8 @@ async fn sync_series_metadata(
// Respect locked_fields: only update fields that are NOT locked // Respect locked_fields: only update fields that are NOT locked
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, authors, created_at, updated_at) INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW()) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
ON CONFLICT (library_id, name) ON CONFLICT (library_id, name)
DO UPDATE SET DO UPDATE SET
description = CASE description = CASE
@@ -718,6 +732,10 @@ async fn sync_series_metadata(
WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes) ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
END, END,
status = CASE
WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
ELSE COALESCE(EXCLUDED.status, series_metadata.status)
END,
authors = CASE authors = CASE
WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
@@ -732,6 +750,7 @@ async fn sync_series_metadata(
.bind(&publishers) .bind(&publishers)
.bind(start_year) .bind(start_year)
.bind(total_volumes) .bind(total_volumes)
.bind(&status)
.bind(&authors) .bind(&authors)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;
@@ -779,6 +798,11 @@ async fn sync_series_metadata(
old: existing.as_ref().and_then(|r| r.get::<Option<i32>, _>("total_volumes")).map(|y| serde_json::json!(y)), old: existing.as_ref().and_then(|r| r.get::<Option<i32>, _>("total_volumes")).map(|y| serde_json::json!(y)),
new: total_volumes.map(|y| serde_json::json!(y)), new: total_volumes.map(|y| serde_json::json!(y)),
}, },
FieldDef {
name: "status",
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("status")).map(|s| serde_json::Value::String(s)),
new: status.as_ref().map(|s| serde_json::Value::String(s.clone())),
},
]; ];
for f in fields { for f in fields {
@@ -801,6 +825,27 @@ async fn sync_series_metadata(
Ok(report) Ok(report)
} }
/// Normalize provider-specific status strings to a standard set:
/// "ongoing", "ended", "hiatus", "cancelled", "upcoming", or the original lowercase value
fn normalize_series_status(raw: &str) -> String {
let lower = raw.to_lowercase();
match lower.as_str() {
// AniList
"finished" => "ended".to_string(),
"releasing" => "ongoing".to_string(),
"not_yet_released" => "upcoming".to_string(),
"cancelled" => "cancelled".to_string(),
"hiatus" => "hiatus".to_string(),
// Bédéthèque
_ if lower.contains("finie") || lower.contains("terminée") => "ended".to_string(),
_ if lower.contains("en cours") => "ongoing".to_string(),
_ if lower.contains("hiatus") || lower.contains("suspendue") => "hiatus".to_string(),
_ if lower.contains("annulée") || lower.contains("arrêtée") => "cancelled".to_string(),
// Fallback
_ => lower,
}
}
async fn sync_books_metadata( async fn sync_books_metadata(
state: &AppState, state: &AppState,
link_id: Uuid, link_id: Uuid,
@@ -829,12 +874,17 @@ async fn sync_books_metadata(
let mut matched_count: i64 = 0; let mut matched_count: i64 = 0;
let mut book_reports: Vec<BookSyncReport> = Vec::new(); let mut book_reports: Vec<BookSyncReport> = Vec::new();
// Pre-fetch all local books for this series to enable flexible matching // Pre-fetch all local books for this series, sorted like the backoffice
// (volume ASC NULLS LAST, then natural title sort)
let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as( let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
r#" r#"
SELECT id, volume, title FROM books SELECT id, volume, title FROM books
WHERE library_id = $1 WHERE library_id = $1
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
ORDER BY volume NULLS LAST,
REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
"#, "#,
) )
.bind(library_id) .bind(library_id)
@@ -842,24 +892,30 @@ async fn sync_books_metadata(
.fetch_all(&state.pool) .fetch_all(&state.pool)
.await?; .await?;
// Build effective position for each local book: use volume if set, otherwise 1-based sort order
let local_books_with_pos: Vec<(Uuid, i32, String)> = local_books
.iter()
.enumerate()
.map(|(idx, (id, vol, title))| (*id, vol.unwrap_or((idx + 1) as i32), title.clone()))
.collect();
// Track which local books have already been matched to avoid double-matching // Track which local books have already been matched to avoid double-matching
let mut matched_local_ids = std::collections::HashSet::new(); let mut matched_local_ids = std::collections::HashSet::new();
for book in &books { for (ext_idx, book) in books.iter().enumerate() {
// Strategy 1: Match by volume number // Effective volume for the external book: provider volume_number, or 1-based position
let mut local_book_id: Option<Uuid> = if let Some(vol) = book.volume_number { let ext_vol = book.volume_number.unwrap_or((ext_idx + 1) as i32);
local_books
.iter() // Strategy 1: Match by effective volume number
.find(|(id, v, _)| *v == Some(vol) && !matched_local_ids.contains(id)) let mut local_book_id: Option<Uuid> = local_books_with_pos
.map(|(id, _, _)| *id) .iter()
} else { .find(|(id, v, _)| *v == ext_vol && !matched_local_ids.contains(id))
None .map(|(id, _, _)| *id);
};
// Strategy 2: External title contained in local title or vice-versa (case-insensitive) // Strategy 2: External title contained in local title or vice-versa (case-insensitive)
if local_book_id.is_none() { if local_book_id.is_none() {
let ext_title_lower = book.title.to_lowercase(); let ext_title_lower = book.title.to_lowercase();
local_book_id = local_books.iter().find(|(id, _, local_title)| { local_book_id = local_books_with_pos.iter().find(|(id, _, local_title)| {
if matched_local_ids.contains(id) { if matched_local_ids.contains(id) {
return false; return false;
} }

View File

@@ -41,6 +41,7 @@ query ($search: String) {
description(asHtml: false) description(asHtml: false)
coverImage { large medium } coverImage { large medium }
startDate { year } startDate { year }
status
volumes volumes
chapters chapters
staff { edges { node { name { full } } role } } staff { edges { node { name { full } } role } }
@@ -59,6 +60,7 @@ query ($id: Int) {
description(asHtml: false) description(asHtml: false)
coverImage { large medium } coverImage { large medium }
startDate { year } startDate { year }
status
volumes volumes
chapters chapters
staff { edges { node { name { full } } role } } staff { edges { node { name { full } } role } }
@@ -157,6 +159,17 @@ async fn search_series_impl(
.and_then(|v| v.as_i64()) .and_then(|v| v.as_i64())
.map(|v| v as i32); .map(|v| v as i32);
let chapters = m
.get("chapters")
.and_then(|v| v.as_i64())
.map(|v| v as i32);
let status = m
.get("status")
.and_then(|s| s.as_str())
.unwrap_or("UNKNOWN")
.to_string();
let site_url = m let site_url = m
.get("siteUrl") .get("siteUrl")
.and_then(|u| u.as_str()) .and_then(|u| u.as_str())
@@ -166,6 +179,15 @@ async fn search_series_impl(
let confidence = compute_confidence(&title, &query_lower); let confidence = compute_confidence(&title, &query_lower);
// Use volumes if known, otherwise fall back to chapters count
let (total_volumes, volume_source) = match volumes {
Some(v) => (Some(v), "volumes"),
None => match chapters {
Some(c) => (Some(c), "chapters"),
None => (None, "unknown"),
},
};
Some(SeriesCandidate { Some(SeriesCandidate {
external_id: id.to_string(), external_id: id.to_string(),
title, title,
@@ -173,11 +195,16 @@ async fn search_series_impl(
description, description,
publishers: vec![], publishers: vec![],
start_year, start_year,
total_volumes: volumes, total_volumes,
cover_url, cover_url,
external_url: site_url, external_url: site_url,
confidence, confidence,
metadata_json: serde_json::json!({}), metadata_json: serde_json::json!({
"status": status,
"chapters": chapters,
"volumes": volumes,
"volume_source": volume_source,
}),
}) })
}) })
.collect(); .collect();
@@ -225,6 +252,14 @@ async fn get_series_books_impl(
.and_then(|v| v.as_i64()) .and_then(|v| v.as_i64())
.map(|v| v as i32); .map(|v| v as i32);
let chapters = media
.get("chapters")
.and_then(|v| v.as_i64())
.map(|v| v as i32);
// Use volumes if known, otherwise fall back to chapters count
let total = volumes.or(chapters);
let cover_url = media let cover_url = media
.get("coverImage") .get("coverImage")
.and_then(|ci| ci.get("large").or_else(|| ci.get("medium"))) .and_then(|ci| ci.get("large").or_else(|| ci.get("medium")))
@@ -238,9 +273,9 @@ async fn get_series_books_impl(
let authors = extract_authors(media); let authors = extract_authors(media);
// AniList doesn't have per-volume data — generate volume entries if volumes count is known // AniList doesn't have per-volume data — generate entries from volumes count (or chapters as fallback)
let mut books = Vec::new(); let mut books = Vec::new();
if let Some(total) = volumes { if let Some(total) = total {
for vol in 1..=total { for vol in 1..=total {
books.push(BookCandidate { books.push(BookCandidate {
external_book_id: format!("{}-vol-{}", external_id, vol), external_book_id: format!("{}-vol-{}", external_id, vol),
@@ -256,21 +291,6 @@ async fn get_series_books_impl(
metadata_json: serde_json::json!({}), metadata_json: serde_json::json!({}),
}); });
} }
} else {
// Single entry for the whole manga
books.push(BookCandidate {
external_book_id: external_id.to_string(),
title,
volume_number: Some(1),
authors,
isbn: None,
summary: description,
cover_url,
page_count: None,
language: Some("ja".to_string()),
publish_date: None,
metadata_json: serde_json::json!({}),
});
} }
Ok(books) Ok(books)

View File

@@ -210,6 +210,10 @@ async fn search_series_impl(
"authors": c.authors, "authors": c.authors,
"publishers": c.publishers, "publishers": c.publishers,
"start_year": c.start_year, "start_year": c.start_year,
"genres": details.genres,
"status": details.status,
"origin": details.origin,
"language": details.language,
}); });
} }
} }
@@ -235,6 +239,10 @@ struct SeriesDetails {
publishers: Vec<String>, publishers: Vec<String>,
start_year: Option<i32>, start_year: Option<i32>,
album_count: Option<i32>, album_count: Option<i32>,
genres: Vec<String>,
status: Option<String>,
origin: Option<String>,
language: Option<String>,
} }
async fn fetch_series_details( async fn fetch_series_details(
@@ -276,64 +284,109 @@ async fn fetch_series_details(
publishers: vec![], publishers: vec![],
start_year: None, start_year: None,
album_count: None, album_count: None,
genres: vec![],
status: None,
origin: None,
language: None,
}; };
// Description: look for #full-commentaire or .serie-info // Description from <meta name="description"> — format: "Tout sur la série {name} : {description}"
if let Ok(sel) = Selector::parse("#full-commentaire") { if let Ok(sel) = Selector::parse(r#"meta[name="description"]"#) {
if let Some(el) = doc.select(&sel).next() { if let Some(el) = doc.select(&sel).next() {
let text = el.text().collect::<String>().trim().to_string(); if let Some(content) = el.value().attr("content") {
if !text.is_empty() { let desc = content.trim().to_string();
details.description = Some(text); // Strip the "Tout sur la série ... : " prefix
} let cleaned = if let Some(pos) = desc.find(" : ") {
} desc[pos + 3..].trim().to_string()
} } else {
desc
// Fallback description from span.infoedition };
if details.description.is_none() { if !cleaned.is_empty() {
if let Ok(sel) = Selector::parse("span.infoedition") { details.description = Some(cleaned);
if let Some(el) = doc.select(&sel).next() {
let text = el.text().collect::<String>().trim().to_string();
if !text.is_empty() {
details.description = Some(text);
} }
} }
} }
} }
// Extract authors and publishers from album info blocks // Extract authors from itemprop="author" and itemprop="illustrator" (deduplicated)
if let Ok(sel) = Selector::parse(".infos li") { {
let mut authors_set = std::collections::HashSet::new(); let mut authors_set = std::collections::HashSet::new();
let mut publishers_set = std::collections::HashSet::new(); for attr in ["author", "illustrator"] {
if let Ok(sel) = Selector::parse(&format!(r#"[itemprop="{attr}"]"#)) {
for li in doc.select(&sel) { for el in doc.select(&sel) {
let text = li.text().collect::<String>(); let name = el.text().collect::<String>().trim().to_string();
let text = text.trim(); // Names are "Last, First" — normalize to "First Last"
let normalized = if let Some((last, first)) = name.split_once(',') {
if let Some(val) = extract_info_value(text, "Scénario") { format!("{} {}", first.trim(), last.trim())
for a in val.split(',').map(str::trim).filter(|s| !s.is_empty()) { } else {
authors_set.insert(a.to_string()); name
} };
} if !normalized.is_empty() && is_real_author(&normalized) {
if let Some(val) = extract_info_value(text, "Dessin") { authors_set.insert(normalized);
for a in val.split(',').map(str::trim).filter(|s| !s.is_empty()) { }
authors_set.insert(a.to_string());
}
}
if let Some(val) = extract_info_value(text, "Editeur") {
for p in val.split(',').map(str::trim).filter(|s| !s.is_empty()) {
publishers_set.insert(p.to_string());
} }
} }
} }
details.authors = authors_set.into_iter().collect(); details.authors = authors_set.into_iter().collect();
details.authors.sort(); details.authors.sort();
}
// Extract publishers from itemprop="publisher" (deduplicated)
{
let mut publishers_set = std::collections::HashSet::new();
if let Ok(sel) = Selector::parse(r#"[itemprop="publisher"]"#) {
for el in doc.select(&sel) {
let name = el.text().collect::<String>().trim().to_string();
if !name.is_empty() {
publishers_set.insert(name);
}
}
}
details.publishers = publishers_set.into_iter().collect(); details.publishers = publishers_set.into_iter().collect();
details.publishers.sort(); details.publishers.sort();
} }
// Album count from serie-info text (e.g. "Tomes : 8") // Extract series-level info from <li><label>X :</label>value</li> blocks
// Genre: <li><label>Genre :</label><span class="style-serie">Animalier, Aventure, Humour</span></li>
if let Ok(sel) = Selector::parse("span.style-serie") {
if let Some(el) = doc.select(&sel).next() {
let text = el.text().collect::<String>();
details.genres = text
.split(',')
.map(|s| s.trim().to_string())
.filter(|s| !s.is_empty())
.collect();
}
}
// Parution: <li><label>Parution :</label><span class="parution-serie">Série finie</span></li>
if let Ok(sel) = Selector::parse("span.parution-serie") {
if let Some(el) = doc.select(&sel).next() {
let text = el.text().collect::<String>().trim().to_string();
if !text.is_empty() {
details.status = Some(text);
}
}
}
// Origine and Langue from page text (no dedicated CSS class)
let page_text = doc.root_element().text().collect::<String>(); let page_text = doc.root_element().text().collect::<String>();
if let Some(val) = extract_info_value(&page_text, "Origine") {
let val = val.lines().next().unwrap_or(val).trim();
if !val.is_empty() {
details.origin = Some(val.to_string());
}
}
if let Some(val) = extract_info_value(&page_text, "Langue") {
let val = val.lines().next().unwrap_or(val).trim();
if !val.is_empty() {
details.language = Some(val.to_string());
}
}
// Album count from serie-info text (e.g. "Tomes : 8")
if let Ok(re) = regex::Regex::new(r"Tomes?\s*:\s*(\d+)") { if let Ok(re) = regex::Regex::new(r"Tomes?\s*:\s*(\d+)") {
if let Some(caps) = re.captures(&page_text) { if let Some(caps) = re.captures(&page_text) {
if let Ok(n) = caps[1].parse::<i32>() { if let Ok(n) = caps[1].parse::<i32>() {
@@ -342,11 +395,16 @@ async fn fetch_series_details(
} }
} }
// Start year from first album date (Dépot légal) // Start year from first <meta itemprop="datePublished" content="YYYY-MM-DD">
if let Ok(re) = regex::Regex::new(r"[Dd][ée]p[ôo]t l[ée]gal\s*:\s*\d{2}/(\d{4})") { if let Ok(sel) = Selector::parse(r#"[itemprop="datePublished"]"#) {
if let Some(caps) = re.captures(&page_text) { if let Some(el) = doc.select(&sel).next() {
if let Ok(year) = caps[1].parse::<i32>() { if let Some(content) = el.value().attr("content") {
details.start_year = Some(year); // content is "YYYY-MM-DD"
if let Some(year_str) = content.split('-').next() {
if let Ok(year) = year_str.parse::<i32>() {
details.start_year = Some(year);
}
}
} }
} }
} }
@@ -424,79 +482,91 @@ async fn get_series_books_impl(
let doc = Html::parse_document(&html); let doc = Html::parse_document(&html);
let mut books = Vec::new(); let mut books = Vec::new();
// Albums are in .album-main blocks // Each album block starts before a .album-main div.
// The cover image (<img itemprop="image">) is OUTSIDE .album-main (sibling),
// so we iterate over a broader parent. But the simplest approach: parse all
// itemprop elements relative to each .album-main, plus pick covers separately.
let album_sel = Selector::parse(".album-main").map_err(|e| format!("selector: {e}"))?; let album_sel = Selector::parse(".album-main").map_err(|e| format!("selector: {e}"))?;
for album_el in doc.select(&album_sel) { // Pre-collect cover images — they appear in <img itemprop="image"> before each .album-main
let album_html = album_el.html(); // and link to an album URL containing the book ID
let album_doc = Html::parse_fragment(&album_html); let cover_sel = Selector::parse(r#"img[itemprop="image"]"#).map_err(|e| format!("selector: {e}"))?;
let covers: Vec<String> = doc.select(&cover_sel)
.filter_map(|el| el.value().attr("src").map(|s| {
if s.starts_with("http") { s.to_string() } else { format!("https://www.bedetheque.com{}", s) }
}))
.collect();
// Title from .titre for (idx, album_el) in doc.select(&album_sel).enumerate() {
let title = select_text(&album_doc, ".titre") // Title from <a class="titre" title="..."> — the title attribute is clean
.or_else(|| { let title_sel = Selector::parse("a.titre").ok();
Selector::parse(".titre a") let title_el = title_sel.as_ref().and_then(|s| album_el.select(s).next());
.ok() let title = title_el
.and_then(|s| album_doc.select(&s).next()) .and_then(|el| el.value().attr("title"))
.map(|el| el.text().collect::<String>().trim().to_string()) .unwrap_or("")
}) .trim()
.unwrap_or_default(); .to_string();
if title.is_empty() { if title.is_empty() {
continue; continue;
} }
// Volume number from title or .num span // External book ID from album URL (e.g. "...-1063.html")
let volume_number = select_text(&album_doc, ".num") let album_url = title_el.and_then(|el| el.value().attr("href")).unwrap_or("");
.and_then(|s| { let external_book_id = regex::Regex::new(r"-(\d+)\.html")
s.trim_end_matches('.')
.trim()
.parse::<i32>()
.ok()
})
.or_else(|| extract_volume_from_title(&title));
// Album URL
let album_url = Selector::parse("a[href*='/BD-']")
.ok() .ok()
.and_then(|s| album_doc.select(&s).next()) .and_then(|re| re.captures(album_url))
.and_then(|el| el.value().attr("href")) .map(|c| c[1].to_string())
.map(String::from);
// External book id from URL
let external_book_id = album_url
.as_deref()
.and_then(|u| {
regex::Regex::new(r"-(\d+)\.html")
.ok()
.and_then(|re| re.captures(u))
.map(|c| c[1].to_string())
})
.unwrap_or_default(); .unwrap_or_default();
// Cover // Volume number from URL pattern "Tome-{N}-" or from itemprop name
let cover_url = Selector::parse("img[src*='cache/thb_couv']") let volume_number = regex::Regex::new(r"(?i)Tome-(\d+)-")
.ok() .ok()
.and_then(|s| album_doc.select(&s).next()) .and_then(|re| re.captures(album_url))
.and_then(|el| el.value().attr("src")) .and_then(|c| c[1].parse::<i32>().ok())
.map(|s| { .or_else(|| extract_volume_from_title(&title));
if s.starts_with("http") {
s.to_string()
} else {
format!("https://www.bedetheque.com{}", s)
}
});
// Extract info fields // Authors from itemprop="author" and itemprop="illustrator"
let album_text = album_el.text().collect::<String>(); let mut authors = Vec::new();
let authors = extract_all_authors(&album_text); let author_sel = Selector::parse(r#"[itemprop="author"]"#).ok();
let isbn = extract_info_value(&album_text, "EAN/ISBN") let illustrator_sel = Selector::parse(r#"[itemprop="illustrator"]"#).ok();
.or_else(|| extract_info_value(&album_text, "ISBN")) for sel in [&author_sel, &illustrator_sel].into_iter().flatten() {
.map(|s| s.trim().to_string()); for el in album_el.select(sel) {
let page_count = extract_info_value(&album_text, "Planches") let name = el.text().collect::<String>().trim().to_string();
.and_then(|s| s.trim().parse::<i32>().ok()); // Names are "Last, First" format — normalize to "First Last"
let publish_date = extract_info_value(&album_text, "Dépot légal") let normalized = if let Some((last, first)) = name.split_once(',') {
.or_else(|| extract_info_value(&album_text, "Depot legal")) format!("{} {}", first.trim(), last.trim())
.map(|s| s.trim().to_string()); } else {
name
};
if !normalized.is_empty() && is_real_author(&normalized) && !authors.contains(&normalized) {
authors.push(normalized);
}
}
}
// ISBN from <span itemprop="isbn">
let isbn = Selector::parse(r#"[itemprop="isbn"]"#)
.ok()
.and_then(|s| album_el.select(&s).next())
.map(|el| el.text().collect::<String>().trim().to_string())
.filter(|s| !s.is_empty());
// Page count from <span itemprop="numberOfPages">
let page_count = Selector::parse(r#"[itemprop="numberOfPages"]"#)
.ok()
.and_then(|s| album_el.select(&s).next())
.and_then(|el| el.text().collect::<String>().trim().parse::<i32>().ok());
// Publish date from <meta itemprop="datePublished" content="YYYY-MM-DD">
let publish_date = Selector::parse(r#"[itemprop="datePublished"]"#)
.ok()
.and_then(|s| album_el.select(&s).next())
.and_then(|el| el.value().attr("content").map(|c| c.trim().to_string()))
.filter(|s| !s.is_empty());
// Cover from pre-collected covers (same index)
let cover_url = covers.get(idx).cloned();
books.push(BookCandidate { books.push(BookCandidate {
external_book_id, external_book_id,
@@ -517,26 +587,9 @@ async fn get_series_books_impl(
Ok(books) Ok(books)
} }
fn select_text(doc: &Html, selector: &str) -> Option<String> { /// Filter out placeholder author names from Bédéthèque
Selector::parse(selector) fn is_real_author(name: &str) -> bool {
.ok() !name.starts_with('<') && !name.ends_with('>') && name != "Collectif"
.and_then(|s| doc.select(&s).next())
.map(|el| el.text().collect::<String>().trim().to_string())
.filter(|s| !s.is_empty())
}
fn extract_all_authors(text: &str) -> Vec<String> {
let mut authors = Vec::new();
for label in ["Scénario", "Scenario", "Dessin"] {
if let Some(val) = extract_info_value(text, label) {
for a in val.split(',').map(str::trim).filter(|s| !s.is_empty()) {
if !authors.contains(&a.to_string()) {
authors.push(a.to_string());
}
}
}
}
authors
} }
fn extract_volume_from_title(title: &str) -> Option<i32> { fn extract_volume_from_title(title: &str) -> Option<i32> {

View File

@@ -79,3 +79,217 @@ pub fn get_provider(name: &str) -> Option<Box<dyn MetadataProvider>> {
_ => None, _ => None,
} }
} }
// ---------------------------------------------------------------------------
// End-to-end provider tests
//
// These tests hit real external APIs — run them explicitly with:
// cargo test -p api providers_e2e -- --ignored --nocapture
// ---------------------------------------------------------------------------
#[cfg(test)]
mod providers_e2e {
    use super::*;

    /// Provider config requesting French-language results (no API key).
    fn config_fr() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "fr".to_string() }
    }

    /// Provider config requesting English-language results (no API key).
    fn config_en() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "en".to_string() }
    }

    /// Truncate `s` to at most `max_chars` characters.
    ///
    /// Byte slicing (`&s[..s.len().min(120)]`, as previously done) panics
    /// when the cut index lands inside a multi-byte UTF-8 sequence — a
    /// real risk here, since provider descriptions are frequently French
    /// text containing accented (multi-byte) characters.
    fn preview(s: &str, max_chars: usize) -> String {
        s.chars().take(max_chars).collect()
    }

    /// Dump every field of the best series candidate for manual inspection
    /// (these tests are meant to be run with `--nocapture`).
    fn print_candidate(name: &str, c: &SeriesCandidate) {
        println!("\n=== {name} — best candidate ===");
        println!(" title: {:?}", c.title);
        println!(" external_id: {:?}", c.external_id);
        println!(" authors: {:?}", c.authors);
        // Char-based truncation — safe on non-ASCII descriptions.
        println!(" description: {:?}", c.description.as_deref().map(|d| preview(d, 120)));
        println!(" publishers: {:?}", c.publishers);
        println!(" start_year: {:?}", c.start_year);
        println!(" total_volumes: {:?}", c.total_volumes);
        println!(" cover_url: {}", c.cover_url.is_some());
        println!(" external_url: {}", c.external_url.is_some());
        println!(" confidence: {:.2}", c.confidence);
        println!(" metadata_json: {}", serde_json::to_string_pretty(&c.metadata_json).unwrap_or_default());
    }

    /// Print the first few book candidates plus a per-field coverage
    /// summary, so a human can judge how rich each provider's data is.
    fn print_books(name: &str, books: &[BookCandidate]) {
        let total = books.len();
        // NOTE: the name/count are separated by an em dash; the previous
        // format string ("{name}{} books") ran them together.
        println!("\n=== {name} — {} books ===", total);
        for (i, b) in books.iter().take(5).enumerate() {
            println!(
                " [{}] vol={:?} title={:?} authors={} isbn={:?} pages={:?} lang={:?} date={:?} cover={}",
                i, b.volume_number, b.title, b.authors.len(), b.isbn, b.page_count, b.language, b.publish_date, b.cover_url.is_some()
            );
        }
        if total > 5 { println!(" ... and {} more", total - 5); }
        // Field-coverage counters: how many books carry each optional field.
        let with_vol = books.iter().filter(|b| b.volume_number.is_some()).count();
        let with_isbn = books.iter().filter(|b| b.isbn.is_some()).count();
        let with_authors = books.iter().filter(|b| !b.authors.is_empty()).count();
        let with_date = books.iter().filter(|b| b.publish_date.is_some()).count();
        let with_cover = books.iter().filter(|b| b.cover_url.is_some()).count();
        let with_pages = books.iter().filter(|b| b.page_count.is_some()).count();
        println!(" --- field coverage ---");
        println!(" volume_number: {with_vol}/{}", total);
        println!(" isbn: {with_isbn}/{}", total);
        println!(" authors: {with_authors}/{}", total);
        println!(" publish_date: {with_date}/{}", total);
        println!(" cover_url: {with_cover}/{}", total);
        println!(" page_count: {with_pages}/{}", total);
    }

    // --- Google Books ---

    /// Search + book listing round-trip against the live Google Books API.
    #[tokio::test]
    #[ignore]
    async fn google_books_search_and_books() {
        let p = get_provider("google_books").unwrap();
        let cfg = config_en();
        let candidates = p.search_series("Blacksad", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "google_books: no results for Blacksad");
        print_candidate("google_books", &candidates[0]);
        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("google_books", &books);
        assert!(!books.is_empty(), "google_books: no books returned");
    }

    // --- Open Library ---

    /// Search + book listing round-trip against the live Open Library API.
    #[tokio::test]
    #[ignore]
    async fn open_library_search_and_books() {
        let p = get_provider("open_library").unwrap();
        let cfg = config_en();
        let candidates = p.search_series("Sandman Neil Gaiman", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "open_library: no results for Sandman");
        print_candidate("open_library", &candidates[0]);
        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("open_library", &books);
        assert!(!books.is_empty(), "open_library: no books returned");
    }

    // --- AniList ---

    /// A finished series must expose total_volumes, description, authors,
    /// and a FINISHED status in metadata_json.
    #[tokio::test]
    #[ignore]
    async fn anilist_search_finished() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();
        let candidates = p.search_series("Death Note", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for Death Note");
        print_candidate("anilist (finished)", &candidates[0]);
        let best = &candidates[0];
        assert!(best.total_volumes.is_some(), "anilist: finished series should have total_volumes");
        assert!(best.description.is_some(), "anilist: should have description");
        assert!(!best.authors.is_empty(), "anilist: should have authors");
        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("FINISHED"), "anilist: Death Note should be FINISHED");
        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("anilist (Death Note)", &books);
        assert!(books.len() >= 12, "anilist: Death Note should have ≥12 volumes, got {}", books.len());
    }

    /// An ongoing series should report RELEASING; volume_source / total
    /// volumes are printed for inspection (chapter fallback may apply).
    #[tokio::test]
    #[ignore]
    async fn anilist_search_ongoing() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();
        let candidates = p.search_series("One Piece", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for One Piece");
        print_candidate("anilist (ongoing)", &candidates[0]);
        let best = &candidates[0];
        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("RELEASING"), "anilist: One Piece should be RELEASING");
        let volume_source = best.metadata_json.get("volume_source").and_then(|s| s.as_str());
        println!(" volume_source: {:?}", volume_source);
        println!(" total_volumes: {:?}", best.total_volumes);
    }

    // --- Bédéthèque ---

    /// Scrape round-trip: series-level fields plus enriched metadata_json
    /// (genres, status) and the per-volume book list.
    #[tokio::test]
    #[ignore]
    async fn bedetheque_search_and_books() {
        let p = get_provider("bedetheque").unwrap();
        let cfg = config_fr();
        let candidates = p.search_series("De Cape et de Crocs", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "bedetheque: no results");
        print_candidate("bedetheque", &candidates[0]);
        let best = &candidates[0];
        assert!(best.description.is_some(), "bedetheque: should have description");
        assert!(!best.authors.is_empty(), "bedetheque: should have authors");
        assert!(!best.publishers.is_empty(), "bedetheque: should have publishers");
        assert!(best.start_year.is_some(), "bedetheque: should have start_year");
        assert!(best.total_volumes.is_some(), "bedetheque: should have total_volumes");
        // Enriched metadata_json
        let mj = &best.metadata_json;
        assert!(mj.get("genres").and_then(|g| g.as_array()).map(|a| !a.is_empty()).unwrap_or(false), "bedetheque: should have genres");
        assert!(mj.get("status").and_then(|s| s.as_str()).is_some(), "bedetheque: should have status");
        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("bedetheque", &books);
        assert!(books.len() >= 12, "bedetheque: De Cape et de Crocs should have ≥12 volumes, got {}", books.len());
    }

    // --- ComicVine (needs API key) ---

    /// Without an API key the search is expected to fail; we only print the
    /// outcome rather than assert, since behavior depends on the remote API.
    #[tokio::test]
    #[ignore]
    async fn comicvine_no_key() {
        let p = get_provider("comicvine").unwrap();
        let cfg = config_en();
        let result = p.search_series("Batman", &cfg).await;
        println!("\n=== comicvine (no key) ===");
        match result {
            Ok(c) => println!(" returned {} candidates (unexpected without key)", c.len()),
            Err(e) => println!(" expected error: {e}"),
        }
    }

    // --- Cross-provider comparison ---

    /// Run the same query through every provider and print a one-line
    /// summary per provider, to compare field richness side by side.
    #[tokio::test]
    #[ignore]
    async fn cross_provider_blacksad() {
        println!("\n{}", "=".repeat(60));
        println!(" Cross-provider comparison: Blacksad");
        println!("{}\n", "=".repeat(60));
        let providers: Vec<(&str, ProviderConfig)> = vec![
            ("google_books", config_en()),
            ("open_library", config_en()),
            ("anilist", config_fr()),
            ("bedetheque", config_fr()),
        ];
        for (name, cfg) in &providers {
            let p = get_provider(name).unwrap();
            match p.search_series("Blacksad", cfg).await {
                Ok(candidates) if !candidates.is_empty() => {
                    let b = &candidates[0];
                    println!("[{name}] title={:?} authors={} desc={} pubs={} year={:?} vols={:?} cover={} url={} conf={:.2}",
                        b.title, b.authors.len(), b.description.is_some(), b.publishers.len(),
                        b.start_year, b.total_volumes, b.cover_url.is_some(), b.external_url.is_some(), b.confidence);
                }
                Ok(_) => println!("[{name}] no results"),
                Err(e) => println!("[{name}] error: {e}"),
            }
        }
    }
}

View File

@@ -39,6 +39,15 @@ function LockButton({
); );
} }
// Selectable series statuses for the edit form's <select>.
// The empty value renders as "Non défini" and is submitted as null
// (the save payload maps it via `status: status || null`).
// NOTE(review): values appear to match the normalized statuses stored in
// series_metadata.status — confirm against the backend normalization.
const SERIES_STATUSES = [
  { value: "", label: "Non défini" },
  { value: "ongoing", label: "En cours" },
  { value: "ended", label: "Terminée" },
  { value: "hiatus", label: "Hiatus" },
  { value: "cancelled", label: "Annulée" },
  { value: "upcoming", label: "À paraître" },
] as const;
interface EditSeriesFormProps { interface EditSeriesFormProps {
libraryId: string; libraryId: string;
seriesName: string; seriesName: string;
@@ -49,6 +58,7 @@ interface EditSeriesFormProps {
currentDescription: string | null; currentDescription: string | null;
currentStartYear: number | null; currentStartYear: number | null;
currentTotalVolumes: number | null; currentTotalVolumes: number | null;
currentStatus: string | null;
currentLockedFields: Record<string, boolean>; currentLockedFields: Record<string, boolean>;
} }
@@ -62,6 +72,7 @@ export function EditSeriesForm({
currentDescription, currentDescription,
currentStartYear, currentStartYear,
currentTotalVolumes, currentTotalVolumes,
currentStatus,
currentLockedFields, currentLockedFields,
}: EditSeriesFormProps) { }: EditSeriesFormProps) {
const router = useRouter(); const router = useRouter();
@@ -80,6 +91,7 @@ export function EditSeriesForm({
const [description, setDescription] = useState(currentDescription ?? ""); const [description, setDescription] = useState(currentDescription ?? "");
const [startYear, setStartYear] = useState(currentStartYear?.toString() ?? ""); const [startYear, setStartYear] = useState(currentStartYear?.toString() ?? "");
const [totalVolumes, setTotalVolumes] = useState(currentTotalVolumes?.toString() ?? ""); const [totalVolumes, setTotalVolumes] = useState(currentTotalVolumes?.toString() ?? "");
const [status, setStatus] = useState(currentStatus ?? "");
// Lock states // Lock states
const [lockedFields, setLockedFields] = useState<Record<string, boolean>>(currentLockedFields); const [lockedFields, setLockedFields] = useState<Record<string, boolean>>(currentLockedFields);
@@ -142,6 +154,7 @@ export function EditSeriesForm({
setDescription(currentDescription ?? ""); setDescription(currentDescription ?? "");
setStartYear(currentStartYear?.toString() ?? ""); setStartYear(currentStartYear?.toString() ?? "");
setTotalVolumes(currentTotalVolumes?.toString() ?? ""); setTotalVolumes(currentTotalVolumes?.toString() ?? "");
setStatus(currentStatus ?? "");
setLockedFields(currentLockedFields); setLockedFields(currentLockedFields);
setShowApplyToBooks(false); setShowApplyToBooks(false);
setBookAuthor(currentBookAuthor ?? ""); setBookAuthor(currentBookAuthor ?? "");
@@ -182,6 +195,7 @@ export function EditSeriesForm({
description: description.trim() || null, description: description.trim() || null,
start_year: startYear.trim() ? parseInt(startYear.trim(), 10) : null, start_year: startYear.trim() ? parseInt(startYear.trim(), 10) : null,
total_volumes: totalVolumes.trim() ? parseInt(totalVolumes.trim(), 10) : null, total_volumes: totalVolumes.trim() ? parseInt(totalVolumes.trim(), 10) : null,
status: status || null,
locked_fields: lockedFields, locked_fields: lockedFields,
}; };
if (showApplyToBooks) { if (showApplyToBooks) {
@@ -285,6 +299,23 @@ export function EditSeriesForm({
/> />
</FormField> </FormField>
<FormField>
<div className="flex items-center gap-1">
<FormLabel>Statut</FormLabel>
<LockButton locked={!!lockedFields.status} onToggle={() => toggleLock("status")} disabled={isPending} />
</div>
<select
value={status}
onChange={(e) => setStatus(e.target.value)}
disabled={isPending}
className="w-full rounded-lg border border-border bg-background px-3 py-2 text-sm text-foreground focus:outline-none focus:ring-2 focus:ring-primary/40"
>
{SERIES_STATUSES.map((s) => (
<option key={s.value} value={s.value}>{s.label}</option>
))}
</select>
</FormField>
{/* Auteurs — multi-valeur */} {/* Auteurs — multi-valeur */}
<FormField className="sm:col-span-2"> <FormField className="sm:col-span-2">
<div className="flex items-center gap-1"> <div className="flex items-center gap-1">

View File

@@ -13,6 +13,7 @@ const FIELD_LABELS: Record<string, string> = {
publishers: "Éditeurs", publishers: "Éditeurs",
start_year: "Année", start_year: "Année",
total_volumes: "Nb volumes", total_volumes: "Nb volumes",
status: "Statut",
summary: "Résumé", summary: "Résumé",
isbn: "ISBN", isbn: "ISBN",
publish_date: "Date de publication", publish_date: "Date de publication",
@@ -338,7 +339,14 @@ export function MetadataSearchModal({
<div className="flex items-center gap-2 text-xs text-muted-foreground"> <div className="flex items-center gap-2 text-xs text-muted-foreground">
{c.publishers.length > 0 && <span>{c.publishers[0]}</span>} {c.publishers.length > 0 && <span>{c.publishers[0]}</span>}
{c.start_year != null && <span>{c.start_year}</span>} {c.start_year != null && <span>{c.start_year}</span>}
{c.total_volumes != null && <span>{c.total_volumes} vol.</span>} {c.total_volumes != null && (
<span>
{c.total_volumes} {c.metadata_json?.volume_source === "chapters" ? "ch." : "vol."}
</span>
)}
{c.metadata_json?.status === "RELEASING" && (
<span className="italic text-amber-500">en cours</span>
)}
</div> </div>
</div> </div>
</div> </div>
@@ -366,8 +374,11 @@ export function MetadataSearchModal({
{selectedCandidate.authors.length > 0 && ( {selectedCandidate.authors.length > 0 && (
<p className="text-sm text-muted-foreground">{selectedCandidate.authors.join(", ")}</p> <p className="text-sm text-muted-foreground">{selectedCandidate.authors.join(", ")}</p>
)} )}
{selectedCandidate.total_volumes && ( {selectedCandidate.total_volumes != null && (
<p className="text-sm text-muted-foreground">{selectedCandidate.total_volumes} volumes</p> <p className="text-sm text-muted-foreground">
{selectedCandidate.total_volumes} {selectedCandidate.metadata_json?.volume_source === "chapters" ? "chapitres" : "volumes"}
{selectedCandidate.metadata_json?.status === "RELEASING" && <span className="italic text-amber-500 ml-1">(en cours)</span>}
</p>
)} )}
<p className="text-xs text-muted-foreground mt-1 inline-flex items-center gap-1"> <p className="text-xs text-muted-foreground mt-1 inline-flex items-center gap-1">
via <ProviderIcon provider={selectedCandidate.provider} size={12} /> <span className="font-medium">{providerLabel(selectedCandidate.provider)}</span> via <ProviderIcon provider={selectedCandidate.provider} size={12} /> <span className="font-medium">{providerLabel(selectedCandidate.provider)}</span>
@@ -458,8 +469,15 @@ export function MetadataSearchModal({
</div> </div>
)} )}
{/* Books message (e.g. provider has no volume data) */}
{syncReport.books_message && (
<div className="p-3 rounded-lg bg-amber-500/10 border border-amber-500/30">
<p className="text-xs text-amber-600">{syncReport.books_message}</p>
</div>
)}
{/* Books report */} {/* Books report */}
{(syncReport.books.length > 0 || syncReport.books_unmatched > 0) && ( {!syncReport.books_message && (syncReport.books.length > 0 || syncReport.books_unmatched > 0) && (
<div className="p-3 rounded-lg bg-muted/30 border border-border/50"> <div className="p-3 rounded-lg bg-muted/30 border border-border/50">
<p className="text-xs font-semibold text-muted-foreground uppercase tracking-wide mb-2"> <p className="text-xs font-semibold text-muted-foreground uppercase tracking-wide mb-2">
Livres {syncReport.books_matched} matched{syncReport.books_unmatched > 0 && `, ${syncReport.books_unmatched} unmatched`} Livres {syncReport.books_matched} matched{syncReport.books_unmatched > 0 && `, ${syncReport.books_unmatched} unmatched`}

View File

@@ -100,9 +100,27 @@ export default async function SeriesDetailPage({
<div className="flex-1 space-y-4"> <div className="flex-1 space-y-4">
<h1 className="text-3xl font-bold text-foreground">{displayName}</h1> <h1 className="text-3xl font-bold text-foreground">{displayName}</h1>
{seriesMeta && seriesMeta.authors.length > 0 && ( <div className="flex flex-wrap items-center gap-3">
<p className="text-base text-muted-foreground">{seriesMeta.authors.join(", ")}</p> {seriesMeta && seriesMeta.authors.length > 0 && (
)} <p className="text-base text-muted-foreground">{seriesMeta.authors.join(", ")}</p>
)}
{seriesMeta?.status && (
<span className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${
seriesMeta.status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
seriesMeta.status === "ended" ? "bg-green-500/15 text-green-600" :
seriesMeta.status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
seriesMeta.status === "cancelled" ? "bg-red-500/15 text-red-600" :
"bg-muted text-muted-foreground"
}`}>
{seriesMeta.status === "ongoing" ? "En cours" :
seriesMeta.status === "ended" ? "Terminée" :
seriesMeta.status === "hiatus" ? "Hiatus" :
seriesMeta.status === "cancelled" ? "Annulée" :
seriesMeta.status === "upcoming" ? "À paraître" :
seriesMeta.status}
</span>
)}
</div>
{seriesMeta?.description && ( {seriesMeta?.description && (
<SafeHtml html={seriesMeta.description} className="text-sm text-muted-foreground leading-relaxed" /> <SafeHtml html={seriesMeta.description} className="text-sm text-muted-foreground leading-relaxed" />
@@ -153,6 +171,7 @@ export default async function SeriesDetailPage({
currentDescription={seriesMeta?.description ?? null} currentDescription={seriesMeta?.description ?? null}
currentStartYear={seriesMeta?.start_year ?? null} currentStartYear={seriesMeta?.start_year ?? null}
currentTotalVolumes={seriesMeta?.total_volumes ?? null} currentTotalVolumes={seriesMeta?.total_volumes ?? null}
currentStatus={seriesMeta?.status ?? null}
currentLockedFields={seriesMeta?.locked_fields ?? {}} currentLockedFields={seriesMeta?.locked_fields ?? {}}
/> />
<MetadataSearchModal <MetadataSearchModal

View File

@@ -516,6 +516,7 @@ export type SeriesMetadataDto = {
publishers: string[]; publishers: string[];
start_year: number | null; start_year: number | null;
total_volumes: number | null; total_volumes: number | null;
status: string | null;
book_author: string | null; book_author: string | null;
book_language: string | null; book_language: string | null;
locked_fields: Record<string, boolean>; locked_fields: Record<string, boolean>;
@@ -657,6 +658,7 @@ export type SyncReport = {
books: BookSyncReport[]; books: BookSyncReport[];
books_matched: number; books_matched: number;
books_unmatched: number; books_unmatched: number;
books_message?: string;
}; };
export type MissingBooksDto = { export type MissingBooksDto = {

View File

@@ -1,6 +1,6 @@
{ {
"name": "stripstream-backoffice", "name": "stripstream-backoffice",
"version": "1.5.0", "version": "1.5.4",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "next dev -p 7082", "dev": "next dev -p 7082",

View File

@@ -0,0 +1 @@
-- Series publication status; NULL means not set.
-- Expected values (see SERIES_STATUSES in the backoffice edit form):
-- ongoing / ended / hiatus / cancelled / upcoming.
ALTER TABLE series_metadata ADD COLUMN status TEXT;