Compare commits
72 Commits
06a245d90a
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 766e3a01b2 | |||
| 626e2e035d | |||
| cfd2321db2 | |||
| 1b715033ce | |||
| 81d1586501 | |||
| bd74c9e3e3 | |||
| 41228430cf | |||
| 6a4ba06fac | |||
| e5c3542d3f | |||
| 24516f1069 | |||
| 5383cdef60 | |||
| be5c3f7a34 | |||
| caa9922ff9 | |||
| 135f000c71 | |||
| d9e50a4235 | |||
| 5f6eb5a5cb | |||
| 41c77fca2e | |||
| 49621f3fb1 | |||
| 6df743b2e6 | |||
| edfefc0128 | |||
| b0185abefe | |||
| b9e54cbfd8 | |||
| 3f0bd783cd | |||
| fc8856c83f | |||
| bd09f3d943 | |||
| 1f434c3d67 | |||
| 4972a403df | |||
| 629708cdd0 | |||
| 560087a897 | |||
| 27f553b005 | |||
| ed7665248e | |||
| 736b8aedc0 | |||
| 3daa49ae6c | |||
| 5fb24188e1 | |||
| 54f972db17 | |||
| acd8b62382 | |||
| cc65e3d1ad | |||
| 70889ca955 | |||
| 4ad6d57271 | |||
| fe5de3d5c1 | |||
| 5a224c48c0 | |||
| d08fe31b1b | |||
| 4d69ed91c5 | |||
| c6ddd3e6c7 | |||
| 504185f31f | |||
| acd0cce3f8 | |||
| e14da4fc8d | |||
| c04d4fb618 | |||
| 57bc82703d | |||
| e6aa7ebed0 | |||
| c44b51d6ef | |||
| d4c48de780 | |||
| 8948f75d62 | |||
| d304877a83 | |||
| 9cec32ba3e | |||
| e8768dfad7 | |||
| cfc98819ab | |||
| bfc1c76fe2 | |||
| 39e9f35acb | |||
| 36987f59b9 | |||
| 931d0e06f4 | |||
| 741a4da878 | |||
| e28b78d0e6 | |||
| 163dc3698c | |||
| 818bd82e0f | |||
| 76c8bcbf2c | |||
| 00094b22c6 | |||
| 1e4d9acebe | |||
| b226aa3a35 | |||
| d913be9d2a | |||
| e9bb951d97 | |||
| 037ede2750 |
17
.gitea/workflows/deploy.yml
Normal file
17
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,17 @@
|
||||
name: Deploy with Docker Compose
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main # adapte la branche que tu veux déployer
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: mac-orbstack-runner # le nom que tu as donné au runner
|
||||
steps:
|
||||
- name: Deploy stack
|
||||
env:
|
||||
DOCKER_BUILDKIT: 1
|
||||
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||
run: |
|
||||
BUILDKIT_PROGRESS=plain cd /Users/julienfroidefond/Sites/docker-stack && docker pull julienfroidefond32/stripstream-backoffice && docker pull julienfroidefond32/stripstream-api && docker pull julienfroidefond32/stripstream-indexer && ./scripts/stack.sh up stripstream
|
||||
25
Cargo.lock
generated
25
Cargo.lock
generated
@@ -64,7 +64,7 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
|
||||
|
||||
[[package]]
|
||||
name = "api"
|
||||
version = "1.6.1"
|
||||
version = "1.23.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argon2",
|
||||
@@ -76,6 +76,7 @@ dependencies = [
|
||||
"image",
|
||||
"jpeg-decoder",
|
||||
"lru",
|
||||
"notifications",
|
||||
"parsers",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
@@ -1232,7 +1233,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "indexer"
|
||||
version = "1.6.1"
|
||||
version = "1.23.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"axum",
|
||||
@@ -1240,6 +1241,7 @@ dependencies = [
|
||||
"futures",
|
||||
"image",
|
||||
"jpeg-decoder",
|
||||
"notifications",
|
||||
"num_cpus",
|
||||
"parsers",
|
||||
"reqwest",
|
||||
@@ -1663,6 +1665,19 @@ dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "notifications"
|
||||
version = "1.23.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sqlx",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nu-ansi-term"
|
||||
version = "0.50.3"
|
||||
@@ -1771,7 +1786,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "parsers"
|
||||
version = "1.6.1"
|
||||
version = "1.23.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"flate2",
|
||||
@@ -2270,6 +2285,7 @@ dependencies = [
|
||||
"base64",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"http",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
@@ -2278,6 +2294,7 @@ dependencies = [
|
||||
"hyper-util",
|
||||
"js-sys",
|
||||
"log",
|
||||
"mime_guess",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"quinn",
|
||||
@@ -2906,7 +2923,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "stripstream-core"
|
||||
version = "1.6.1"
|
||||
version = "1.23.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"serde",
|
||||
|
||||
@@ -3,13 +3,14 @@ members = [
|
||||
"apps/api",
|
||||
"apps/indexer",
|
||||
"crates/core",
|
||||
"crates/notifications",
|
||||
"crates/parsers",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
edition = "2021"
|
||||
version = "1.6.1"
|
||||
version = "1.23.0"
|
||||
license = "MIT"
|
||||
|
||||
[workspace.dependencies]
|
||||
@@ -22,7 +23,7 @@ image = { version = "0.25", default-features = false, features = ["jpeg", "png",
|
||||
jpeg-decoder = "0.3"
|
||||
lru = "0.12"
|
||||
rayon = "1.10"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "multipart", "rustls-tls"] }
|
||||
rand = "0.8"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
|
||||
68
README.md
68
README.md
@@ -81,28 +81,58 @@ The backoffice will be available at http://localhost:7082
|
||||
|
||||
## Features
|
||||
|
||||
### Libraries Management
|
||||
- Create and manage multiple libraries
|
||||
- Configure automatic scanning schedules (hourly, daily, weekly)
|
||||
- Real-time file watcher for instant indexing
|
||||
- Full and incremental rebuild options
|
||||
> For the full feature list, business rules, and API details, see [docs/FEATURES.md](docs/FEATURES.md).
|
||||
|
||||
### Books Management
|
||||
- Support for CBZ, CBR, and PDF formats
|
||||
- Automatic metadata extraction
|
||||
- Series and volume detection
|
||||
- Full-text search powered by PostgreSQL
|
||||
### Libraries
|
||||
- Multi-library management with per-library configuration
|
||||
- Incremental and full scanning, real-time filesystem watcher
|
||||
- Per-library metadata provider selection (Google Books, ComicVine, BedéThèque, AniList, Open Library)
|
||||
|
||||
### Jobs Monitoring
|
||||
- Real-time job progress tracking
|
||||
- Detailed statistics (scanned, indexed, removed, errors)
|
||||
- Job history and logs
|
||||
- Cancel pending jobs
|
||||
### Books & Series
|
||||
- **Formats**: CBZ, CBR, PDF, EPUB
|
||||
- Automatic metadata extraction (title, series, volume, authors, page count) from filenames and directory structure
|
||||
- Series aggregation with missing volume detection
|
||||
- Thumbnail generation (WebP/JPEG/PNG) with lazy generation and bulk rebuild
|
||||
- CBR → CBZ conversion
|
||||
|
||||
### Search
|
||||
- Full-text search across titles, authors, and series
|
||||
- Library filtering
|
||||
- Real-time suggestions
|
||||
### Reading Progress
|
||||
- Per-book tracking: unread / reading / read with current page
|
||||
- Series-level aggregated reading status
|
||||
- Bulk mark-as-read for series
|
||||
|
||||
### Search & Discovery
|
||||
- Full-text search across titles, authors, and series (PostgreSQL `pg_trgm`)
|
||||
- Author listing with book/series counts
|
||||
- Filtering by reading status, series status, format, metadata provider
|
||||
|
||||
### External Metadata
|
||||
- Search, match, approve/reject workflow with confidence scoring
|
||||
- Batch auto-matching and scheduled metadata refresh
|
||||
- Field locking to protect manual edits from sync
|
||||
|
||||
### External Integrations
|
||||
- **Komga**: import reading progress
|
||||
- **Prowlarr**: search for missing volumes
|
||||
- **qBittorrent**: add torrents directly from search results
|
||||
|
||||
### Background Jobs
|
||||
- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion
|
||||
- Real-time progress via Server-Sent Events (SSE)
|
||||
- Job history, error tracking, cancellation
|
||||
|
||||
### Page Rendering
|
||||
- On-demand page extraction from all formats
|
||||
- Image processing (format, quality, max width, resampling filter)
|
||||
- LRU in-memory + disk cache
|
||||
|
||||
### Security
|
||||
- Token-based auth (`admin` / `read` scopes) with Argon2 hashing
|
||||
- Rate limiting, token expiration and revocation
|
||||
|
||||
### Web UI (Backoffice)
|
||||
- Dashboard with statistics, charts, and reading progress
|
||||
- Library, book, series, author management
|
||||
- Live job monitoring, metadata search modals, settings panel
|
||||
|
||||
## Environment Variables
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ futures = "0.3"
|
||||
image.workspace = true
|
||||
jpeg-decoder.workspace = true
|
||||
lru.workspace = true
|
||||
notifications = { path = "../../crates/notifications" }
|
||||
stripstream-core = { path = "../../crates/core" }
|
||||
parsers = { path = "../../crates/parsers" }
|
||||
rand.workspace = true
|
||||
|
||||
@@ -6,13 +6,15 @@ COPY Cargo.toml ./
|
||||
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
||||
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
||||
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
||||
COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
|
||||
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
||||
|
||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/parsers/src && \
|
||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
|
||||
echo "fn main() {}" > apps/api/src/main.rs && \
|
||||
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
||||
echo "" > apps/indexer/src/lib.rs && \
|
||||
echo "" > crates/core/src/lib.rs && \
|
||||
echo "" > crates/notifications/src/lib.rs && \
|
||||
echo "" > crates/parsers/src/lib.rs
|
||||
|
||||
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
||||
@@ -26,12 +28,13 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||
COPY apps/api/src apps/api/src
|
||||
COPY apps/indexer/src apps/indexer/src
|
||||
COPY crates/core/src crates/core/src
|
||||
COPY crates/notifications/src crates/notifications/src
|
||||
COPY crates/parsers/src crates/parsers/src
|
||||
|
||||
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||
--mount=type=cache,target=/usr/local/cargo/git \
|
||||
--mount=type=cache,target=/app/target \
|
||||
touch apps/api/src/main.rs crates/core/src/lib.rs crates/parsers/src/lib.rs && \
|
||||
touch apps/api/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
|
||||
cargo build --release -p api && \
|
||||
cp /app/target/release/api /usr/local/bin/api
|
||||
|
||||
|
||||
178
apps/api/src/authors.rs
Normal file
178
apps/api/src/authors.rs
Normal file
@@ -0,0 +1,178 @@
|
||||
use axum::{extract::{Query, State}, Json};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
pub struct ListAuthorsQuery {
|
||||
#[schema(value_type = Option<String>, example = "batman")]
|
||||
pub q: Option<String>,
|
||||
#[schema(value_type = Option<i64>, example = 1)]
|
||||
pub page: Option<i64>,
|
||||
#[schema(value_type = Option<i64>, example = 20)]
|
||||
pub limit: Option<i64>,
|
||||
/// Sort order: "name" (default), "books" (most books first)
|
||||
#[schema(value_type = Option<String>, example = "books")]
|
||||
pub sort: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct AuthorItem {
|
||||
pub name: String,
|
||||
pub book_count: i64,
|
||||
pub series_count: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct AuthorsPageResponse {
|
||||
pub items: Vec<AuthorItem>,
|
||||
pub total: i64,
|
||||
pub page: i64,
|
||||
pub limit: i64,
|
||||
}
|
||||
|
||||
/// List all unique authors with book/series counts
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/authors",
|
||||
tag = "authors",
|
||||
params(
|
||||
("q" = Option<String>, Query, description = "Search by author name"),
|
||||
("page" = Option<i64>, Query, description = "Page number (1-based)"),
|
||||
("limit" = Option<i64>, Query, description = "Items per page (max 100)"),
|
||||
("sort" = Option<String>, Query, description = "Sort: name (default) or books"),
|
||||
),
|
||||
responses(
|
||||
(status = 200, body = AuthorsPageResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn list_authors(
|
||||
State(state): State<AppState>,
|
||||
Query(query): Query<ListAuthorsQuery>,
|
||||
) -> Result<Json<AuthorsPageResponse>, ApiError> {
|
||||
let page = query.page.unwrap_or(1).max(1);
|
||||
let limit = query.limit.unwrap_or(20).clamp(1, 100);
|
||||
let offset = (page - 1) * limit;
|
||||
let sort = query.sort.as_deref().unwrap_or("name");
|
||||
|
||||
let order_clause = match sort {
|
||||
"books" => "book_count DESC, name ASC",
|
||||
_ => "name ASC",
|
||||
};
|
||||
|
||||
let q_pattern = query.q.as_deref()
|
||||
.filter(|s| !s.trim().is_empty())
|
||||
.map(|s| format!("%{s}%"));
|
||||
|
||||
// Aggregate unique authors from books.authors + books.author + series_metadata.authors
|
||||
let sql = format!(
|
||||
r#"
|
||||
WITH all_authors AS (
|
||||
SELECT DISTINCT UNNEST(
|
||||
COALESCE(
|
||||
NULLIF(authors, '{{}}'),
|
||||
CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
|
||||
)
|
||||
) AS name
|
||||
FROM books
|
||||
UNION
|
||||
SELECT DISTINCT UNNEST(authors) AS name
|
||||
FROM series_metadata
|
||||
WHERE authors != '{{}}'
|
||||
),
|
||||
filtered AS (
|
||||
SELECT name FROM all_authors
|
||||
WHERE ($1::text IS NULL OR name ILIKE $1)
|
||||
),
|
||||
book_counts AS (
|
||||
SELECT
|
||||
f.name AS author_name,
|
||||
COUNT(DISTINCT b.id) AS book_count
|
||||
FROM filtered f
|
||||
LEFT JOIN books b ON (
|
||||
f.name = ANY(
|
||||
COALESCE(
|
||||
NULLIF(b.authors, '{{}}'),
|
||||
CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END
|
||||
)
|
||||
)
|
||||
)
|
||||
GROUP BY f.name
|
||||
),
|
||||
series_counts AS (
|
||||
SELECT
|
||||
f.name AS author_name,
|
||||
COUNT(DISTINCT (sm.library_id, sm.name)) AS series_count
|
||||
FROM filtered f
|
||||
LEFT JOIN series_metadata sm ON (
|
||||
f.name = ANY(sm.authors) AND sm.authors != '{{}}'
|
||||
)
|
||||
GROUP BY f.name
|
||||
)
|
||||
SELECT
|
||||
f.name,
|
||||
COALESCE(bc.book_count, 0) AS book_count,
|
||||
COALESCE(sc.series_count, 0) AS series_count
|
||||
FROM filtered f
|
||||
LEFT JOIN book_counts bc ON bc.author_name = f.name
|
||||
LEFT JOIN series_counts sc ON sc.author_name = f.name
|
||||
ORDER BY {order_clause}
|
||||
LIMIT $2 OFFSET $3
|
||||
"#
|
||||
);
|
||||
|
||||
let count_sql = r#"
|
||||
WITH all_authors AS (
|
||||
SELECT DISTINCT UNNEST(
|
||||
COALESCE(
|
||||
NULLIF(authors, '{}'),
|
||||
CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
|
||||
)
|
||||
) AS name
|
||||
FROM books
|
||||
UNION
|
||||
SELECT DISTINCT UNNEST(authors) AS name
|
||||
FROM series_metadata
|
||||
WHERE authors != '{}'
|
||||
)
|
||||
SELECT COUNT(*) AS total
|
||||
FROM all_authors
|
||||
WHERE ($1::text IS NULL OR name ILIKE $1)
|
||||
"#;
|
||||
|
||||
let (rows, count_row) = tokio::join!(
|
||||
sqlx::query(&sql)
|
||||
.bind(q_pattern.as_deref())
|
||||
.bind(limit)
|
||||
.bind(offset)
|
||||
.fetch_all(&state.pool),
|
||||
sqlx::query(count_sql)
|
||||
.bind(q_pattern.as_deref())
|
||||
.fetch_one(&state.pool)
|
||||
);
|
||||
|
||||
let rows = rows.map_err(|e| ApiError::internal(format!("authors query failed: {e}")))?;
|
||||
let total: i64 = count_row
|
||||
.map_err(|e| ApiError::internal(format!("authors count failed: {e}")))?
|
||||
.get("total");
|
||||
|
||||
let items: Vec<AuthorItem> = rows
|
||||
.iter()
|
||||
.map(|r| AuthorItem {
|
||||
name: r.get("name"),
|
||||
book_count: r.get("book_count"),
|
||||
series_count: r.get("series_count"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(AuthorsPageResponse {
|
||||
items,
|
||||
total,
|
||||
page,
|
||||
limit,
|
||||
}))
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,6 +16,10 @@ pub struct RebuildRequest {
|
||||
pub library_id: Option<Uuid>,
|
||||
#[schema(value_type = Option<bool>, example = false)]
|
||||
pub full: Option<bool>,
|
||||
/// Deep rescan: clears directory mtimes to force re-walking all directories,
|
||||
/// discovering newly supported formats without deleting existing data.
|
||||
#[schema(value_type = Option<bool>, example = false)]
|
||||
pub rescan: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -117,7 +121,8 @@ pub async fn enqueue_rebuild(
|
||||
) -> Result<Json<IndexJobResponse>, ApiError> {
|
||||
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
|
||||
let is_full = payload.as_ref().and_then(|p| p.0.full).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
|
||||
let is_rescan = payload.as_ref().and_then(|p| p.0.rescan).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
|
||||
let id = Uuid::new_v4();
|
||||
|
||||
sqlx::query(
|
||||
|
||||
@@ -154,10 +154,11 @@ pub async fn sync_komga_read_books(
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
type BookEntry = (Uuid, String, String);
|
||||
// Primary: (series_lower, title_lower) -> Vec<(Uuid, title, series)>
|
||||
let mut primary_map: HashMap<(String, String), Vec<(Uuid, String, String)>> = HashMap::new();
|
||||
let mut primary_map: HashMap<(String, String), Vec<BookEntry>> = HashMap::new();
|
||||
// Secondary: title_lower -> Vec<(Uuid, title, series)>
|
||||
let mut secondary_map: HashMap<String, Vec<(Uuid, String, String)>> = HashMap::new();
|
||||
let mut secondary_map: HashMap<String, Vec<BookEntry>> = HashMap::new();
|
||||
|
||||
for row in &rows {
|
||||
let id: Uuid = row.get("id");
|
||||
|
||||
@@ -23,6 +23,13 @@ pub struct LibraryResponse {
|
||||
pub watcher_enabled: bool,
|
||||
pub metadata_provider: Option<String>,
|
||||
pub fallback_metadata_provider: Option<String>,
|
||||
pub metadata_refresh_mode: String,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub next_metadata_refresh_at: Option<chrono::DateTime<chrono::Utc>>,
|
||||
pub series_count: i64,
|
||||
/// First book IDs from up to 5 distinct series (for thumbnail fan display)
|
||||
#[schema(value_type = Vec<String>)]
|
||||
pub thumbnail_book_ids: Vec<Uuid>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
@@ -41,14 +48,27 @@ pub struct CreateLibraryRequest {
|
||||
responses(
|
||||
(status = 200, body = Vec<LibraryResponse>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider,
|
||||
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count
|
||||
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at,
|
||||
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
|
||||
(SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count,
|
||||
COALESCE((
|
||||
SELECT ARRAY_AGG(first_id ORDER BY series_name)
|
||||
FROM (
|
||||
SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified'))
|
||||
COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name,
|
||||
b.id as first_id
|
||||
FROM books b
|
||||
WHERE b.library_id = l.id
|
||||
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'),
|
||||
b.volume NULLS LAST, b.title ASC
|
||||
LIMIT 5
|
||||
) sub
|
||||
), ARRAY[]::uuid[]) as thumbnail_book_ids
|
||||
FROM libraries l ORDER BY l.created_at DESC"
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
@@ -62,12 +82,16 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<Li
|
||||
root_path: row.get("root_path"),
|
||||
enabled: row.get("enabled"),
|
||||
book_count: row.get("book_count"),
|
||||
series_count: row.get("series_count"),
|
||||
monitor_enabled: row.get("monitor_enabled"),
|
||||
scan_mode: row.get("scan_mode"),
|
||||
next_scan_at: row.get("next_scan_at"),
|
||||
watcher_enabled: row.get("watcher_enabled"),
|
||||
metadata_provider: row.get("metadata_provider"),
|
||||
fallback_metadata_provider: row.get("fallback_metadata_provider"),
|
||||
metadata_refresh_mode: row.get("metadata_refresh_mode"),
|
||||
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
|
||||
thumbnail_book_ids: row.get("thumbnail_book_ids"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -115,12 +139,16 @@ pub async fn create_library(
|
||||
root_path,
|
||||
enabled: true,
|
||||
book_count: 0,
|
||||
series_count: 0,
|
||||
monitor_enabled: false,
|
||||
scan_mode: "manual".to_string(),
|
||||
next_scan_at: None,
|
||||
watcher_enabled: false,
|
||||
metadata_provider: None,
|
||||
fallback_metadata_provider: None,
|
||||
metadata_refresh_mode: "manual".to_string(),
|
||||
next_metadata_refresh_at: None,
|
||||
thumbnail_book_ids: vec![],
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -192,7 +220,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest};
|
||||
(status = 200, body = IndexJobResponse),
|
||||
(status = 404, description = "Library not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
@@ -212,7 +239,8 @@ pub async fn scan_library(
|
||||
}
|
||||
|
||||
let is_full = payload.as_ref().and_then(|p| p.full).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
|
||||
let is_rescan = payload.as_ref().and_then(|p| p.rescan).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
|
||||
|
||||
// Create indexing job for this library
|
||||
let job_id = Uuid::new_v4();
|
||||
@@ -241,6 +269,8 @@ pub struct UpdateMonitoringRequest {
|
||||
#[schema(value_type = String, example = "hourly")]
|
||||
pub scan_mode: String, // 'manual', 'hourly', 'daily', 'weekly'
|
||||
pub watcher_enabled: Option<bool>,
|
||||
#[schema(value_type = Option<String>, example = "daily")]
|
||||
pub metadata_refresh_mode: Option<String>, // 'manual', 'hourly', 'daily', 'weekly'
|
||||
}
|
||||
|
||||
/// Update monitoring settings for a library
|
||||
@@ -271,6 +301,12 @@ pub async fn update_monitoring(
|
||||
return Err(ApiError::bad_request("scan_mode must be one of: manual, hourly, daily, weekly"));
|
||||
}
|
||||
|
||||
// Validate metadata_refresh_mode
|
||||
let metadata_refresh_mode = input.metadata_refresh_mode.as_deref().unwrap_or("manual");
|
||||
if !valid_modes.contains(&metadata_refresh_mode) {
|
||||
return Err(ApiError::bad_request("metadata_refresh_mode must be one of: manual, hourly, daily, weekly"));
|
||||
}
|
||||
|
||||
// Calculate next_scan_at if monitoring is enabled
|
||||
let next_scan_at = if input.monitor_enabled {
|
||||
let interval_minutes = match input.scan_mode.as_str() {
|
||||
@@ -284,16 +320,31 @@ pub async fn update_monitoring(
|
||||
None
|
||||
};
|
||||
|
||||
// Calculate next_metadata_refresh_at
|
||||
let next_metadata_refresh_at = if metadata_refresh_mode != "manual" {
|
||||
let interval_minutes = match metadata_refresh_mode {
|
||||
"hourly" => 60,
|
||||
"daily" => 1440,
|
||||
"weekly" => 10080,
|
||||
_ => 1440,
|
||||
};
|
||||
Some(chrono::Utc::now() + chrono::Duration::minutes(interval_minutes))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let watcher_enabled = input.watcher_enabled.unwrap_or(false);
|
||||
|
||||
let result = sqlx::query(
|
||||
"UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider"
|
||||
"UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5, metadata_refresh_mode = $6, next_metadata_refresh_at = $7 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(input.monitor_enabled)
|
||||
.bind(input.scan_mode)
|
||||
.bind(next_scan_at)
|
||||
.bind(watcher_enabled)
|
||||
.bind(metadata_refresh_mode)
|
||||
.bind(next_metadata_refresh_at)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -306,18 +357,38 @@ pub async fn update_monitoring(
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
|
||||
"SELECT b.id FROM books b
|
||||
WHERE b.library_id = $1
|
||||
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
|
||||
LIMIT 5"
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Json(LibraryResponse {
|
||||
id: row.get("id"),
|
||||
name: row.get("name"),
|
||||
root_path: row.get("root_path"),
|
||||
enabled: row.get("enabled"),
|
||||
book_count,
|
||||
series_count,
|
||||
monitor_enabled: row.get("monitor_enabled"),
|
||||
scan_mode: row.get("scan_mode"),
|
||||
next_scan_at: row.get("next_scan_at"),
|
||||
watcher_enabled: row.get("watcher_enabled"),
|
||||
metadata_provider: row.get("metadata_provider"),
|
||||
fallback_metadata_provider: row.get("fallback_metadata_provider"),
|
||||
metadata_refresh_mode: row.get("metadata_refresh_mode"),
|
||||
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
|
||||
thumbnail_book_ids,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -353,7 +424,7 @@ pub async fn update_metadata_provider(
|
||||
let fallback = input.fallback_metadata_provider.as_deref().filter(|s| !s.is_empty());
|
||||
|
||||
let result = sqlx::query(
|
||||
"UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider"
|
||||
"UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(provider)
|
||||
@@ -370,17 +441,37 @@ pub async fn update_metadata_provider(
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
|
||||
"SELECT b.id FROM books b
|
||||
WHERE b.library_id = $1
|
||||
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
|
||||
LIMIT 5"
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
Ok(Json(LibraryResponse {
|
||||
id: row.get("id"),
|
||||
name: row.get("name"),
|
||||
root_path: row.get("root_path"),
|
||||
enabled: row.get("enabled"),
|
||||
book_count,
|
||||
series_count,
|
||||
monitor_enabled: row.get("monitor_enabled"),
|
||||
scan_mode: row.get("scan_mode"),
|
||||
next_scan_at: row.get("next_scan_at"),
|
||||
watcher_enabled: row.get("watcher_enabled"),
|
||||
metadata_provider: row.get("metadata_provider"),
|
||||
fallback_metadata_provider: row.get("fallback_metadata_provider"),
|
||||
metadata_refresh_mode: row.get("metadata_refresh_mode"),
|
||||
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
|
||||
thumbnail_book_ids,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
mod auth;
|
||||
mod authors;
|
||||
mod books;
|
||||
mod error;
|
||||
mod handlers;
|
||||
@@ -7,15 +8,20 @@ mod komga;
|
||||
mod libraries;
|
||||
mod metadata;
|
||||
mod metadata_batch;
|
||||
mod metadata_refresh;
|
||||
mod metadata_providers;
|
||||
mod api_middleware;
|
||||
mod openapi;
|
||||
mod pages;
|
||||
mod prowlarr;
|
||||
mod qbittorrent;
|
||||
mod reading_progress;
|
||||
mod search;
|
||||
mod series;
|
||||
mod settings;
|
||||
mod state;
|
||||
mod stats;
|
||||
mod telegram;
|
||||
mod thumbnails;
|
||||
mod tokens;
|
||||
|
||||
@@ -82,14 +88,13 @@ async fn main() -> anyhow::Result<()> {
|
||||
};
|
||||
|
||||
let admin_routes = Router::new()
|
||||
.route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
|
||||
.route("/libraries", axum::routing::post(libraries::create_library))
|
||||
.route("/libraries/:id", delete(libraries::delete_library))
|
||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
|
||||
.route("/books/:id", axum::routing::patch(books::update_book))
|
||||
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
||||
.route("/libraries/:library_id/series/:name", axum::routing::patch(books::update_series))
|
||||
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
|
||||
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
|
||||
@@ -103,6 +108,11 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
|
||||
.route("/admin/tokens/:id", delete(tokens::revoke_token))
|
||||
.route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token))
|
||||
.route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr))
|
||||
.route("/prowlarr/test", get(prowlarr::test_prowlarr))
|
||||
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
|
||||
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
|
||||
.route("/telegram/test", get(telegram::test_telegram))
|
||||
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
|
||||
.route("/komga/reports", get(komga::list_sync_reports))
|
||||
.route("/komga/reports/:id", get(komga::get_sync_report))
|
||||
@@ -116,6 +126,8 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/metadata/batch", axum::routing::post(metadata_batch::start_batch))
|
||||
.route("/metadata/batch/:id/report", get(metadata_batch::get_batch_report))
|
||||
.route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
|
||||
.route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
|
||||
.route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
|
||||
.merge(settings::settings_routes())
|
||||
.route_layer(middleware::from_fn_with_state(
|
||||
state.clone(),
|
||||
@@ -123,18 +135,22 @@ async fn main() -> anyhow::Result<()> {
|
||||
));
|
||||
|
||||
let read_routes = Router::new()
|
||||
.route("/libraries", get(libraries::list_libraries))
|
||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||
.route("/books", get(books::list_books))
|
||||
.route("/books/ongoing", get(books::ongoing_books))
|
||||
.route("/books/ongoing", get(series::ongoing_books))
|
||||
.route("/books/:id", get(books::get_book))
|
||||
.route("/books/:id/thumbnail", get(books::get_thumbnail))
|
||||
.route("/books/:id/pages/:n", get(pages::get_page))
|
||||
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
|
||||
.route("/libraries/:library_id/series", get(books::list_series))
|
||||
.route("/libraries/:library_id/series/:name/metadata", get(books::get_series_metadata))
|
||||
.route("/series", get(books::list_all_series))
|
||||
.route("/series/ongoing", get(books::ongoing_series))
|
||||
.route("/series/statuses", get(books::series_statuses))
|
||||
.route("/libraries/:library_id/series", get(series::list_series))
|
||||
.route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
|
||||
.route("/series", get(series::list_all_series))
|
||||
.route("/series/ongoing", get(series::ongoing_series))
|
||||
.route("/series/statuses", get(series::series_statuses))
|
||||
.route("/series/provider-statuses", get(series::provider_statuses))
|
||||
.route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
|
||||
.route("/authors", get(authors::list_authors))
|
||||
.route("/stats", get(stats::get_stats))
|
||||
.route("/search", get(search::search_books))
|
||||
.route_layer(middleware::from_fn_with_state(state.clone(), api_middleware::read_rate_limit))
|
||||
|
||||
@@ -369,6 +369,26 @@ pub async fn approve_metadata(
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Notify via Telegram (with first book thumbnail if available)
|
||||
let provider_for_notif: String = row.get("provider");
|
||||
let thumbnail_path: Option<String> = sqlx::query_scalar(
|
||||
"SELECT thumbnail_path FROM books WHERE library_id = $1 AND series_name = $2 AND thumbnail_path IS NOT NULL ORDER BY sort_order LIMIT 1",
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(&series_name)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
notifications::notify(
|
||||
state.pool.clone(),
|
||||
notifications::NotificationEvent::MetadataApproved {
|
||||
series_name: series_name.clone(),
|
||||
provider: provider_for_notif,
|
||||
thumbnail_path,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(Json(ApproveResponse {
|
||||
status: "approved".to_string(),
|
||||
report,
|
||||
@@ -693,10 +713,11 @@ pub(crate) async fn sync_series_metadata(
|
||||
.get("start_year")
|
||||
.and_then(|y| y.as_i64())
|
||||
.map(|y| y as i32);
|
||||
let status = metadata_json
|
||||
.get("status")
|
||||
.and_then(|s| s.as_str())
|
||||
.map(normalize_series_status);
|
||||
let status = if let Some(raw) = metadata_json.get("status").and_then(|s| s.as_str()) {
|
||||
Some(normalize_series_status(&state.pool, raw).await)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Fetch existing state before upsert
|
||||
let existing = sqlx::query(
|
||||
@@ -775,7 +796,7 @@ pub(crate) async fn sync_series_metadata(
|
||||
let fields = vec![
|
||||
FieldDef {
|
||||
name: "description",
|
||||
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("description")).map(|s| serde_json::Value::String(s)),
|
||||
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("description")).map(serde_json::Value::String),
|
||||
new: description.map(|s| serde_json::Value::String(s.to_string())),
|
||||
},
|
||||
FieldDef {
|
||||
@@ -800,8 +821,8 @@ pub(crate) async fn sync_series_metadata(
|
||||
},
|
||||
FieldDef {
|
||||
name: "status",
|
||||
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("status")).map(|s| serde_json::Value::String(s)),
|
||||
new: status.as_ref().map(|s| serde_json::Value::String(s.clone())),
|
||||
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("status")).map(serde_json::Value::String),
|
||||
new: status.as_ref().map(|s: &String| serde_json::Value::String(s.clone())),
|
||||
},
|
||||
];
|
||||
|
||||
@@ -825,25 +846,35 @@ pub(crate) async fn sync_series_metadata(
|
||||
Ok(report)
|
||||
}
|
||||
|
||||
/// Normalize provider-specific status strings to a standard set:
|
||||
/// "ongoing", "ended", "hiatus", "cancelled", or the original lowercase value
|
||||
fn normalize_series_status(raw: &str) -> String {
|
||||
/// Normalize provider-specific status strings using the status_mappings table.
|
||||
/// Returns None if no mapping is found — unknown statuses are not stored.
|
||||
pub(crate) async fn normalize_series_status(pool: &sqlx::PgPool, raw: &str) -> String {
|
||||
let lower = raw.to_lowercase();
|
||||
match lower.as_str() {
|
||||
// AniList
|
||||
"finished" => "ended".to_string(),
|
||||
"releasing" => "ongoing".to_string(),
|
||||
"not_yet_released" => "upcoming".to_string(),
|
||||
"cancelled" => "cancelled".to_string(),
|
||||
"hiatus" => "hiatus".to_string(),
|
||||
// Bédéthèque
|
||||
_ if lower.contains("finie") || lower.contains("terminée") => "ended".to_string(),
|
||||
_ if lower.contains("en cours") => "ongoing".to_string(),
|
||||
_ if lower.contains("hiatus") || lower.contains("suspendue") => "hiatus".to_string(),
|
||||
_ if lower.contains("annulée") || lower.contains("arrêtée") => "cancelled".to_string(),
|
||||
// Fallback
|
||||
_ => lower,
|
||||
|
||||
// Try exact match first (only mapped entries)
|
||||
if let Ok(Some(row)) = sqlx::query_scalar::<_, String>(
|
||||
"SELECT mapped_status FROM status_mappings WHERE provider_status = $1 AND mapped_status IS NOT NULL",
|
||||
)
|
||||
.bind(&lower)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
{
|
||||
return row;
|
||||
}
|
||||
|
||||
// Try substring match (for Bédéthèque-style statuses like "Série finie")
|
||||
if let Ok(Some(row)) = sqlx::query_scalar::<_, String>(
|
||||
"SELECT mapped_status FROM status_mappings WHERE $1 LIKE '%' || provider_status || '%' AND mapped_status IS NOT NULL LIMIT 1",
|
||||
)
|
||||
.bind(&lower)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
{
|
||||
return row;
|
||||
}
|
||||
|
||||
// No mapping found — return the provider status as-is (lowercased)
|
||||
lower
|
||||
}
|
||||
|
||||
pub(crate) async fn sync_books_metadata(
|
||||
|
||||
@@ -124,6 +124,12 @@ pub async fn start_batch(
|
||||
|
||||
// Spawn the background processing task
|
||||
let pool = state.pool.clone();
|
||||
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = process_metadata_batch(&pool, job_id, library_id).await {
|
||||
warn!("[METADATA_BATCH] job {job_id} failed: {e}");
|
||||
@@ -134,6 +140,13 @@ pub async fn start_batch(
|
||||
.bind(e.to_string())
|
||||
.execute(&pool)
|
||||
.await;
|
||||
notifications::notify(
|
||||
pool.clone(),
|
||||
notifications::NotificationEvent::MetadataBatchFailed {
|
||||
library_name,
|
||||
error: e.to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -389,17 +402,19 @@ async fn process_metadata_batch(
|
||||
update_progress(pool, job_id, processed, total, series_name).await;
|
||||
insert_result(
|
||||
pool,
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
"already_linked",
|
||||
None,
|
||||
false,
|
||||
0,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
Some("Unclassified series skipped"),
|
||||
&InsertResultParams {
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
status: "already_linked",
|
||||
provider_used: None,
|
||||
fallback_used: false,
|
||||
candidates_count: 0,
|
||||
best_confidence: None,
|
||||
best_candidate_json: None,
|
||||
link_id: None,
|
||||
error_message: Some("Unclassified series skipped"),
|
||||
},
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
@@ -411,17 +426,19 @@ async fn process_metadata_batch(
|
||||
update_progress(pool, job_id, processed, total, series_name).await;
|
||||
insert_result(
|
||||
pool,
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
"already_linked",
|
||||
None,
|
||||
false,
|
||||
0,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
None,
|
||||
&InsertResultParams {
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
status: "already_linked",
|
||||
provider_used: None,
|
||||
fallback_used: false,
|
||||
candidates_count: 0,
|
||||
best_confidence: None,
|
||||
best_candidate_json: None,
|
||||
link_id: None,
|
||||
error_message: None,
|
||||
},
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
@@ -577,17 +594,19 @@ async fn process_metadata_batch(
|
||||
|
||||
insert_result(
|
||||
pool,
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
result_status,
|
||||
provider_used.as_deref(),
|
||||
fallback_used,
|
||||
candidates_count,
|
||||
best_confidence,
|
||||
best_candidate.as_ref(),
|
||||
link_id,
|
||||
error_msg.as_deref(),
|
||||
&InsertResultParams {
|
||||
job_id,
|
||||
library_id,
|
||||
series_name,
|
||||
status: result_status,
|
||||
provider_used: provider_used.as_deref(),
|
||||
fallback_used,
|
||||
candidates_count,
|
||||
best_confidence,
|
||||
best_candidate_json: best_candidate.as_ref(),
|
||||
link_id,
|
||||
error_message: error_msg.as_deref(),
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
@@ -615,6 +634,21 @@ async fn process_metadata_batch(
|
||||
|
||||
info!("[METADATA_BATCH] job={job_id} completed: {processed}/{total} series processed");
|
||||
|
||||
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
notifications::notify(
|
||||
pool.clone(),
|
||||
notifications::NotificationEvent::MetadataBatchCompleted {
|
||||
library_name,
|
||||
total_series: total,
|
||||
processed,
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -632,7 +666,7 @@ enum SearchOutcome {
|
||||
|
||||
async fn search_and_evaluate(
|
||||
pool: &PgPool,
|
||||
_library_id: Uuid,
|
||||
library_id: Uuid,
|
||||
series_name: &str,
|
||||
provider_name: &str,
|
||||
) -> SearchOutcome {
|
||||
@@ -660,7 +694,31 @@ async fn search_and_evaluate(
|
||||
// Check if best candidate has perfect confidence
|
||||
let best = candidates.into_iter().next().unwrap();
|
||||
if (best.confidence - 1.0).abs() < f32::EPSILON {
|
||||
// Multiple results but best is 100% — still too many results
|
||||
// Multiple results but best is 100% — check if book count matches to auto-match
|
||||
if let Some(ext_total) = best.total_volumes {
|
||||
let local_count: Option<i64> = sqlx::query_scalar(
|
||||
r#"
|
||||
SELECT COUNT(*) FROM books
|
||||
WHERE library_id = $1
|
||||
AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
|
||||
"#,
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(series_name)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
if let Some(count) = local_count {
|
||||
if count == ext_total as i64 {
|
||||
info!(
|
||||
"[METADATA_BATCH] Auto-match by book count: series='{}' confidence=100% local_books={} external_volumes={}",
|
||||
series_name, count, ext_total
|
||||
);
|
||||
return SearchOutcome::AutoMatch(best);
|
||||
}
|
||||
}
|
||||
}
|
||||
return SearchOutcome::TooManyResults(1, Some(best)); // count the 100% one
|
||||
}
|
||||
|
||||
@@ -741,9 +799,12 @@ async fn sync_series_from_candidate(
|
||||
let publishers = &candidate.publishers;
|
||||
let start_year = candidate.start_year;
|
||||
let total_volumes = candidate.total_volumes;
|
||||
let status = candidate.metadata_json
|
||||
.get("status")
|
||||
.and_then(|s| s.as_str());
|
||||
let status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
|
||||
Some(crate::metadata::normalize_series_status(pool, raw).await)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let status = status.as_deref();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
@@ -984,7 +1045,7 @@ async fn resolve_provider_name(pool: &PgPool, lib_provider: Option<&str>) -> Str
|
||||
"google_books".to_string()
|
||||
}
|
||||
|
||||
async fn load_provider_config_from_pool(
|
||||
pub(crate) async fn load_provider_config_from_pool(
|
||||
pool: &PgPool,
|
||||
provider_name: &str,
|
||||
) -> metadata_providers::ProviderConfig {
|
||||
@@ -1018,7 +1079,7 @@ async fn load_provider_config_from_pool(
|
||||
config
|
||||
}
|
||||
|
||||
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
pub(crate) async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
sqlx::query_scalar::<_, bool>(
|
||||
"SELECT status = 'cancelled' FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
@@ -1028,7 +1089,7 @@ async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32, total: i32, current: &str) {
|
||||
pub(crate) async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32, total: i32, current: &str) {
|
||||
let percent = if total > 0 {
|
||||
(processed as f64 / total as f64 * 100.0) as i32
|
||||
} else {
|
||||
@@ -1046,20 +1107,21 @@ async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32, total: i32
|
||||
.await;
|
||||
}
|
||||
|
||||
async fn insert_result(
|
||||
pool: &PgPool,
|
||||
struct InsertResultParams<'a> {
|
||||
job_id: Uuid,
|
||||
library_id: Uuid,
|
||||
series_name: &str,
|
||||
status: &str,
|
||||
provider_used: Option<&str>,
|
||||
series_name: &'a str,
|
||||
status: &'a str,
|
||||
provider_used: Option<&'a str>,
|
||||
fallback_used: bool,
|
||||
candidates_count: i32,
|
||||
best_confidence: Option<f32>,
|
||||
best_candidate_json: Option<&serde_json::Value>,
|
||||
best_candidate_json: Option<&'a serde_json::Value>,
|
||||
link_id: Option<Uuid>,
|
||||
error_message: Option<&str>,
|
||||
) {
|
||||
error_message: Option<&'a str>,
|
||||
}
|
||||
|
||||
async fn insert_result(pool: &PgPool, params: &InsertResultParams<'_>) {
|
||||
let _ = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO metadata_batch_results
|
||||
@@ -1067,17 +1129,17 @@ async fn insert_result(
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
|
||||
"#,
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.bind(series_name)
|
||||
.bind(status)
|
||||
.bind(provider_used)
|
||||
.bind(fallback_used)
|
||||
.bind(candidates_count)
|
||||
.bind(best_confidence)
|
||||
.bind(best_candidate_json)
|
||||
.bind(link_id)
|
||||
.bind(error_message)
|
||||
.bind(params.job_id)
|
||||
.bind(params.library_id)
|
||||
.bind(params.series_name)
|
||||
.bind(params.status)
|
||||
.bind(params.provider_used)
|
||||
.bind(params.fallback_used)
|
||||
.bind(params.candidates_count)
|
||||
.bind(params.best_confidence)
|
||||
.bind(params.best_candidate_json)
|
||||
.bind(params.link_id)
|
||||
.bind(params.error_message)
|
||||
.execute(pool)
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -128,7 +128,7 @@ async fn search_series_impl(
|
||||
let mut candidates: Vec<SeriesCandidate> = media
|
||||
.iter()
|
||||
.filter_map(|m| {
|
||||
let id = m.get("id").and_then(|id| id.as_i64())? as i64;
|
||||
let id = m.get("id").and_then(|id| id.as_i64())?;
|
||||
let title_obj = m.get("title")?;
|
||||
let title = title_obj
|
||||
.get("english")
|
||||
|
||||
@@ -497,6 +497,13 @@ async fn get_series_books_impl(
|
||||
}))
|
||||
.collect();
|
||||
|
||||
static RE_TOME: std::sync::LazyLock<regex::Regex> =
|
||||
std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)-Tome-\d+-").unwrap());
|
||||
static RE_BOOK_ID: std::sync::LazyLock<regex::Regex> =
|
||||
std::sync::LazyLock::new(|| regex::Regex::new(r"-(\d+)\.html").unwrap());
|
||||
static RE_VOLUME: std::sync::LazyLock<regex::Regex> =
|
||||
std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)Tome-(\d+)-").unwrap());
|
||||
|
||||
for (idx, album_el) in doc.select(&album_sel).enumerate() {
|
||||
// Title from <a class="titre" title="..."> — the title attribute is clean
|
||||
let title_sel = Selector::parse("a.titre").ok();
|
||||
@@ -513,16 +520,21 @@ async fn get_series_books_impl(
|
||||
|
||||
// External book ID from album URL (e.g. "...-1063.html")
|
||||
let album_url = title_el.and_then(|el| el.value().attr("href")).unwrap_or("");
|
||||
let external_book_id = regex::Regex::new(r"-(\d+)\.html")
|
||||
.ok()
|
||||
.and_then(|re| re.captures(album_url))
|
||||
|
||||
// Only keep main tomes — their URLs contain "Tome-{N}-"
|
||||
// Skip hors-série (HS), intégrales (INT/INTFL), romans, coffrets, etc.
|
||||
if !RE_TOME.is_match(album_url) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let external_book_id = RE_BOOK_ID
|
||||
.captures(album_url)
|
||||
.map(|c| c[1].to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
// Volume number from URL pattern "Tome-{N}-" or from itemprop name
|
||||
let volume_number = regex::Regex::new(r"(?i)Tome-(\d+)-")
|
||||
.ok()
|
||||
.and_then(|re| re.captures(album_url))
|
||||
let volume_number = RE_VOLUME
|
||||
.captures(album_url)
|
||||
.and_then(|c| c[1].parse::<i32>().ok())
|
||||
.or_else(|| extract_volume_from_title(&title));
|
||||
|
||||
@@ -610,20 +622,50 @@ fn extract_volume_from_title(title: &str) -> Option<i32> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Normalize a title by removing French articles (leading or in parentheses)
|
||||
/// and extra whitespace/punctuation, so that "Les Légendaires - Résistance"
|
||||
/// and "Légendaires (Les) - Résistance" produce the same canonical form.
|
||||
fn normalize_title(s: &str) -> String {
|
||||
let lower = s.to_lowercase();
|
||||
// Remove articles in parentheses: "(les)", "(la)", "(le)", "(l')", "(un)", "(une)", "(des)"
|
||||
let re_parens = regex::Regex::new(r"\s*\((?:les?|la|l'|une?|des|du|d')\)").unwrap();
|
||||
let cleaned = re_parens.replace_all(&lower, "");
|
||||
// Remove leading articles: "les ", "la ", "le ", "l'", "un ", "une ", "des ", "du ", "d'"
|
||||
let re_leading = regex::Regex::new(r"^(?:les?|la|l'|une?|des|du|d')\s+").unwrap();
|
||||
let cleaned = re_leading.replace(&cleaned, "");
|
||||
// Collapse multiple spaces/dashes into single
|
||||
let re_spaces = regex::Regex::new(r"\s+").unwrap();
|
||||
re_spaces.replace_all(cleaned.trim(), " ").to_string()
|
||||
}
|
||||
|
||||
fn compute_confidence(title: &str, query: &str) -> f32 {
|
||||
let title_lower = title.to_lowercase();
|
||||
if title_lower == query {
|
||||
1.0
|
||||
} else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
|
||||
let query_lower = query.to_lowercase();
|
||||
if title_lower == query_lower {
|
||||
return 1.0;
|
||||
}
|
||||
|
||||
// Try with normalized forms (handles Bedetheque's "Name (Article)" convention)
|
||||
let title_norm = normalize_title(title);
|
||||
let query_norm = normalize_title(query);
|
||||
if title_norm == query_norm {
|
||||
return 1.0;
|
||||
}
|
||||
|
||||
if title_lower.starts_with(&query_lower) || query_lower.starts_with(&title_lower)
|
||||
|| title_norm.starts_with(&query_norm) || query_norm.starts_with(&title_norm)
|
||||
{
|
||||
0.85
|
||||
} else if title_lower.contains(query) || query.contains(&title_lower) {
|
||||
} else if title_lower.contains(&query_lower) || query_lower.contains(&title_lower)
|
||||
|| title_norm.contains(&query_norm) || query_norm.contains(&title_norm)
|
||||
{
|
||||
0.7
|
||||
} else {
|
||||
let common: usize = query
|
||||
let common: usize = query_lower
|
||||
.chars()
|
||||
.filter(|c| title_lower.contains(*c))
|
||||
.count();
|
||||
let max_len = query.len().max(title_lower.len()).max(1);
|
||||
let max_len = query_lower.len().max(title_lower.len()).max(1);
|
||||
(common as f32 / max_len as f32).clamp(0.1, 0.6)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,11 +86,11 @@ async fn search_series_impl(
|
||||
.iter()
|
||||
.filter_map(|vol| {
|
||||
let name = vol.get("name").and_then(|n| n.as_str())?.to_string();
|
||||
let id = vol.get("id").and_then(|id| id.as_i64())? as i64;
|
||||
let id = vol.get("id").and_then(|id| id.as_i64())?;
|
||||
let description = vol
|
||||
.get("description")
|
||||
.and_then(|d| d.as_str())
|
||||
.map(|d| strip_html(d));
|
||||
.map(strip_html);
|
||||
let publisher = vol
|
||||
.get("publisher")
|
||||
.and_then(|p| p.get("name"))
|
||||
@@ -180,7 +180,7 @@ async fn get_series_books_impl(
|
||||
let books: Vec<BookCandidate> = results
|
||||
.iter()
|
||||
.filter_map(|issue| {
|
||||
let id = issue.get("id").and_then(|id| id.as_i64())? as i64;
|
||||
let id = issue.get("id").and_then(|id| id.as_i64())?;
|
||||
let name = issue
|
||||
.get("name")
|
||||
.and_then(|n| n.as_str())
|
||||
@@ -194,7 +194,7 @@ async fn get_series_books_impl(
|
||||
let description = issue
|
||||
.get("description")
|
||||
.and_then(|d| d.as_str())
|
||||
.map(|d| strip_html(d));
|
||||
.map(strip_html);
|
||||
let cover_url = issue
|
||||
.get("image")
|
||||
.and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))
|
||||
|
||||
@@ -295,7 +295,7 @@ async fn get_series_books_impl(
|
||||
|
||||
let mut books: Vec<BookCandidate> = items
|
||||
.iter()
|
||||
.map(|item| volume_to_book_candidate(item))
|
||||
.map(volume_to_book_candidate)
|
||||
.collect();
|
||||
|
||||
// Sort by volume number
|
||||
|
||||
@@ -144,10 +144,10 @@ async fn search_series_impl(
|
||||
entry.publishers.push(p.clone());
|
||||
}
|
||||
}
|
||||
if entry.start_year.is_none() || first_publish_year.map_or(false, |y| entry.start_year.unwrap() > y) {
|
||||
if first_publish_year.is_some() {
|
||||
entry.start_year = first_publish_year;
|
||||
}
|
||||
if (entry.start_year.is_none() || first_publish_year.is_some_and(|y| entry.start_year.unwrap() > y))
|
||||
&& first_publish_year.is_some()
|
||||
{
|
||||
entry.start_year = first_publish_year;
|
||||
}
|
||||
if entry.cover_url.is_none() {
|
||||
entry.cover_url = cover_url;
|
||||
|
||||
825
apps/api/src/metadata_refresh.rs
Normal file
825
apps/api/src/metadata_refresh.rs
Normal file
@@ -0,0 +1,825 @@
|
||||
use axum::{
|
||||
extract::{Path as AxumPath, State},
|
||||
Json,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{PgPool, Row};
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
use tracing::{info, warn};
|
||||
|
||||
use crate::{error::ApiError, metadata_providers, state::AppState};
|
||||
use crate::metadata_batch::{load_provider_config_from_pool, is_job_cancelled, update_progress};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// DTOs
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
pub struct MetadataRefreshRequest {
    // Library UUID as a string; parsed (and validated) in `start_refresh`.
    pub library_id: String,
}

/// A single field change: old → new
#[derive(Serialize, Clone)]
struct FieldDiff {
    // Name of the changed field (e.g. "description", "status").
    field: String,
    // Previous value; omitted from JSON when there was none.
    #[serde(skip_serializing_if = "Option::is_none")]
    old: Option<serde_json::Value>,
    // New value; omitted from JSON when the field is being cleared.
    #[serde(skip_serializing_if = "Option::is_none")]
    new: Option<serde_json::Value>,
}

/// Per-book changes
#[derive(Serialize, Clone)]
struct BookDiff {
    book_id: String,
    title: String,
    volume: Option<i32>,
    changes: Vec<FieldDiff>,
}

/// Per-series change report
#[derive(Serialize, Clone)]
struct SeriesRefreshResult {
    series_name: String,
    provider: String,
    status: String, // "updated", "unchanged", "error"
    series_changes: Vec<FieldDiff>,
    book_changes: Vec<BookDiff>,
    // Populated only for status == "error".
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}

/// Response DTO for the report endpoint
#[derive(Serialize, ToSchema)]
pub struct MetadataRefreshReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    pub status: String,
    pub total_links: i64,
    pub refreshed: i64,
    pub unchanged: i64,
    pub errors: i64,
    // JSON array of per-series results, read back from `index_jobs.stats_json`.
    pub changes: serde_json::Value,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// POST /metadata/refresh — Trigger a metadata refresh job
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/metadata/refresh",
|
||||
tag = "metadata",
|
||||
request_body = MetadataRefreshRequest,
|
||||
responses(
|
||||
(status = 200, description = "Job created"),
|
||||
(status = 400, description = "Bad request"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn start_refresh(
|
||||
State(state): State<AppState>,
|
||||
Json(body): Json<MetadataRefreshRequest>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
let library_id: Uuid = body
|
||||
.library_id
|
||||
.parse()
|
||||
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
|
||||
|
||||
// Verify library exists
|
||||
sqlx::query("SELECT 1 FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?
|
||||
.ok_or_else(|| ApiError::not_found("library not found"))?;
|
||||
|
||||
// Check no existing running metadata_refresh job for this library
|
||||
let existing: Option<Uuid> = sqlx::query_scalar(
|
||||
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_refresh' AND status IN ('pending', 'running') LIMIT 1",
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
if let Some(existing_id) = existing {
|
||||
return Ok(Json(serde_json::json!({
|
||||
"id": existing_id.to_string(),
|
||||
"status": "already_running",
|
||||
})));
|
||||
}
|
||||
|
||||
// Check there are approved links to refresh
|
||||
let link_count: i64 = sqlx::query_scalar(
|
||||
"SELECT COUNT(*) FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
if link_count == 0 {
|
||||
return Err(ApiError::bad_request("No approved metadata links to refresh for this library"));
|
||||
}
|
||||
|
||||
let job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_refresh', 'pending')",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Spawn the background processing task
|
||||
let pool = state.pool.clone();
|
||||
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
|
||||
warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
|
||||
let _ = sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(e.to_string())
|
||||
.execute(&pool)
|
||||
.await;
|
||||
notifications::notify(
|
||||
pool.clone(),
|
||||
notifications::NotificationEvent::MetadataRefreshFailed {
|
||||
library_name,
|
||||
error: e.to_string(),
|
||||
},
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Json(serde_json::json!({
|
||||
"id": job_id.to_string(),
|
||||
"status": "pending",
|
||||
})))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// GET /metadata/refresh/:id/report — Refresh report from stats_json
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[utoipa::path(
    get,
    path = "/metadata/refresh/{id}/report",
    tag = "metadata",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = MetadataRefreshReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
/// Build the refresh report for one `metadata_refresh` job from the counters
/// and change list persisted in `index_jobs.stats_json`.
pub async fn get_refresh_report(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<Json<MetadataRefreshReportDto>, ApiError> {
    // Filter by type so the id of an unrelated job kind 404s on this route.
    let row = sqlx::query(
        "SELECT status, stats_json, total_files FROM index_jobs WHERE id = $1 AND type = 'metadata_refresh'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;

    let job_status: String = row.get("status");
    let stats: Option<serde_json::Value> = row.get("stats_json");
    let total_files: Option<i32> = row.get("total_files");

    // stats_json is only present once the job has produced its summary; while
    // it is absent, report zeroed counters and an empty change list.
    let (refreshed, unchanged, errors, changes) = if let Some(ref s) = stats {
        (
            s.get("refreshed").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("unchanged").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("errors").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("changes").cloned().unwrap_or(serde_json::json!([])),
        )
    } else {
        (0, 0, 0, serde_json::json!([]))
    };

    Ok(Json(MetadataRefreshReportDto {
        job_id,
        status: job_status,
        // For metadata_refresh jobs total_files holds the approved-link count.
        total_links: total_files.unwrap_or(0) as i64,
        refreshed,
        unchanged,
        errors,
        changes,
    }))
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Background processing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Background worker for one metadata-refresh job.
///
/// Walks every `approved` link in `external_metadata_links` for `library_id`,
/// re-fetches provider data via [`refresh_link`], and records per-series
/// results plus progress/stats on the `index_jobs` row identified by `job_id`.
///
/// Error policy: sqlx/infrastructure failures abort the job (`Err(String)`);
/// per-series provider failures are counted in `errors` and recorded as
/// `"error"` results, but do not abort the loop.
async fn process_metadata_refresh(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    // Set job to running
    sqlx::query("UPDATE index_jobs SET status = 'running', started_at = NOW() WHERE id = $1")
        .bind(job_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Get all approved links for this library.
    // Tuple layout: (link id, series name, provider name, provider-side series id).
    let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
        r#"
        SELECT id, series_name, provider, external_id
        FROM external_metadata_links
        WHERE library_id = $1 AND status = 'approved'
        ORDER BY series_name
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    // `total_files` is reused here to mean "total links to process" for this job kind.
    let total = links.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let mut processed = 0i32;
    let mut refreshed = 0i32;
    let mut unchanged = 0i32;
    let mut errors = 0i32;
    let mut all_results: Vec<SeriesRefreshResult> = Vec::new();

    for (link_id, series_name, provider_name, external_id) in &links {
        // Check cancellation before each (slow, rate-limited) provider round-trip.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            // Early exit: stats_json is intentionally not written for cancelled jobs.
            return Ok(());
        }

        match refresh_link(pool, *link_id, library_id, series_name, provider_name, external_id).await {
            Ok(result) => {
                // `status` is a string enum produced by refresh_link: "updated" | "unchanged".
                if result.status == "updated" {
                    refreshed += 1;
                    info!("[METADATA_REFRESH] job={job_id} updated series='{series_name}' via {provider_name}");
                } else {
                    unchanged += 1;
                }
                all_results.push(result);
            }
            Err(e) => {
                // Per-series failure: record it and keep going with the next link.
                errors += 1;
                warn!("[METADATA_REFRESH] job={job_id} error on series='{series_name}': {e}");
                all_results.push(SeriesRefreshResult {
                    series_name: series_name.clone(),
                    provider: provider_name.clone(),
                    status: "error".to_string(),
                    series_changes: vec![],
                    book_changes: vec![],
                    error: Some(e),
                });
            }
        }

        processed += 1;
        update_progress(pool, job_id, processed, total, series_name).await;

        // Rate limit: 1s delay between provider calls
        tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
    }

    // Only keep series that have changes or errors (filter out "unchanged")
    let changes_only: Vec<&SeriesRefreshResult> = all_results
        .iter()
        .filter(|r| r.status != "unchanged")
        .collect();

    // Build stats summary (shape consumed by the refresh-report endpoint).
    let stats = serde_json::json!({
        "total_links": total,
        "refreshed": refreshed,
        "unchanged": unchanged,
        "errors": errors,
        "changes": changes_only,
    });

    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, stats_json = $2 WHERE id = $1",
    )
    .bind(job_id)
    .bind(stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");

    // Notification is best-effort: a failed name lookup degrades to None rather
    // than failing the (already successful) job.
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::MetadataRefreshCompleted {
            library_name,
            refreshed,
            unchanged,
            errors,
        },
    );

    Ok(())
}
|
||||
|
||||
/// Refresh a single approved metadata link: re-fetch from provider, compare, sync, return diff.
///
/// Two phases:
/// 1. Series-level: re-search the provider, update the link's cached
///    `metadata_json`, and diff+sync series metadata via [`sync_series_with_diff`].
/// 2. Book-level: re-fetch the provider's volume list, rebuild
///    `external_book_metadata` for this link, match each external volume to a
///    local book (by volume number, then by title containment), and diff+sync
///    matched books via [`sync_book_with_diff`].
///
/// Returns a [`SeriesRefreshResult`] whose `status` is `"updated"` when any
/// series- or book-level field changed, else `"unchanged"`.
async fn refresh_link(
    pool: &PgPool,
    link_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    provider_name: &str,
    external_id: &str,
) -> Result<SeriesRefreshResult, String> {
    let provider = metadata_providers::get_provider(provider_name)
        .ok_or_else(|| format!("Unknown provider: {provider_name}"))?;

    let config = load_provider_config_from_pool(pool, provider_name).await;

    let mut series_changes: Vec<FieldDiff> = Vec::new();
    let mut book_changes: Vec<BookDiff> = Vec::new();

    // ── Series-level refresh ──────────────────────────────────────────────
    let candidates = provider
        .search_series(series_name, &config)
        .await
        .map_err(|e| format!("provider search error: {e}"))?;

    // Prefer the exact previously-linked series; otherwise fall back to the
    // first search hit. NOTE(review): the fallback can silently re-link to a
    // *different* series if the provider stops returning `external_id` — confirm
    // this is intended.
    let candidate = candidates
        .iter()
        .find(|c| c.external_id == external_id)
        .or_else(|| candidates.first());

    if let Some(candidate) = candidate {
        // Update link metadata_json (cached provider payload on the link row).
        sqlx::query(
            r#"
            UPDATE external_metadata_links
            SET metadata_json = $2,
                total_volumes_external = $3,
                updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(link_id)
        .bind(&candidate.metadata_json)
        .bind(candidate.total_volumes)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + sync series metadata
        series_changes = sync_series_with_diff(pool, library_id, series_name, candidate).await?;
    }

    // ── Book-level refresh ────────────────────────────────────────────────
    let books = provider
        .get_series_books(external_id, &config)
        .await
        .map_err(|e| format!("provider books error: {e}"))?;

    // Delete existing external_book_metadata for this link (full rebuild below).
    sqlx::query("DELETE FROM external_book_metadata WHERE link_id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Pre-fetch local books. Empty series names are bucketed as 'unclassified';
    // ordering approximates natural volume order (volume, then title prefix,
    // then first number embedded in the title).
    let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
        r#"
        SELECT id, volume, title FROM books
        WHERE library_id = $1
          AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
        ORDER BY volume NULLS LAST,
            REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
            COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
            title ASC
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    // Assign a positional volume number (1-based list position) to books that
    // have no explicit volume, so they can still be matched by number.
    let local_books_with_pos: Vec<(Uuid, i32, String)> = local_books
        .iter()
        .enumerate()
        .map(|(idx, (id, vol, title))| (*id, vol.unwrap_or((idx + 1) as i32), title.clone()))
        .collect();

    // Each local book may be matched to at most one external volume.
    let mut matched_local_ids = std::collections::HashSet::new();

    for (ext_idx, book) in books.iter().enumerate() {
        // Same positional fallback on the external side.
        let ext_vol = book.volume_number.unwrap_or((ext_idx + 1) as i32);

        // Match by volume number
        let mut local_book_id: Option<Uuid> = local_books_with_pos
            .iter()
            .find(|(id, v, _)| *v == ext_vol && !matched_local_ids.contains(id))
            .map(|(id, _, _)| *id);

        // Match by title containment (case-insensitive, either direction)
        if local_book_id.is_none() {
            let ext_title_lower = book.title.to_lowercase();
            local_book_id = local_books_with_pos
                .iter()
                .find(|(id, _, local_title)| {
                    if matched_local_ids.contains(id) {
                        return false;
                    }
                    let local_lower = local_title.to_lowercase();
                    local_lower.contains(&ext_title_lower) || ext_title_lower.contains(&local_lower)
                })
                .map(|(id, _, _)| *id);
        }

        if let Some(id) = local_book_id {
            matched_local_ids.insert(id);
        }

        // Insert external_book_metadata (book_id is NULL for unmatched volumes,
        // which is how "missing books" are derived elsewhere).
        sqlx::query(
            r#"
            INSERT INTO external_book_metadata
                (link_id, book_id, external_book_id, volume_number, title, authors, isbn, summary, cover_url, page_count, language, publish_date, metadata_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            "#,
        )
        .bind(link_id)
        .bind(local_book_id)
        .bind(&book.external_book_id)
        .bind(book.volume_number)
        .bind(&book.title)
        .bind(&book.authors)
        .bind(&book.isbn)
        .bind(&book.summary)
        .bind(&book.cover_url)
        .bind(book.page_count)
        .bind(&book.language)
        .bind(&book.publish_date)
        .bind(&book.metadata_json)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + push metadata to matched local book
        if let Some(book_id) = local_book_id {
            let diffs = sync_book_with_diff(pool, book_id, book).await?;
            if !diffs.is_empty() {
                // Recover the local title for reporting purposes.
                let local_title = local_books_with_pos
                    .iter()
                    .find(|(id, _, _)| *id == book_id)
                    .map(|(_, _, t)| t.clone())
                    .unwrap_or_default();
                book_changes.push(BookDiff {
                    book_id: book_id.to_string(),
                    title: local_title,
                    volume: book.volume_number,
                    changes: diffs,
                });
            }
        }
    }

    // Update synced_at on the link
    sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let has_changes = !series_changes.is_empty() || !book_changes.is_empty();

    Ok(SeriesRefreshResult {
        series_name: series_name.to_string(),
        provider: provider_name.to_string(),
        status: if has_changes { "updated".to_string() } else { "unchanged".to_string() },
        series_changes,
        book_changes,
        error: None,
    })
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Diff helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Compare old/new for a nullable string field. Returns Some(FieldDiff) only if value actually changed.
|
||||
fn diff_opt_str(field: &str, old: Option<&str>, new: Option<&str>) -> Option<FieldDiff> {
|
||||
let new_val = new.filter(|s| !s.is_empty());
|
||||
// Only report a change if there is a new non-empty value AND it differs from old
|
||||
match (old, new_val) {
|
||||
(Some(o), Some(n)) if o != n => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::Value::String(o.to_string())),
|
||||
new: Some(serde_json::Value::String(n.to_string())),
|
||||
}),
|
||||
(None, Some(n)) => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: None,
|
||||
new: Some(serde_json::Value::String(n.to_string())),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_opt_i32(field: &str, old: Option<i32>, new: Option<i32>) -> Option<FieldDiff> {
|
||||
match (old, new) {
|
||||
(Some(o), Some(n)) if o != n => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::json!(o)),
|
||||
new: Some(serde_json::json!(n)),
|
||||
}),
|
||||
(None, Some(n)) => Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: None,
|
||||
new: Some(serde_json::json!(n)),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_str_vec(field: &str, old: &[String], new: &[String]) -> Option<FieldDiff> {
|
||||
if new.is_empty() {
|
||||
return None;
|
||||
}
|
||||
if old != new {
|
||||
Some(FieldDiff {
|
||||
field: field.to_string(),
|
||||
old: Some(serde_json::json!(old)),
|
||||
new: Some(serde_json::json!(new)),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Series sync with diff tracking
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Diff and upsert series-level metadata from a provider candidate.
///
/// Computes Rust-side [`FieldDiff`]s for every *unlocked* field that actually
/// changes, then performs the lock-aware upsert into `series_metadata`. The
/// per-field locking is enforced twice: in Rust (to suppress diffs for locked
/// fields) and again in the SQL `CASE` expressions (to protect the stored
/// value regardless of the diff computation).
///
/// Returns the list of applied field changes (empty when nothing changed).
async fn sync_series_with_diff(
    pool: &PgPool,
    library_id: Uuid,
    series_name: &str,
    candidate: &metadata_providers::SeriesCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Prefer the description embedded in the provider's raw payload; fall back
    // to the candidate's top-level field.
    let new_description = candidate.metadata_json
        .get("description")
        .and_then(|d| d.as_str())
        .or(candidate.description.as_deref());
    let new_authors = &candidate.authors;
    let new_publishers = &candidate.publishers;
    let new_start_year = candidate.start_year;
    let new_total_volumes = candidate.total_volumes;
    // Provider status strings are normalized through the configurable mapping
    // table before diffing/storing.
    let new_status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
        Some(crate::metadata::normalize_series_status(pool, raw).await)
    } else {
        None
    };
    let new_status = new_status.as_deref();

    // Fetch existing series metadata for diffing
    let existing = sqlx::query(
        r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
           FROM series_metadata WHERE library_id = $1 AND name = $2"#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_optional(pool)
    .await
    .map_err(|e| e.to_string())?;

    // `locked_fields` is a JSON object of { field_name: bool }; a missing row
    // or missing key means "not locked".
    let locked = existing
        .as_ref()
        .map(|r| r.get::<serde_json::Value, _>("locked_fields"))
        .unwrap_or(serde_json::json!({}));
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change)
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("description") {
        let old_desc: Option<String> = existing.as_ref().and_then(|r| r.get("description"));
        if let Some(d) = diff_opt_str("description", old_desc.as_deref(), new_description) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old_authors: Vec<String> = existing.as_ref().map(|r| r.get("authors")).unwrap_or_default();
        if let Some(d) = diff_str_vec("authors", &old_authors, new_authors) {
            diffs.push(d);
        }
    }
    if !is_locked("publishers") {
        let old_publishers: Vec<String> = existing.as_ref().map(|r| r.get("publishers")).unwrap_or_default();
        if let Some(d) = diff_str_vec("publishers", &old_publishers, new_publishers) {
            diffs.push(d);
        }
    }
    if !is_locked("start_year") {
        let old_year: Option<i32> = existing.as_ref().and_then(|r| r.get("start_year"));
        if let Some(d) = diff_opt_i32("start_year", old_year, new_start_year) {
            diffs.push(d);
        }
    }
    if !is_locked("total_volumes") {
        let old_vols: Option<i32> = existing.as_ref().and_then(|r| r.get("total_volumes"));
        if let Some(d) = diff_opt_i32("total_volumes", old_vols, new_total_volumes) {
            diffs.push(d);
        }
    }
    if !is_locked("status") {
        let old_status: Option<String> = existing.as_ref().and_then(|r| r.get("status"));
        if let Some(d) = diff_opt_str("status", old_status.as_deref(), new_status) {
            diffs.push(d);
        }
    }

    // Now do the actual upsert. Each CASE keeps the locked value, otherwise
    // falls back to the existing value when the incoming one is empty/NULL —
    // the same rules the diff helpers above encode.
    sqlx::query(
        r#"
        INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
        ON CONFLICT (library_id, name)
        DO UPDATE SET
            description = CASE
                WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
                ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
            END,
            publishers = CASE
                WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
                WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
                ELSE series_metadata.publishers
            END,
            start_year = CASE
                WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
                ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
            END,
            total_volumes = CASE
                WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
                ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
            END,
            status = CASE
                WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
                ELSE COALESCE(EXCLUDED.status, series_metadata.status)
            END,
            authors = CASE
                WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
                WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
                ELSE series_metadata.authors
            END,
            updated_at = NOW()
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .bind(new_description)
    .bind(new_publishers)
    .bind(new_start_year)
    .bind(new_total_volumes)
    .bind(new_status)
    .bind(new_authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Book sync with diff tracking
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Diff and push provider book metadata onto a matched local book row.
///
/// Like [`sync_series_with_diff`], locking is enforced both in Rust (diff
/// suppression for locked fields) and in the SQL `CASE` expressions (value
/// protection). Empty incoming strings/arrays never overwrite existing data.
///
/// Returns the list of applied field changes (empty when nothing changed).
async fn sync_book_with_diff(
    pool: &PgPool,
    book_id: Uuid,
    ext_book: &metadata_providers::BookCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Fetch current book state (fetch_one: the caller guarantees book_id
    // exists, since it came from a query over `books` in refresh_link).
    let current = sqlx::query(
        "SELECT summary, isbn, publish_date, language, authors, locked_fields FROM books WHERE id = $1",
    )
    .bind(book_id)
    .fetch_one(pool)
    .await
    .map_err(|e| e.to_string())?;

    // `locked_fields` is a JSON object of { field_name: bool }; missing key
    // means "not locked".
    let locked = current.get::<serde_json::Value, _>("locked_fields");
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change)
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("summary") {
        let old: Option<String> = current.get("summary");
        if let Some(d) = diff_opt_str("summary", old.as_deref(), ext_book.summary.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("isbn") {
        let old: Option<String> = current.get("isbn");
        if let Some(d) = diff_opt_str("isbn", old.as_deref(), ext_book.isbn.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("publish_date") {
        let old: Option<String> = current.get("publish_date");
        if let Some(d) = diff_opt_str("publish_date", old.as_deref(), ext_book.publish_date.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("language") {
        let old: Option<String> = current.get("language");
        if let Some(d) = diff_opt_str("language", old.as_deref(), ext_book.language.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old: Vec<String> = current.get("authors");
        if let Some(d) = diff_str_vec("authors", &old, &ext_book.authors) {
            diffs.push(d);
        }
    }

    // Do the actual update. Note the legacy singular `author` column is kept
    // in sync with the first element of `authors`, under the same lock key.
    sqlx::query(
        r#"
        UPDATE books SET
            summary = CASE
                WHEN (locked_fields->>'summary')::boolean IS TRUE THEN summary
                ELSE COALESCE(NULLIF($2, ''), summary)
            END,
            isbn = CASE
                WHEN (locked_fields->>'isbn')::boolean IS TRUE THEN isbn
                ELSE COALESCE(NULLIF($3, ''), isbn)
            END,
            publish_date = CASE
                WHEN (locked_fields->>'publish_date')::boolean IS TRUE THEN publish_date
                ELSE COALESCE(NULLIF($4, ''), publish_date)
            END,
            language = CASE
                WHEN (locked_fields->>'language')::boolean IS TRUE THEN language
                ELSE COALESCE(NULLIF($5, ''), language)
            END,
            authors = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN authors
                WHEN CARDINALITY($6::text[]) > 0 THEN $6
                ELSE authors
            END,
            author = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN author
                WHEN CARDINALITY($6::text[]) > 0 THEN $6[1]
                ELSE author
            END,
            updated_at = NOW()
        WHERE id = $1
        "#,
    )
    .bind(book_id)
    .bind(&ext_book.summary)
    .bind(&ext_book.isbn)
    .bind(&ext_book.publish_date)
    .bind(&ext_book.language)
    .bind(&ext_book.authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
|
||||
@@ -10,14 +10,14 @@ use utoipa::OpenApi;
|
||||
crate::reading_progress::update_reading_progress,
|
||||
crate::reading_progress::mark_series_read,
|
||||
crate::books::get_thumbnail,
|
||||
crate::books::list_series,
|
||||
crate::books::list_all_series,
|
||||
crate::books::ongoing_series,
|
||||
crate::books::ongoing_books,
|
||||
crate::series::list_series,
|
||||
crate::series::list_all_series,
|
||||
crate::series::ongoing_series,
|
||||
crate::series::ongoing_books,
|
||||
crate::books::convert_book,
|
||||
crate::books::update_book,
|
||||
crate::books::get_series_metadata,
|
||||
crate::books::update_series,
|
||||
crate::series::get_series_metadata,
|
||||
crate::series::update_series,
|
||||
crate::pages::get_page,
|
||||
crate::search::search_books,
|
||||
crate::index_jobs::enqueue_rebuild,
|
||||
@@ -35,10 +35,12 @@ use utoipa::OpenApi;
|
||||
crate::libraries::delete_library,
|
||||
crate::libraries::scan_library,
|
||||
crate::libraries::update_monitoring,
|
||||
crate::libraries::update_metadata_provider,
|
||||
crate::tokens::list_tokens,
|
||||
crate::tokens::create_token,
|
||||
crate::tokens::revoke_token,
|
||||
crate::tokens::delete_token,
|
||||
crate::authors::list_authors,
|
||||
crate::stats::get_stats,
|
||||
crate::settings::get_settings,
|
||||
crate::settings::get_setting,
|
||||
@@ -53,6 +55,23 @@ use utoipa::OpenApi;
|
||||
crate::metadata::get_metadata_links,
|
||||
crate::metadata::get_missing_books,
|
||||
crate::metadata::delete_metadata_link,
|
||||
crate::series::series_statuses,
|
||||
crate::series::provider_statuses,
|
||||
crate::settings::list_status_mappings,
|
||||
crate::settings::upsert_status_mapping,
|
||||
crate::settings::delete_status_mapping,
|
||||
crate::prowlarr::search_prowlarr,
|
||||
crate::prowlarr::test_prowlarr,
|
||||
crate::qbittorrent::add_torrent,
|
||||
crate::qbittorrent::test_qbittorrent,
|
||||
crate::metadata_batch::start_batch,
|
||||
crate::metadata_batch::get_batch_report,
|
||||
crate::metadata_batch::get_batch_results,
|
||||
crate::metadata_refresh::start_refresh,
|
||||
crate::metadata_refresh::get_refresh_report,
|
||||
crate::komga::sync_komga_read_books,
|
||||
crate::komga::list_sync_reports,
|
||||
crate::komga::get_sync_report,
|
||||
),
|
||||
components(
|
||||
schemas(
|
||||
@@ -64,14 +83,14 @@ use utoipa::OpenApi;
|
||||
crate::reading_progress::UpdateReadingProgressRequest,
|
||||
crate::reading_progress::MarkSeriesReadRequest,
|
||||
crate::reading_progress::MarkSeriesReadResponse,
|
||||
crate::books::SeriesItem,
|
||||
crate::books::SeriesPage,
|
||||
crate::books::ListAllSeriesQuery,
|
||||
crate::books::OngoingQuery,
|
||||
crate::series::SeriesItem,
|
||||
crate::series::SeriesPage,
|
||||
crate::series::ListAllSeriesQuery,
|
||||
crate::series::OngoingQuery,
|
||||
crate::books::UpdateBookRequest,
|
||||
crate::books::SeriesMetadata,
|
||||
crate::books::UpdateSeriesRequest,
|
||||
crate::books::UpdateSeriesResponse,
|
||||
crate::series::SeriesMetadata,
|
||||
crate::series::UpdateSeriesRequest,
|
||||
crate::series::UpdateSeriesResponse,
|
||||
crate::pages::PageQuery,
|
||||
crate::search::SearchQuery,
|
||||
crate::search::SearchResponse,
|
||||
@@ -86,6 +105,7 @@ use utoipa::OpenApi;
|
||||
crate::libraries::LibraryResponse,
|
||||
crate::libraries::CreateLibraryRequest,
|
||||
crate::libraries::UpdateMonitoringRequest,
|
||||
crate::libraries::UpdateMetadataProviderRequest,
|
||||
crate::tokens::CreateTokenRequest,
|
||||
crate::tokens::TokenResponse,
|
||||
crate::tokens::CreatedTokenResponse,
|
||||
@@ -93,6 +113,11 @@ use utoipa::OpenApi;
|
||||
crate::settings::ClearCacheResponse,
|
||||
crate::settings::CacheStats,
|
||||
crate::settings::ThumbnailStats,
|
||||
crate::settings::StatusMappingDto,
|
||||
crate::settings::UpsertStatusMappingRequest,
|
||||
crate::authors::ListAuthorsQuery,
|
||||
crate::authors::AuthorItem,
|
||||
crate::authors::AuthorsPageResponse,
|
||||
crate::stats::StatsResponse,
|
||||
crate::stats::StatsOverview,
|
||||
crate::stats::ReadingStatusStats,
|
||||
@@ -101,6 +126,8 @@ use utoipa::OpenApi;
|
||||
crate::stats::LibraryStats,
|
||||
crate::stats::TopSeries,
|
||||
crate::stats::MonthlyAdditions,
|
||||
crate::stats::MetadataStats,
|
||||
crate::stats::ProviderCount,
|
||||
crate::metadata::ApproveRequest,
|
||||
crate::metadata::ApproveResponse,
|
||||
crate::metadata::SyncReport,
|
||||
@@ -113,6 +140,23 @@ use utoipa::OpenApi;
|
||||
crate::metadata::ExternalMetadataLinkDto,
|
||||
crate::metadata::MissingBooksDto,
|
||||
crate::metadata::MissingBookItem,
|
||||
crate::qbittorrent::QBittorrentAddRequest,
|
||||
crate::qbittorrent::QBittorrentAddResponse,
|
||||
crate::qbittorrent::QBittorrentTestResponse,
|
||||
crate::prowlarr::ProwlarrSearchRequest,
|
||||
crate::prowlarr::ProwlarrRelease,
|
||||
crate::prowlarr::ProwlarrCategory,
|
||||
crate::prowlarr::ProwlarrSearchResponse,
|
||||
crate::prowlarr::MissingVolumeInput,
|
||||
crate::prowlarr::ProwlarrTestResponse,
|
||||
crate::metadata_batch::MetadataBatchRequest,
|
||||
crate::metadata_batch::MetadataBatchReportDto,
|
||||
crate::metadata_batch::MetadataBatchResultDto,
|
||||
crate::metadata_refresh::MetadataRefreshRequest,
|
||||
crate::metadata_refresh::MetadataRefreshReportDto,
|
||||
crate::komga::KomgaSyncRequest,
|
||||
crate::komga::KomgaSyncResponse,
|
||||
crate::komga::KomgaSyncReportSummary,
|
||||
ErrorResponse,
|
||||
)
|
||||
),
|
||||
@@ -120,12 +164,20 @@ use utoipa::OpenApi;
|
||||
("Bearer" = [])
|
||||
),
|
||||
tags(
|
||||
(name = "books", description = "Read-only endpoints for browsing and searching books"),
|
||||
(name = "books", description = "Book browsing, details and management"),
|
||||
(name = "series", description = "Series browsing, filtering and management"),
|
||||
(name = "search", description = "Full-text search across books and series"),
|
||||
(name = "reading-progress", description = "Reading progress tracking per book"),
|
||||
(name = "libraries", description = "Library management endpoints (Admin only)"),
|
||||
(name = "authors", description = "Author browsing and listing"),
|
||||
(name = "stats", description = "Collection statistics and dashboard data"),
|
||||
(name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"),
|
||||
(name = "indexing", description = "Search index management and job control (Admin only)"),
|
||||
(name = "metadata", description = "External metadata providers and matching (Admin only)"),
|
||||
(name = "komga", description = "Komga read-status sync (Admin only)"),
|
||||
(name = "tokens", description = "API token management (Admin only)"),
|
||||
(name = "settings", description = "Application settings and cache management (Admin only)"),
|
||||
(name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
|
||||
(name = "qbittorrent", description = "qBittorrent download client integration (Admin only)"),
|
||||
),
|
||||
modifiers(&SecurityAddon)
|
||||
)]
|
||||
|
||||
@@ -277,7 +277,17 @@ pub async fn get_page(
|
||||
let cache_dir2 = cache_dir_path.clone();
|
||||
let format2 = format;
|
||||
tokio::spawn(async move {
|
||||
prefetch_page(state2, book_id, &abs_path2, next_page, format2, quality, width, filter, timeout_secs, &cache_dir2).await;
|
||||
prefetch_page(state2, &PrefetchParams {
|
||||
book_id,
|
||||
abs_path: &abs_path2,
|
||||
page: next_page,
|
||||
format: format2,
|
||||
quality,
|
||||
width,
|
||||
filter,
|
||||
timeout_secs,
|
||||
cache_dir: &cache_dir2,
|
||||
}).await;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -290,19 +300,30 @@ pub async fn get_page(
|
||||
}
|
||||
}
|
||||
|
||||
/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
|
||||
async fn prefetch_page(
|
||||
state: AppState,
|
||||
struct PrefetchParams<'a> {
|
||||
book_id: Uuid,
|
||||
abs_path: &str,
|
||||
abs_path: &'a str,
|
||||
page: u32,
|
||||
format: OutputFormat,
|
||||
quality: u8,
|
||||
width: u32,
|
||||
filter: image::imageops::FilterType,
|
||||
timeout_secs: u64,
|
||||
cache_dir: &Path,
|
||||
) {
|
||||
cache_dir: &'a Path,
|
||||
}
|
||||
|
||||
/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
|
||||
async fn prefetch_page(state: AppState, params: &PrefetchParams<'_>) {
|
||||
let book_id = params.book_id;
|
||||
let page = params.page;
|
||||
let format = params.format;
|
||||
let quality = params.quality;
|
||||
let width = params.width;
|
||||
let filter = params.filter;
|
||||
let timeout_secs = params.timeout_secs;
|
||||
let abs_path = params.abs_path;
|
||||
let cache_dir = params.cache_dir;
|
||||
|
||||
let mem_key = format!("{book_id}:{page}:{}:{quality}:{width}", format.extension());
|
||||
// Already in memory cache?
|
||||
if state.page_cache.lock().await.contains(&mem_key) {
|
||||
@@ -330,6 +351,7 @@ async fn prefetch_page(
|
||||
Some(ref e) if e == "cbz" => "cbz",
|
||||
Some(ref e) if e == "cbr" => "cbr",
|
||||
Some(ref e) if e == "pdf" => "pdf",
|
||||
Some(ref e) if e == "epub" => "epub",
|
||||
_ => return,
|
||||
}
|
||||
.to_string();
|
||||
@@ -458,6 +480,7 @@ fn render_page(
|
||||
"cbz" => parsers::BookFormat::Cbz,
|
||||
"cbr" => parsers::BookFormat::Cbr,
|
||||
"pdf" => parsers::BookFormat::Pdf,
|
||||
"epub" => parsers::BookFormat::Epub,
|
||||
_ => return Err(ApiError::bad_request("unsupported source format")),
|
||||
};
|
||||
|
||||
|
||||
363
apps/api/src/prowlarr.rs
Normal file
363
apps/api/src/prowlarr.rs
Normal file
@@ -0,0 +1,363 @@
|
||||
use axum::{extract::State, Json};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
/// One missing volume supplied by the client when searching on behalf of a
/// series with gaps.
#[derive(Deserialize, ToSchema)]
pub struct MissingVolumeInput {
    // Volume number of the missing book, when known.
    pub volume_number: Option<i32>,
    // Accepted from clients but not read server-side (hence dead_code).
    #[allow(dead_code)]
    pub title: Option<String>,
}

/// Request body for the Prowlarr search endpoint.
#[derive(Deserialize, ToSchema)]
pub struct ProwlarrSearchRequest {
    // Series to search releases for.
    pub series_name: String,
    // Restrict the search to a specific volume, when set.
    pub volume_number: Option<i32>,
    // Free-form query that overrides the generated one, when set.
    pub custom_query: Option<String>,
    // Known missing volumes, used to annotate matching releases.
    pub missing_volumes: Option<Vec<MissingVolumeInput>>,
}
|
||||
|
||||
/// Release entry as deserialized from the Prowlarr API (camelCase JSON).
///
/// Same shape as [`ProwlarrRelease`] minus the locally-computed
/// `matched_missing_volumes` annotation.
#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRawRelease {
    pub guid: String,
    pub title: String,
    // Release size in bytes — TODO confirm against Prowlarr API docs.
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
}

/// Release entry returned to our own clients (camelCase JSON), enriched with
/// the volume-matching annotation.
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
    // Which of the requested missing volumes this release appears to contain;
    // omitted from the JSON when not computed.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub matched_missing_volumes: Option<Vec<i32>>,
}

/// Newznab-style category attached to a release.
#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrCategory {
    pub id: i32,
    pub name: Option<String>,
}
|
||||
|
||||
/// Response body for the Prowlarr search endpoint.
#[derive(Serialize, ToSchema)]
pub struct ProwlarrSearchResponse {
    // Releases after local enrichment/filtering.
    pub results: Vec<ProwlarrRelease>,
    // The query string that was actually sent to Prowlarr.
    pub query: String,
}

/// Response body for the Prowlarr connectivity-test endpoint.
#[derive(Serialize, ToSchema)]
pub struct ProwlarrTestResponse {
    pub success: bool,
    // Human-readable outcome description.
    pub message: String,
    // Number of indexers visible through Prowlarr, when the test succeeded.
    pub indexer_count: Option<i32>,
}
|
||||
|
||||
// ─── Config helper ──────────────────────────────────────────────────────────
|
||||
|
||||
/// Shape of the JSON stored under the `prowlarr` key in `app_settings`.
#[derive(Deserialize)]
struct ProwlarrConfig {
    // Base URL of the Prowlarr instance (may carry a trailing slash; trimmed on load).
    url: String,
    api_key: String,
    // Newznab category ids to search; a default set is applied when absent.
    categories: Option<Vec<i32>>,
}
|
||||
|
||||
async fn load_prowlarr_config(
|
||||
pool: &sqlx::PgPool,
|
||||
) -> Result<(String, String, Vec<i32>), ApiError> {
|
||||
let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'prowlarr'")
|
||||
.fetch_optional(pool)
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::bad_request("Prowlarr is not configured"))?;
|
||||
let value: serde_json::Value = row.get("value");
|
||||
let config: ProwlarrConfig = serde_json::from_value(value)
|
||||
.map_err(|e| ApiError::internal(format!("invalid prowlarr config: {e}")))?;
|
||||
|
||||
if config.url.is_empty() || config.api_key.is_empty() {
|
||||
return Err(ApiError::bad_request(
|
||||
"Prowlarr URL and API key must be configured in settings",
|
||||
));
|
||||
}
|
||||
|
||||
let url = config.url.trim_end_matches('/').to_string();
|
||||
let categories = config.categories.unwrap_or_else(|| vec![7030, 7020]);
|
||||
|
||||
Ok((url, config.api_key, categories))
|
||||
}
|
||||
|
||||
// ─── Volume matching ─────────────────────────────────────────────────────────
|
||||
|
||||
/// Extract volume numbers from a release title.
|
||||
/// Looks for patterns like: T01, Tome 01, Vol. 01, v01, #01,
|
||||
/// or standalone numbers that appear after common separators.
|
||||
fn extract_volumes_from_title(title: &str) -> Vec<i32> {
|
||||
let lower = title.to_lowercase();
|
||||
let mut volumes = Vec::new();
|
||||
|
||||
// Patterns: T01, Tome 01, Tome01, Vol 01, Vol.01, v01, #01
|
||||
let prefixes = ["tome", "vol.", "vol ", "t", "v", "#"];
|
||||
let chars: Vec<char> = lower.chars().collect();
|
||||
let len = chars.len();
|
||||
|
||||
for prefix in &prefixes {
|
||||
let mut start = 0;
|
||||
while let Some(pos) = lower[start..].find(prefix) {
|
||||
let abs_pos = start + pos;
|
||||
let after = abs_pos + prefix.len();
|
||||
|
||||
// For single-char prefixes (t, v, #), ensure it's at a word boundary
|
||||
if prefix.len() == 1 && *prefix != "#" {
|
||||
if abs_pos > 0 && chars[abs_pos - 1].is_alphanumeric() {
|
||||
start = after;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Skip optional spaces after prefix
|
||||
let mut i = after;
|
||||
while i < len && chars[i] == ' ' {
|
||||
i += 1;
|
||||
}
|
||||
|
||||
// Read digits
|
||||
let digit_start = i;
|
||||
while i < len && chars[i].is_ascii_digit() {
|
||||
i += 1;
|
||||
}
|
||||
|
||||
if i > digit_start {
|
||||
if let Ok(num) = lower[digit_start..i].parse::<i32>() {
|
||||
if !volumes.contains(&num) {
|
||||
volumes.push(num);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
start = after;
|
||||
}
|
||||
}
|
||||
|
||||
volumes
|
||||
}
|
||||
|
||||
/// Match releases against missing volume numbers.
|
||||
fn match_missing_volumes(
|
||||
releases: Vec<ProwlarrRawRelease>,
|
||||
missing: &[MissingVolumeInput],
|
||||
) -> Vec<ProwlarrRelease> {
|
||||
let missing_numbers: Vec<i32> = missing
|
||||
.iter()
|
||||
.filter_map(|m| m.volume_number)
|
||||
.collect();
|
||||
|
||||
releases
|
||||
.into_iter()
|
||||
.map(|r| {
|
||||
let matched = if missing_numbers.is_empty() {
|
||||
None
|
||||
} else {
|
||||
let title_volumes = extract_volumes_from_title(&r.title);
|
||||
let matched: Vec<i32> = title_volumes
|
||||
.into_iter()
|
||||
.filter(|v| missing_numbers.contains(v))
|
||||
.collect();
|
||||
if matched.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(matched)
|
||||
}
|
||||
};
|
||||
|
||||
ProwlarrRelease {
|
||||
guid: r.guid,
|
||||
title: r.title,
|
||||
size: r.size,
|
||||
download_url: r.download_url,
|
||||
indexer: r.indexer,
|
||||
seeders: r.seeders,
|
||||
leechers: r.leechers,
|
||||
publish_date: r.publish_date,
|
||||
protocol: r.protocol,
|
||||
info_url: r.info_url,
|
||||
categories: r.categories,
|
||||
matched_missing_volumes: matched,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// ─── Handlers ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Search for releases on Prowlarr
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/prowlarr/search",
|
||||
tag = "prowlarr",
|
||||
request_body = ProwlarrSearchRequest,
|
||||
responses(
|
||||
(status = 200, body = ProwlarrSearchResponse),
|
||||
(status = 400, description = "Bad request or Prowlarr not configured"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Prowlarr connection error"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn search_prowlarr(
|
||||
State(state): State<AppState>,
|
||||
Json(body): Json<ProwlarrSearchRequest>,
|
||||
) -> Result<Json<ProwlarrSearchResponse>, ApiError> {
|
||||
let (url, api_key, categories) = load_prowlarr_config(&state.pool).await?;
|
||||
|
||||
let query = if let Some(custom) = &body.custom_query {
|
||||
custom.clone()
|
||||
} else if let Some(vol) = body.volume_number {
|
||||
format!("\"{}\" {}", body.series_name, vol)
|
||||
} else {
|
||||
format!("\"{}\"", body.series_name)
|
||||
};
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
|
||||
|
||||
let mut params: Vec<(&str, String)> = vec![
|
||||
("query", query.clone()),
|
||||
("type", "search".to_string()),
|
||||
];
|
||||
for cat in &categories {
|
||||
params.push(("categories", cat.to_string()));
|
||||
}
|
||||
|
||||
let resp = client
|
||||
.get(format!("{url}/api/v1/search"))
|
||||
.query(¶ms)
|
||||
.header("X-Api-Key", &api_key)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("Prowlarr request failed: {e}")))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
return Err(ApiError::internal(format!(
|
||||
"Prowlarr returned {status}: {text}"
|
||||
)));
|
||||
}
|
||||
|
||||
let raw_text = resp
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("Failed to read Prowlarr response: {e}")))?;
|
||||
|
||||
tracing::debug!("Prowlarr raw response length: {} chars", raw_text.len());
|
||||
|
||||
let raw_releases: Vec<ProwlarrRawRelease> = serde_json::from_str(&raw_text)
|
||||
.map_err(|e| {
|
||||
tracing::error!("Failed to parse Prowlarr response: {e}");
|
||||
tracing::error!("Raw response (first 500 chars): {}", &raw_text[..raw_text.len().min(500)]);
|
||||
ApiError::internal(format!("Failed to parse Prowlarr response: {e}"))
|
||||
})?;
|
||||
|
||||
let results = if let Some(missing) = &body.missing_volumes {
|
||||
match_missing_volumes(raw_releases, missing)
|
||||
} else {
|
||||
raw_releases
|
||||
.into_iter()
|
||||
.map(|r| ProwlarrRelease {
|
||||
guid: r.guid,
|
||||
title: r.title,
|
||||
size: r.size,
|
||||
download_url: r.download_url,
|
||||
indexer: r.indexer,
|
||||
seeders: r.seeders,
|
||||
leechers: r.leechers,
|
||||
publish_date: r.publish_date,
|
||||
protocol: r.protocol,
|
||||
info_url: r.info_url,
|
||||
categories: r.categories,
|
||||
matched_missing_volumes: None,
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
Ok(Json(ProwlarrSearchResponse { results, query }))
|
||||
}
|
||||
|
||||
/// Test connection to Prowlarr
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/prowlarr/test",
|
||||
tag = "prowlarr",
|
||||
responses(
|
||||
(status = 200, body = ProwlarrTestResponse),
|
||||
(status = 400, description = "Prowlarr not configured"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn test_prowlarr(
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<ProwlarrTestResponse>, ApiError> {
|
||||
let (url, api_key, _categories) = load_prowlarr_config(&state.pool).await?;
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
|
||||
|
||||
let resp = client
|
||||
.get(format!("{url}/api/v1/indexer"))
|
||||
.header("X-Api-Key", &api_key)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
match resp {
|
||||
Ok(r) if r.status().is_success() => {
|
||||
let indexers: Vec<serde_json::Value> = r.json().await.unwrap_or_default();
|
||||
Ok(Json(ProwlarrTestResponse {
|
||||
success: true,
|
||||
message: format!("Connected successfully ({} indexers)", indexers.len()),
|
||||
indexer_count: Some(indexers.len() as i32),
|
||||
}))
|
||||
}
|
||||
Ok(r) => {
|
||||
let status = r.status();
|
||||
let text = r.text().await.unwrap_or_default();
|
||||
Ok(Json(ProwlarrTestResponse {
|
||||
success: false,
|
||||
message: format!("Prowlarr returned {status}: {text}"),
|
||||
indexer_count: None,
|
||||
}))
|
||||
}
|
||||
Err(e) => Ok(Json(ProwlarrTestResponse {
|
||||
success: false,
|
||||
message: format!("Connection failed: {e}"),
|
||||
indexer_count: None,
|
||||
})),
|
||||
}
|
||||
}
|
||||
218
apps/api/src/qbittorrent.rs
Normal file
218
apps/api/src/qbittorrent.rs
Normal file
@@ -0,0 +1,218 @@
|
||||
use axum::{extract::State, Json};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
pub struct QBittorrentAddRequest {
|
||||
pub url: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct QBittorrentAddResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct QBittorrentTestResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
pub version: Option<String>,
|
||||
}
|
||||
|
||||
// ─── Config helper ──────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct QBittorrentConfig {
|
||||
url: String,
|
||||
username: String,
|
||||
password: String,
|
||||
}
|
||||
|
||||
async fn load_qbittorrent_config(
|
||||
pool: &sqlx::PgPool,
|
||||
) -> Result<(String, String, String), ApiError> {
|
||||
let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'")
|
||||
.fetch_optional(pool)
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::bad_request("qBittorrent is not configured"))?;
|
||||
let value: serde_json::Value = row.get("value");
|
||||
let config: QBittorrentConfig = serde_json::from_value(value)
|
||||
.map_err(|e| ApiError::internal(format!("invalid qbittorrent config: {e}")))?;
|
||||
|
||||
if config.url.is_empty() || config.username.is_empty() {
|
||||
return Err(ApiError::bad_request(
|
||||
"qBittorrent URL and username must be configured in settings",
|
||||
));
|
||||
}
|
||||
|
||||
let url = config.url.trim_end_matches('/').to_string();
|
||||
Ok((url, config.username, config.password))
|
||||
}
|
||||
|
||||
// ─── Login helper ───────────────────────────────────────────────────────────
|
||||
|
||||
async fn qbittorrent_login(
|
||||
client: &reqwest::Client,
|
||||
base_url: &str,
|
||||
username: &str,
|
||||
password: &str,
|
||||
) -> Result<String, ApiError> {
|
||||
let resp = client
|
||||
.post(format!("{base_url}/api/v2/auth/login"))
|
||||
.form(&[("username", username), ("password", password)])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("qBittorrent login request failed: {e}")))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
return Err(ApiError::internal(format!(
|
||||
"qBittorrent login failed ({status}): {text}"
|
||||
)));
|
||||
}
|
||||
|
||||
// Extract SID from Set-Cookie header
|
||||
let cookie_header = resp
|
||||
.headers()
|
||||
.get("set-cookie")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.unwrap_or("");
|
||||
|
||||
let sid = cookie_header
|
||||
.split(';')
|
||||
.next()
|
||||
.and_then(|s| s.strip_prefix("SID="))
|
||||
.ok_or_else(|| ApiError::internal("Failed to get SID cookie from qBittorrent"))?
|
||||
.to_string();
|
||||
|
||||
Ok(sid)
|
||||
}
|
||||
|
||||
// ─── Handlers ───────────────────────────────────────────────────────────────
|
||||
|
||||
/// Add a torrent to qBittorrent
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/qbittorrent/add",
|
||||
tag = "qbittorrent",
|
||||
request_body = QBittorrentAddRequest,
|
||||
responses(
|
||||
(status = 200, body = QBittorrentAddResponse),
|
||||
(status = 400, description = "Bad request or qBittorrent not configured"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "qBittorrent connection error"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn add_torrent(
|
||||
State(state): State<AppState>,
|
||||
Json(body): Json<QBittorrentAddRequest>,
|
||||
) -> Result<Json<QBittorrentAddResponse>, ApiError> {
|
||||
if body.url.is_empty() {
|
||||
return Err(ApiError::bad_request("url is required"));
|
||||
}
|
||||
|
||||
let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
|
||||
|
||||
let sid = qbittorrent_login(&client, &base_url, &username, &password).await?;
|
||||
|
||||
let resp = client
|
||||
.post(format!("{base_url}/api/v2/torrents/add"))
|
||||
.header("Cookie", format!("SID={sid}"))
|
||||
.form(&[("urls", &body.url)])
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?;
|
||||
|
||||
if resp.status().is_success() {
|
||||
Ok(Json(QBittorrentAddResponse {
|
||||
success: true,
|
||||
message: "Torrent added to qBittorrent".to_string(),
|
||||
}))
|
||||
} else {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
Ok(Json(QBittorrentAddResponse {
|
||||
success: false,
|
||||
message: format!("qBittorrent returned {status}: {text}"),
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
/// Test connection to qBittorrent
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/qbittorrent/test",
|
||||
tag = "qbittorrent",
|
||||
responses(
|
||||
(status = 200, body = QBittorrentTestResponse),
|
||||
(status = 400, description = "qBittorrent not configured"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn test_qbittorrent(
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<QBittorrentTestResponse>, ApiError> {
|
||||
let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;
|
||||
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(10))
|
||||
.build()
|
||||
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
|
||||
|
||||
let sid = match qbittorrent_login(&client, &base_url, &username, &password).await {
|
||||
Ok(sid) => sid,
|
||||
Err(e) => {
|
||||
return Ok(Json(QBittorrentTestResponse {
|
||||
success: false,
|
||||
message: format!("Login failed: {}", e.message),
|
||||
version: None,
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
let resp = client
|
||||
.get(format!("{base_url}/api/v2/app/version"))
|
||||
.header("Cookie", format!("SID={sid}"))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
match resp {
|
||||
Ok(r) if r.status().is_success() => {
|
||||
let version = r.text().await.unwrap_or_default();
|
||||
Ok(Json(QBittorrentTestResponse {
|
||||
success: true,
|
||||
message: format!("Connected successfully ({})", version.trim()),
|
||||
version: Some(version.trim().to_string()),
|
||||
}))
|
||||
}
|
||||
Ok(r) => {
|
||||
let status = r.status();
|
||||
let text = r.text().await.unwrap_or_default();
|
||||
Ok(Json(QBittorrentTestResponse {
|
||||
success: false,
|
||||
message: format!("qBittorrent returned {status}: {text}"),
|
||||
version: None,
|
||||
}))
|
||||
}
|
||||
Err(e) => Ok(Json(QBittorrentTestResponse {
|
||||
success: false,
|
||||
message: format!("Connection failed: {e}"),
|
||||
version: None,
|
||||
})),
|
||||
}
|
||||
}
|
||||
@@ -43,11 +43,11 @@ pub struct SearchResponse {
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/search",
|
||||
tag = "books",
|
||||
tag = "search",
|
||||
params(
|
||||
("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
|
||||
("library_id" = Option<String>, Query, description = "Filter by library ID"),
|
||||
("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf)"),
|
||||
("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf, epub)"),
|
||||
("kind" = Option<String>, Query, description = "Filter by kind (alias for type)"),
|
||||
("limit" = Option<usize>, Query, description = "Max results per type (max 100)"),
|
||||
),
|
||||
|
||||
1028
apps/api/src/series.rs
Normal file
1028
apps/api/src/series.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,12 @@
|
||||
use axum::{
|
||||
extract::State,
|
||||
routing::{get, post},
|
||||
extract::{Path as AxumPath, State},
|
||||
routing::{delete, get, post},
|
||||
Json, Router,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::{AppState, load_dynamic_settings}};
|
||||
@@ -42,6 +43,14 @@ pub fn settings_routes() -> Router<AppState> {
|
||||
.route("/settings/cache/clear", post(clear_cache))
|
||||
.route("/settings/cache/stats", get(get_cache_stats))
|
||||
.route("/settings/thumbnail/stats", get(get_thumbnail_stats))
|
||||
.route(
|
||||
"/settings/status-mappings",
|
||||
get(list_status_mappings).post(upsert_status_mapping),
|
||||
)
|
||||
.route(
|
||||
"/settings/status-mappings/:id",
|
||||
delete(delete_status_mapping),
|
||||
)
|
||||
}
|
||||
|
||||
/// List all settings
|
||||
@@ -324,3 +333,125 @@ pub async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<
|
||||
|
||||
Ok(Json(stats))
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Status Mappings
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
|
||||
pub struct StatusMappingDto {
|
||||
pub id: String,
|
||||
pub provider_status: String,
|
||||
pub mapped_status: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, ToSchema)]
|
||||
pub struct UpsertStatusMappingRequest {
|
||||
pub provider_status: String,
|
||||
pub mapped_status: String,
|
||||
}
|
||||
|
||||
/// List all status mappings
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/settings/status-mappings",
|
||||
tag = "settings",
|
||||
responses(
|
||||
(status = 200, body = Vec<StatusMappingDto>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn list_status_mappings(
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<Vec<StatusMappingDto>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, provider_status, mapped_status FROM status_mappings ORDER BY mapped_status NULLS LAST, provider_status",
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
let mappings = rows
|
||||
.iter()
|
||||
.map(|row| StatusMappingDto {
|
||||
id: row.get::<Uuid, _>("id").to_string(),
|
||||
provider_status: row.get("provider_status"),
|
||||
mapped_status: row.get::<Option<String>, _>("mapped_status"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(mappings))
|
||||
}
|
||||
|
||||
/// Create or update a status mapping
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/settings/status-mappings",
|
||||
tag = "settings",
|
||||
request_body = UpsertStatusMappingRequest,
|
||||
responses(
|
||||
(status = 200, body = StatusMappingDto),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn upsert_status_mapping(
|
||||
State(state): State<AppState>,
|
||||
Json(body): Json<UpsertStatusMappingRequest>,
|
||||
) -> Result<Json<StatusMappingDto>, ApiError> {
|
||||
let provider_status = body.provider_status.to_lowercase();
|
||||
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO status_mappings (provider_status, mapped_status)
|
||||
VALUES ($1, $2)
|
||||
ON CONFLICT (provider_status)
|
||||
DO UPDATE SET mapped_status = $2, updated_at = NOW()
|
||||
RETURNING id, provider_status, mapped_status
|
||||
"#,
|
||||
)
|
||||
.bind(&provider_status)
|
||||
.bind(&body.mapped_status)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
Ok(Json(StatusMappingDto {
|
||||
id: row.get::<Uuid, _>("id").to_string(),
|
||||
provider_status: row.get("provider_status"),
|
||||
mapped_status: row.get::<Option<String>, _>("mapped_status"),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Unmap a status mapping (sets mapped_status to NULL, keeps the provider status known)
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/settings/status-mappings/{id}",
|
||||
tag = "settings",
|
||||
params(("id" = String, Path, description = "Mapping UUID")),
|
||||
responses(
|
||||
(status = 200, body = StatusMappingDto),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Not found"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn delete_status_mapping(
|
||||
State(state): State<AppState>,
|
||||
AxumPath(id): AxumPath<Uuid>,
|
||||
) -> Result<Json<StatusMappingDto>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
"UPDATE status_mappings SET mapped_status = NULL, updated_at = NOW() WHERE id = $1 RETURNING id, provider_status, mapped_status",
|
||||
)
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
match row {
|
||||
Some(row) => Ok(Json(StatusMappingDto {
|
||||
id: row.get::<Uuid, _>("id").to_string(),
|
||||
provider_status: row.get("provider_status"),
|
||||
mapped_status: row.get::<Option<String>, _>("mapped_status"),
|
||||
})),
|
||||
None => Err(ApiError::not_found("status mapping not found")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,6 +58,22 @@ pub struct MonthlyAdditions {
|
||||
pub books_added: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct MetadataStats {
|
||||
pub total_series: i64,
|
||||
pub series_linked: i64,
|
||||
pub series_unlinked: i64,
|
||||
pub books_with_summary: i64,
|
||||
pub books_with_isbn: i64,
|
||||
pub by_provider: Vec<ProviderCount>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct ProviderCount {
|
||||
pub provider: String,
|
||||
pub count: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct StatsResponse {
|
||||
pub overview: StatsOverview,
|
||||
@@ -67,13 +83,14 @@ pub struct StatsResponse {
|
||||
pub by_library: Vec<LibraryStats>,
|
||||
pub top_series: Vec<TopSeries>,
|
||||
pub additions_over_time: Vec<MonthlyAdditions>,
|
||||
pub metadata: MetadataStats,
|
||||
}
|
||||
|
||||
/// Get collection statistics for the dashboard
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/stats",
|
||||
tag = "books",
|
||||
tag = "stats",
|
||||
responses(
|
||||
(status = 200, body = StatsResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
@@ -265,6 +282,51 @@ pub async fn get_stats(
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Metadata stats
|
||||
let meta_row = sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
(SELECT COUNT(DISTINCT NULLIF(series, '')) FROM books) AS total_series,
|
||||
(SELECT COUNT(DISTINCT series_name) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
|
||||
(SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary,
|
||||
(SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn
|
||||
"#,
|
||||
)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
let meta_total_series: i64 = meta_row.get("total_series");
|
||||
let meta_series_linked: i64 = meta_row.get("series_linked");
|
||||
|
||||
let provider_rows = sqlx::query(
|
||||
r#"
|
||||
SELECT provider, COUNT(DISTINCT series_name) AS count
|
||||
FROM external_metadata_links
|
||||
WHERE status = 'approved'
|
||||
GROUP BY provider
|
||||
ORDER BY count DESC
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
let by_provider: Vec<ProviderCount> = provider_rows
|
||||
.iter()
|
||||
.map(|r| ProviderCount {
|
||||
provider: r.get("provider"),
|
||||
count: r.get("count"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let metadata = MetadataStats {
|
||||
total_series: meta_total_series,
|
||||
series_linked: meta_series_linked,
|
||||
series_unlinked: meta_total_series - meta_series_linked,
|
||||
books_with_summary: meta_row.get("books_with_summary"),
|
||||
books_with_isbn: meta_row.get("books_with_isbn"),
|
||||
by_provider,
|
||||
};
|
||||
|
||||
Ok(Json(StatsResponse {
|
||||
overview,
|
||||
reading_status,
|
||||
@@ -273,5 +335,6 @@ pub async fn get_stats(
|
||||
by_library,
|
||||
top_series,
|
||||
additions_over_time,
|
||||
metadata,
|
||||
}))
|
||||
}
|
||||
|
||||
46
apps/api/src/telegram.rs
Normal file
46
apps/api/src/telegram.rs
Normal file
@@ -0,0 +1,46 @@
|
||||
use axum::{extract::State, Json};
|
||||
use serde::Serialize;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct TelegramTestResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
/// Test Telegram connection by sending a test message
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/telegram/test",
|
||||
tag = "notifications",
|
||||
responses(
|
||||
(status = 200, body = TelegramTestResponse),
|
||||
(status = 400, description = "Telegram not configured"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn test_telegram(
|
||||
State(state): State<AppState>,
|
||||
) -> Result<Json<TelegramTestResponse>, ApiError> {
|
||||
let config = notifications::load_telegram_config(&state.pool)
|
||||
.await
|
||||
.ok_or_else(|| {
|
||||
ApiError::bad_request(
|
||||
"Telegram is not configured or disabled. Set bot_token, chat_id, and enable it.",
|
||||
)
|
||||
})?;
|
||||
|
||||
match notifications::send_test_message(&config).await {
|
||||
Ok(()) => Ok(Json(TelegramTestResponse {
|
||||
success: true,
|
||||
message: "Test message sent successfully".to_string(),
|
||||
})),
|
||||
Err(e) => Ok(Json(TelegramTestResponse {
|
||||
success: false,
|
||||
message: format!("Failed to send: {e}"),
|
||||
})),
|
||||
}
|
||||
}
|
||||
@@ -7,10 +7,11 @@ export async function GET(request: NextRequest) {
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode(""));
|
||||
|
||||
|
||||
let lastData: string | null = null;
|
||||
let isActive = true;
|
||||
let consecutiveErrors = 0;
|
||||
let intervalId: ReturnType<typeof setInterval> | null = null;
|
||||
|
||||
const fetchJobs = async () => {
|
||||
if (!isActive) return;
|
||||
@@ -25,51 +26,52 @@ export async function GET(request: NextRequest) {
|
||||
const data = await response.json();
|
||||
const dataStr = JSON.stringify(data);
|
||||
|
||||
// Send if data changed
|
||||
// Send only if data changed
|
||||
if (dataStr !== lastData && isActive) {
|
||||
lastData = dataStr;
|
||||
try {
|
||||
controller.enqueue(
|
||||
new TextEncoder().encode(`data: ${dataStr}\n\n`)
|
||||
);
|
||||
} catch (err) {
|
||||
// Controller closed, ignore
|
||||
} catch {
|
||||
isActive = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Adapt interval: 2s when active jobs exist, 15s when idle
|
||||
const hasActiveJobs = data.some((j: { status: string }) =>
|
||||
j.status === "running" || j.status === "pending" || j.status === "extracting_pages" || j.status === "generating_thumbnails"
|
||||
);
|
||||
const nextInterval = hasActiveJobs ? 2000 : 15000;
|
||||
restartInterval(nextInterval);
|
||||
}
|
||||
} catch (error) {
|
||||
if (isActive) {
|
||||
consecutiveErrors++;
|
||||
// Only log first failure and every 30th to avoid spam
|
||||
if (consecutiveErrors === 1 || consecutiveErrors % 30 === 0) {
|
||||
console.warn(`SSE fetch error (${consecutiveErrors} consecutive):`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Initial fetch
|
||||
|
||||
const restartInterval = (ms: number) => {
|
||||
if (intervalId !== null) clearInterval(intervalId);
|
||||
intervalId = setInterval(fetchJobs, ms);
|
||||
};
|
||||
|
||||
// Initial fetch + start polling
|
||||
await fetchJobs();
|
||||
|
||||
// Poll every 2 seconds
|
||||
const interval = setInterval(async () => {
|
||||
if (!isActive) {
|
||||
clearInterval(interval);
|
||||
return;
|
||||
}
|
||||
await fetchJobs();
|
||||
}, 2000);
|
||||
|
||||
|
||||
// Cleanup
|
||||
request.signal.addEventListener("abort", () => {
|
||||
isActive = false;
|
||||
clearInterval(interval);
|
||||
if (intervalId !== null) clearInterval(intervalId);
|
||||
controller.close();
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
"Content-Type": "text/event-stream",
|
||||
|
||||
@@ -7,8 +7,8 @@ export async function PATCH(
|
||||
) {
|
||||
const { id } = await params;
|
||||
try {
|
||||
const { monitor_enabled, scan_mode, watcher_enabled } = await request.json();
|
||||
const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled);
|
||||
const { monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode } = await request.json();
|
||||
const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode);
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to update monitoring settings";
|
||||
|
||||
16
apps/backoffice/app/api/metadata/refresh/report/route.ts
Normal file
16
apps/backoffice/app/api/metadata/refresh/report/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const jobId = request.nextUrl.searchParams.get("job_id");
|
||||
if (!jobId) {
|
||||
return NextResponse.json({ error: "job_id required" }, { status: 400 });
|
||||
}
|
||||
const data = await apiFetch(`/metadata/refresh/${jobId}/report`);
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to get report";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
16
apps/backoffice/app/api/metadata/refresh/route.ts
Normal file
16
apps/backoffice/app/api/metadata/refresh/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const data = await apiFetch<{ id: string; status: string }>("/metadata/refresh", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to start refresh";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
16
apps/backoffice/app/api/prowlarr/search/route.ts
Normal file
16
apps/backoffice/app/api/prowlarr/search/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextResponse, NextRequest } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const data = await apiFetch("/prowlarr/search", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to search Prowlarr";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
12
apps/backoffice/app/api/prowlarr/test/route.ts
Normal file
12
apps/backoffice/app/api/prowlarr/test/route.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch("/prowlarr/test");
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to test Prowlarr connection";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
16
apps/backoffice/app/api/qbittorrent/add/route.ts
Normal file
16
apps/backoffice/app/api/qbittorrent/add/route.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { NextResponse, NextRequest } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const data = await apiFetch("/qbittorrent/add", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to add torrent";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
12
apps/backoffice/app/api/qbittorrent/test/route.ts
Normal file
12
apps/backoffice/app/api/qbittorrent/test/route.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch("/qbittorrent/test");
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to test qBittorrent";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
11
apps/backoffice/app/api/series/provider-statuses/route.ts
Normal file
11
apps/backoffice/app/api/series/provider-statuses/route.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch<string[]>("/series/provider-statuses");
|
||||
return NextResponse.json(data);
|
||||
} catch {
|
||||
return NextResponse.json([], { status: 200 });
|
||||
}
|
||||
}
|
||||
11
apps/backoffice/app/api/series/statuses/route.ts
Normal file
11
apps/backoffice/app/api/series/statuses/route.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch<string[]>("/series/statuses");
|
||||
return NextResponse.json(data);
|
||||
} catch {
|
||||
return NextResponse.json([], { status: 200 });
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function DELETE(
|
||||
_request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const { id } = await params;
|
||||
try {
|
||||
const data = await apiFetch<unknown>(`/settings/status-mappings/${id}`, {
|
||||
method: "DELETE",
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch {
|
||||
return NextResponse.json({ error: "Failed to delete status mapping" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
24
apps/backoffice/app/api/settings/status-mappings/route.ts
Normal file
24
apps/backoffice/app/api/settings/status-mappings/route.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch<unknown>("/settings/status-mappings");
|
||||
return NextResponse.json(data);
|
||||
} catch {
|
||||
return NextResponse.json({ error: "Failed to fetch status mappings" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const data = await apiFetch<unknown>("/settings/status-mappings", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
return NextResponse.json(data);
|
||||
} catch {
|
||||
return NextResponse.json({ error: "Failed to save status mapping" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
12
apps/backoffice/app/api/telegram/test/route.ts
Normal file
12
apps/backoffice/app/api/telegram/test/route.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { NextResponse } from "next/server";
|
||||
import { apiFetch } from "@/lib/api";
|
||||
|
||||
export async function GET() {
|
||||
try {
|
||||
const data = await apiFetch("/telegram/test");
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : "Failed to test Telegram connection";
|
||||
return NextResponse.json({ error: message }, { status: 500 });
|
||||
}
|
||||
}
|
||||
135
apps/backoffice/app/authors/[name]/page.tsx
Normal file
135
apps/backoffice/app/authors/[name]/page.tsx
Normal file
@@ -0,0 +1,135 @@
|
||||
import { fetchBooks, fetchAllSeries, BooksPageDto, SeriesPageDto, getBookCoverUrl } from "../../../lib/api";
|
||||
import { getServerTranslations } from "../../../lib/i18n/server";
|
||||
import { BooksGrid } from "../../components/BookCard";
|
||||
import { OffsetPagination } from "../../components/ui";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
export default async function AuthorDetailPage({
|
||||
params,
|
||||
searchParams,
|
||||
}: {
|
||||
params: Promise<{ name: string }>;
|
||||
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
|
||||
}) {
|
||||
const { t } = await getServerTranslations();
|
||||
const { name: encodedName } = await params;
|
||||
const authorName = decodeURIComponent(encodedName);
|
||||
const searchParamsAwaited = await searchParams;
|
||||
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
||||
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
||||
|
||||
// Fetch books by this author (server-side filtering via API) and series by this author
|
||||
const [booksPage, seriesPage] = await Promise.all([
|
||||
fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
|
||||
() => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
|
||||
),
|
||||
fetchAllSeries(undefined, undefined, undefined, 1, 200, undefined, undefined, undefined, undefined, authorName).catch(
|
||||
() => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
|
||||
),
|
||||
]);
|
||||
|
||||
const totalPages = Math.ceil(booksPage.total / limit);
|
||||
|
||||
const authorSeries = seriesPage.items;
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Breadcrumb */}
|
||||
<nav className="flex items-center gap-2 text-sm text-muted-foreground mb-6">
|
||||
<Link href="/authors" className="hover:text-foreground transition-colors">
|
||||
{t("authors.title")}
|
||||
</Link>
|
||||
<span>/</span>
|
||||
<span className="text-foreground font-medium">{authorName}</span>
|
||||
</nav>
|
||||
|
||||
{/* Author Header */}
|
||||
<div className="flex items-center gap-4 mb-8">
|
||||
<div className="w-16 h-16 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
|
||||
<span className="text-2xl font-bold text-accent-foreground">
|
||||
{authorName.charAt(0).toUpperCase()}
|
||||
</span>
|
||||
</div>
|
||||
<div>
|
||||
<h1 className="text-3xl font-bold text-foreground">{authorName}</h1>
|
||||
<div className="flex items-center gap-4 mt-1">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{t("authors.bookCount", { count: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
{authorSeries.length > 0 && (
|
||||
<span className="text-sm text-muted-foreground">
|
||||
{t("authors.seriesCount", { count: String(authorSeries.length), plural: authorSeries.length !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Series Section */}
|
||||
{authorSeries.length > 0 && (
|
||||
<section className="mb-8">
|
||||
<h2 className="text-xl font-semibold text-foreground mb-4">
|
||||
{t("authors.seriesBy", { name: authorName })}
|
||||
</h2>
|
||||
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
|
||||
{authorSeries.map((s) => (
|
||||
<Link
|
||||
key={`${s.library_id}-${s.name}`}
|
||||
href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
|
||||
className="group"
|
||||
>
|
||||
<div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200">
|
||||
<div className="aspect-[2/3] relative bg-muted/50">
|
||||
<Image
|
||||
src={getBookCoverUrl(s.first_book_id)}
|
||||
alt={s.name}
|
||||
fill
|
||||
className="object-cover"
|
||||
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
|
||||
/>
|
||||
</div>
|
||||
<div className="p-3">
|
||||
<h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
|
||||
{s.name}
|
||||
</h3>
|
||||
<p className="text-xs text-muted-foreground mt-1">
|
||||
{t("authors.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Books Section */}
|
||||
{booksPage.items.length > 0 && (
|
||||
<section>
|
||||
<h2 className="text-xl font-semibold text-foreground mb-4">
|
||||
{t("authors.booksBy", { name: authorName })}
|
||||
</h2>
|
||||
<BooksGrid books={booksPage.items} />
|
||||
<OffsetPagination
|
||||
currentPage={page}
|
||||
totalPages={totalPages}
|
||||
pageSize={limit}
|
||||
totalItems={booksPage.total}
|
||||
/>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Empty State */}
|
||||
{booksPage.items.length === 0 && authorSeries.length === 0 && (
|
||||
<div className="flex flex-col items-center justify-center py-16 text-center">
|
||||
<p className="text-muted-foreground text-lg">
|
||||
{t("authors.noResults")}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
122
apps/backoffice/app/authors/page.tsx
Normal file
122
apps/backoffice/app/authors/page.tsx
Normal file
@@ -0,0 +1,122 @@
|
||||
import { fetchAuthors, AuthorsPageDto } from "../../lib/api";
|
||||
import { getServerTranslations } from "../../lib/i18n/server";
|
||||
import { LiveSearchForm } from "../components/LiveSearchForm";
|
||||
import { Card, CardContent, OffsetPagination } from "../components/ui";
|
||||
import Link from "next/link";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
export default async function AuthorsPage({
|
||||
searchParams,
|
||||
}: {
|
||||
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
|
||||
}) {
|
||||
const { t } = await getServerTranslations();
|
||||
const searchParamsAwaited = await searchParams;
|
||||
const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
|
||||
const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
|
||||
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
||||
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
||||
|
||||
const authorsPage = await fetchAuthors(
|
||||
searchQuery || undefined,
|
||||
page,
|
||||
limit,
|
||||
sort,
|
||||
).catch(() => ({ items: [], total: 0, page: 1, limit }) as AuthorsPageDto);
|
||||
|
||||
const totalPages = Math.ceil(authorsPage.total / limit);
|
||||
const hasFilters = searchQuery || sort;
|
||||
|
||||
const sortOptions = [
|
||||
{ value: "", label: t("authors.sortName") },
|
||||
{ value: "books", label: t("authors.sortBooks") },
|
||||
];
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="mb-6">
|
||||
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
|
||||
<svg className="w-8 h-8 text-violet-500" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
|
||||
</svg>
|
||||
{t("authors.title")}
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
<Card className="mb-6">
|
||||
<CardContent className="pt-6">
|
||||
<LiveSearchForm
|
||||
basePath="/authors"
|
||||
fields={[
|
||||
{ name: "q", type: "text", label: t("common.search"), placeholder: t("authors.searchPlaceholder") },
|
||||
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
|
||||
]}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Results count */}
|
||||
<p className="text-sm text-muted-foreground mb-4">
|
||||
{authorsPage.total} {t("authors.title").toLowerCase()}
|
||||
{searchQuery && <> {t("authors.matchingQuery")} "{searchQuery}"</>}
|
||||
</p>
|
||||
|
||||
{/* Authors List */}
|
||||
{authorsPage.items.length > 0 ? (
|
||||
<>
|
||||
<div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 gap-4">
|
||||
{authorsPage.items.map((author) => (
|
||||
<Link
|
||||
key={author.name}
|
||||
href={`/authors/${encodeURIComponent(author.name)}`}
|
||||
className="group"
|
||||
>
|
||||
<div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 p-4">
|
||||
<div className="flex items-center gap-3">
|
||||
<div className="w-10 h-10 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
|
||||
<span className="text-lg font-semibold text-violet-500">
|
||||
{author.name.charAt(0).toUpperCase()}
|
||||
</span>
|
||||
</div>
|
||||
<div className="min-w-0">
|
||||
<h3 className="font-medium text-foreground truncate text-sm group-hover:text-violet-500 transition-colors" title={author.name}>
|
||||
{author.name}
|
||||
</h3>
|
||||
<div className="flex items-center gap-3 mt-0.5">
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{t("authors.bookCount", { count: String(author.book_count), plural: author.book_count !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground">
|
||||
{t("authors.seriesCount", { count: String(author.series_count), plural: author.series_count !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<OffsetPagination
|
||||
currentPage={page}
|
||||
totalPages={totalPages}
|
||||
pageSize={limit}
|
||||
totalItems={authorsPage.total}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<div className="flex flex-col items-center justify-center py-16 text-center">
|
||||
<div className="w-16 h-16 mb-4 text-muted-foreground/30">
|
||||
<svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
|
||||
</svg>
|
||||
</div>
|
||||
<p className="text-muted-foreground text-lg">
|
||||
{hasFilters ? t("authors.noResults") : t("authors.noAuthors")}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -2,11 +2,15 @@ import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } fro
|
||||
import { BookPreview } from "../../components/BookPreview";
|
||||
import { ConvertButton } from "../../components/ConvertButton";
|
||||
import { MarkBookReadButton } from "../../components/MarkBookReadButton";
|
||||
import { EditBookForm } from "../../components/EditBookForm";
|
||||
import nextDynamic from "next/dynamic";
|
||||
import { SafeHtml } from "../../components/SafeHtml";
|
||||
import { getServerTranslations } from "../../../lib/i18n/server";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
|
||||
const EditBookForm = nextDynamic(
|
||||
() => import("../../components/EditBookForm").then(m => m.EditBookForm)
|
||||
);
|
||||
import { notFound } from "next/navigation";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
@@ -95,7 +99,7 @@ export default async function BookDetailPage({
|
||||
alt={t("bookDetail.coverOf", { title: book.title })}
|
||||
fill
|
||||
className="object-cover"
|
||||
unoptimized
|
||||
sizes="192px"
|
||||
loading="lazy"
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -18,6 +18,8 @@ export default async function BooksPage({
|
||||
const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
|
||||
const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
|
||||
const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
|
||||
const format = typeof searchParamsAwaited.format === "string" ? searchParamsAwaited.format : undefined;
|
||||
const metadataProvider = typeof searchParamsAwaited.metadata === "string" ? searchParamsAwaited.metadata : undefined;
|
||||
const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
|
||||
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
||||
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
||||
@@ -62,7 +64,7 @@ export default async function BooksPage({
|
||||
totalHits = searchResponse.estimated_total_hits;
|
||||
}
|
||||
} else {
|
||||
const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort).catch(() => ({
|
||||
const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort, undefined, format, metadataProvider).catch(() => ({
|
||||
items: [] as BookDto[],
|
||||
total: 0,
|
||||
page: 1,
|
||||
@@ -91,12 +93,26 @@ export default async function BooksPage({
|
||||
{ value: "read", label: t("status.read") },
|
||||
];
|
||||
|
||||
const formatOptions = [
|
||||
{ value: "", label: t("books.allFormats") },
|
||||
{ value: "cbz", label: "CBZ" },
|
||||
{ value: "cbr", label: "CBR" },
|
||||
{ value: "pdf", label: "PDF" },
|
||||
{ value: "epub", label: "EPUB" },
|
||||
];
|
||||
|
||||
const metadataOptions = [
|
||||
{ value: "", label: t("series.metadataAll") },
|
||||
{ value: "linked", label: t("series.metadataLinked") },
|
||||
{ value: "unlinked", label: t("series.metadataUnlinked") },
|
||||
];
|
||||
|
||||
const sortOptions = [
|
||||
{ value: "", label: t("books.sortTitle") },
|
||||
{ value: "latest", label: t("books.sortLatest") },
|
||||
];
|
||||
|
||||
const hasFilters = searchQuery || libraryId || readingStatus || sort;
|
||||
const hasFilters = searchQuery || libraryId || readingStatus || format || metadataProvider || sort;
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -114,10 +130,12 @@ export default async function BooksPage({
|
||||
<LiveSearchForm
|
||||
basePath="/books"
|
||||
fields={[
|
||||
{ name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder"), className: "flex-1 w-full" },
|
||||
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions, className: "w-full sm:w-48" },
|
||||
{ name: "status", type: "select", label: t("books.status"), options: statusOptions, className: "w-full sm:w-40" },
|
||||
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions, className: "w-full sm:w-40" },
|
||||
{ name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder") },
|
||||
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions },
|
||||
{ name: "status", type: "select", label: t("books.status"), options: statusOptions },
|
||||
{ name: "format", type: "select", label: t("books.format"), options: formatOptions },
|
||||
{ name: "metadata", type: "select", label: t("series.metadata"), options: metadataOptions },
|
||||
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
|
||||
]}
|
||||
/>
|
||||
</CardContent>
|
||||
@@ -152,7 +170,7 @@ export default async function BooksPage({
|
||||
alt={t("books.coverOf", { name: s.name })}
|
||||
fill
|
||||
className="object-cover"
|
||||
unoptimized
|
||||
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
|
||||
/>
|
||||
</div>
|
||||
<div className="p-2">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { memo, useState } from "react";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
import { BookDto, ReadingStatus } from "../../lib/api";
|
||||
@@ -17,7 +17,7 @@ interface BookCardProps {
|
||||
readingStatus?: ReadingStatus;
|
||||
}
|
||||
|
||||
function BookImage({ src, alt }: { src: string; alt: string }) {
|
||||
const BookImage = memo(function BookImage({ src, alt }: { src: string; alt: string }) {
|
||||
const [isLoaded, setIsLoaded] = useState(false);
|
||||
const [hasError, setHasError] = useState(false);
|
||||
|
||||
@@ -51,13 +51,12 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
|
||||
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
|
||||
onLoad={() => setIsLoaded(true)}
|
||||
onError={() => setHasError(true)}
|
||||
unoptimized
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
export function BookCard({ book, readingStatus }: BookCardProps) {
|
||||
export const BookCard = memo(function BookCard({ book, readingStatus }: BookCardProps) {
|
||||
const { t } = useTranslation();
|
||||
const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
|
||||
const status = readingStatus ?? book.reading_status;
|
||||
@@ -115,6 +114,7 @@ export function BookCard({ book, readingStatus }: BookCardProps) {
|
||||
${(book.format ?? book.kind) === 'cbz' ? 'bg-success/10 text-success' : ''}
|
||||
${(book.format ?? book.kind) === 'cbr' ? 'bg-warning/10 text-warning' : ''}
|
||||
${(book.format ?? book.kind) === 'pdf' ? 'bg-destructive/10 text-destructive' : ''}
|
||||
${(book.format ?? book.kind) === 'epub' ? 'bg-info/10 text-info' : ''}
|
||||
`}>
|
||||
{book.format ?? book.kind}
|
||||
</span>
|
||||
@@ -128,7 +128,7 @@ export function BookCard({ book, readingStatus }: BookCardProps) {
|
||||
</div>
|
||||
</Link>
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
interface BooksGridProps {
|
||||
books: (BookDto & { coverUrl?: string })[];
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { createPortal } from "react-dom";
|
||||
import { FolderBrowser } from "./FolderBrowser";
|
||||
import { FolderItem } from "../../lib/api";
|
||||
import { Button } from "./ui";
|
||||
@@ -64,14 +65,14 @@ export function FolderPicker({ initialFolders, selectedPath, onSelect }: FolderP
|
||||
</div>
|
||||
|
||||
{/* Popup Modal */}
|
||||
{isOpen && (
|
||||
{isOpen && createPortal(
|
||||
<>
|
||||
{/* Backdrop */}
|
||||
<div
|
||||
<div
|
||||
className="fixed inset-0 bg-black/30 backdrop-blur-sm z-50"
|
||||
onClick={() => setIsOpen(false)}
|
||||
/>
|
||||
|
||||
|
||||
{/* Modal */}
|
||||
<div className="fixed inset-0 flex items-center justify-center z-50 p-4">
|
||||
<div className="bg-card border border-border/50 rounded-xl shadow-2xl w-full max-w-lg overflow-hidden animate-in fade-in zoom-in-95 duration-200">
|
||||
@@ -121,7 +122,8 @@ export function FolderPicker({ initialFolders, selectedPath, onSelect }: FolderP
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
</>,
|
||||
document.body
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
|
||||
44
apps/backoffice/app/components/JobDetailLive.tsx
Normal file
44
apps/backoffice/app/components/JobDetailLive.tsx
Normal file
@@ -0,0 +1,44 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
interface JobDetailLiveProps {
|
||||
jobId: string;
|
||||
isTerminal: boolean;
|
||||
}
|
||||
|
||||
export function JobDetailLive({ jobId, isTerminal }: JobDetailLiveProps) {
|
||||
const router = useRouter();
|
||||
const isTerminalRef = useRef(isTerminal);
|
||||
isTerminalRef.current = isTerminal;
|
||||
|
||||
useEffect(() => {
|
||||
if (isTerminalRef.current) return;
|
||||
|
||||
const eventSource = new EventSource(`/api/jobs/${jobId}/stream`);
|
||||
|
||||
eventSource.onmessage = (event) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
router.refresh();
|
||||
|
||||
if (data.status === "success" || data.status === "failed" || data.status === "cancelled") {
|
||||
eventSource.close();
|
||||
}
|
||||
} catch {
|
||||
// ignore parse errors
|
||||
}
|
||||
};
|
||||
|
||||
eventSource.onerror = () => {
|
||||
eventSource.close();
|
||||
};
|
||||
|
||||
return () => {
|
||||
eventSource.close();
|
||||
};
|
||||
}, [jobId, router]);
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -4,7 +4,7 @@ import { useState } from "react";
|
||||
import Link from "next/link";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
import { JobProgress } from "./JobProgress";
|
||||
import { StatusBadge, JobTypeBadge, Button, MiniProgressBar } from "./ui";
|
||||
import { StatusBadge, JobTypeBadge, Button, MiniProgressBar, Icon } from "./ui";
|
||||
|
||||
interface JobRowProps {
|
||||
job: {
|
||||
@@ -59,28 +59,11 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
const isThumbnailJob = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
|
||||
const hasThumbnailPhase = isPhase2 || isThumbnailJob;
|
||||
|
||||
// Files column: index-phase stats only (Phase 1 discovery)
|
||||
const filesDisplay =
|
||||
job.status === "running" && !isPhase2
|
||||
? job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: scanned > 0
|
||||
? t("jobRow.scanned", { count: scanned })
|
||||
: "-"
|
||||
: job.status === "success" && (indexed > 0 || removed > 0 || errors > 0)
|
||||
? null // rendered below as ✓ / − / ⚠
|
||||
: scanned > 0
|
||||
? t("jobRow.scanned", { count: scanned })
|
||||
: "—";
|
||||
const isMetadataBatch = job.type === "metadata_batch";
|
||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||
|
||||
// Thumbnails column (Phase 2: extracting_pages + generating_thumbnails)
|
||||
// Thumbnails progress (Phase 2: extracting_pages + generating_thumbnails)
|
||||
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isPhase2);
|
||||
const thumbDisplay =
|
||||
thumbInProgress && job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: job.status === "success" && job.total_files != null && hasThumbnailPhase
|
||||
? `✓ ${job.total_files}`
|
||||
: "—";
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -122,25 +105,67 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
{filesDisplay !== null ? (
|
||||
<span className="text-sm text-foreground">{filesDisplay}</span>
|
||||
) : (
|
||||
<div className="flex items-center gap-2 text-xs">
|
||||
<span className="text-success">✓ {indexed}</span>
|
||||
{removed > 0 && <span className="text-warning">− {removed}</span>}
|
||||
{errors > 0 && <span className="text-error">⚠ {errors}</span>}
|
||||
{/* Running progress */}
|
||||
{isActive && job.total_files != null && (
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{job.processed_files ?? 0}/{job.total_files}</span>
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
</div>
|
||||
)}
|
||||
{job.status === "running" && !isPhase2 && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{thumbDisplay}</span>
|
||||
{thumbInProgress && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
{/* Completed stats with icons */}
|
||||
{!isActive && (
|
||||
<div className="flex items-center gap-3 text-xs">
|
||||
{/* Files: indexed count */}
|
||||
{indexed > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-success" title={t("jobRow.filesIndexed", { count: indexed })}>
|
||||
<Icon name="document" size="sm" />
|
||||
{indexed}
|
||||
</span>
|
||||
)}
|
||||
{/* Removed files */}
|
||||
{removed > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-warning" title={t("jobRow.filesRemoved", { count: removed })}>
|
||||
<Icon name="trash" size="sm" />
|
||||
{removed}
|
||||
</span>
|
||||
)}
|
||||
{/* Thumbnails */}
|
||||
{hasThumbnailPhase && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-primary" title={t("jobRow.thumbnailsGenerated", { count: job.total_files })}>
|
||||
<Icon name="image" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Metadata batch: series processed */}
|
||||
{isMetadataBatch && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-info" title={t("jobRow.metadataProcessed", { count: job.total_files })}>
|
||||
<Icon name="tag" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Metadata refresh: links refreshed */}
|
||||
{isMetadataRefresh && job.total_files != null && job.total_files > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-info" title={t("jobRow.metadataRefreshed", { count: job.total_files })}>
|
||||
<Icon name="tag" size="sm" />
|
||||
{job.total_files}
|
||||
</span>
|
||||
)}
|
||||
{/* Errors */}
|
||||
{errors > 0 && (
|
||||
<span className="inline-flex items-center gap-1 text-error" title={t("jobRow.errors", { count: errors })}>
|
||||
<Icon name="warning" size="sm" />
|
||||
{errors}
|
||||
</span>
|
||||
)}
|
||||
{/* Scanned only (no other stats) */}
|
||||
{indexed === 0 && removed === 0 && errors === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && scanned > 0 && (
|
||||
<span className="text-sm text-muted-foreground">{t("jobRow.scanned", { count: scanned })}</span>
|
||||
)}
|
||||
{/* Nothing to show */}
|
||||
{indexed === 0 && removed === 0 && errors === 0 && scanned === 0 && !hasThumbnailPhase && !isMetadataBatch && !isMetadataRefresh && (
|
||||
<span className="text-sm text-muted-foreground">—</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
@@ -172,7 +197,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</tr>
|
||||
{showProgress && isActive && (
|
||||
<tr>
|
||||
<td colSpan={9} className="px-4 py-3 bg-muted/50">
|
||||
<td colSpan={8} className="px-4 py-3 bg-muted/50">
|
||||
<JobProgress
|
||||
jobId={job.id}
|
||||
onComplete={handleComplete}
|
||||
|
||||
@@ -54,21 +54,62 @@ export function JobsIndicator() {
|
||||
const [popinStyle, setPopinStyle] = useState<React.CSSProperties>({});
|
||||
|
||||
useEffect(() => {
|
||||
const fetchActiveJobs = async () => {
|
||||
try {
|
||||
const response = await fetch("/api/jobs/active");
|
||||
if (response.ok) {
|
||||
const jobs = await response.json();
|
||||
setActiveJobs(jobs);
|
||||
let eventSource: EventSource | null = null;
|
||||
let reconnectTimeout: ReturnType<typeof setTimeout> | null = null;
|
||||
|
||||
const connect = () => {
|
||||
if (eventSource) {
|
||||
eventSource.close();
|
||||
}
|
||||
eventSource = new EventSource("/api/jobs/stream");
|
||||
|
||||
eventSource.onmessage = (event) => {
|
||||
try {
|
||||
const allJobs: Job[] = JSON.parse(event.data);
|
||||
const active = allJobs.filter(j =>
|
||||
j.status === "running" || j.status === "pending" ||
|
||||
j.status === "extracting_pages" || j.status === "generating_thumbnails"
|
||||
);
|
||||
setActiveJobs(active);
|
||||
} catch {
|
||||
// ignore malformed data
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to fetch jobs:", error);
|
||||
};
|
||||
|
||||
eventSource.onerror = () => {
|
||||
eventSource?.close();
|
||||
eventSource = null;
|
||||
// Reconnect after 5s on error
|
||||
reconnectTimeout = setTimeout(connect, 5000);
|
||||
};
|
||||
};
|
||||
|
||||
const disconnect = () => {
|
||||
if (reconnectTimeout) {
|
||||
clearTimeout(reconnectTimeout);
|
||||
reconnectTimeout = null;
|
||||
}
|
||||
if (eventSource) {
|
||||
eventSource.close();
|
||||
eventSource = null;
|
||||
}
|
||||
};
|
||||
|
||||
fetchActiveJobs();
|
||||
const interval = setInterval(fetchActiveJobs, 2000);
|
||||
return () => clearInterval(interval);
|
||||
const handleVisibilityChange = () => {
|
||||
if (document.hidden) {
|
||||
disconnect();
|
||||
} else {
|
||||
connect();
|
||||
}
|
||||
};
|
||||
|
||||
connect();
|
||||
document.addEventListener("visibilitychange", handleVisibilityChange);
|
||||
|
||||
return () => {
|
||||
disconnect();
|
||||
document.removeEventListener("visibilitychange", handleVisibilityChange);
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Position the popin relative to the button
|
||||
|
||||
@@ -57,13 +57,13 @@ function getDateParts(dateStr: string): { mins: number; hours: number; useDate:
|
||||
}
|
||||
|
||||
export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListProps) {
|
||||
const { t } = useTranslation();
|
||||
const { t, locale } = useTranslation();
|
||||
const [jobs, setJobs] = useState(initialJobs);
|
||||
|
||||
const formatDate = (dateStr: string): string => {
|
||||
const parts = getDateParts(dateStr);
|
||||
if (parts.useDate) {
|
||||
return parts.date.toLocaleDateString();
|
||||
return parts.date.toLocaleDateString(locale);
|
||||
}
|
||||
if (parts.mins < 1) return t("time.justNow");
|
||||
if (parts.hours > 0) return t("time.hoursAgo", { count: parts.hours });
|
||||
@@ -117,8 +117,7 @@ export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListPro
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.library")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.type")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.status")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.files")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.thumbnails")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.stats")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.duration")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.created")}</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("jobsList.actions")}</th>
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useRef, useEffect, useTransition } from "react";
|
||||
import { useState, useTransition } from "react";
|
||||
import { createPortal } from "react-dom";
|
||||
import { Button } from "../components/ui";
|
||||
import { ProviderIcon } from "../components/ProviderIcon";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
@@ -12,6 +13,7 @@ interface LibraryActionsProps {
|
||||
watcherEnabled: boolean;
|
||||
metadataProvider: string | null;
|
||||
fallbackMetadataProvider: string | null;
|
||||
metadataRefreshMode: string;
|
||||
onUpdate?: () => void;
|
||||
}
|
||||
|
||||
@@ -22,23 +24,12 @@ export function LibraryActions({
|
||||
watcherEnabled,
|
||||
metadataProvider,
|
||||
fallbackMetadataProvider,
|
||||
onUpdate
|
||||
metadataRefreshMode,
|
||||
}: LibraryActionsProps) {
|
||||
const { t } = useTranslation();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [isPending, startTransition] = useTransition();
|
||||
const [saveError, setSaveError] = useState<string | null>(null);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
|
||||
setIsOpen(false);
|
||||
}
|
||||
};
|
||||
document.addEventListener("mousedown", handleClickOutside);
|
||||
return () => document.removeEventListener("mousedown", handleClickOutside);
|
||||
}, []);
|
||||
|
||||
const handleSubmit = (formData: FormData) => {
|
||||
setSaveError(null);
|
||||
@@ -48,6 +39,7 @@ export function LibraryActions({
|
||||
const scanMode = formData.get("scan_mode") as string;
|
||||
const newMetadataProvider = (formData.get("metadata_provider") as string) || null;
|
||||
const newFallbackProvider = (formData.get("fallback_metadata_provider") as string) || null;
|
||||
const newMetadataRefreshMode = formData.get("metadata_refresh_mode") as string;
|
||||
|
||||
try {
|
||||
const [response] = await Promise.all([
|
||||
@@ -58,6 +50,7 @@ export function LibraryActions({
|
||||
monitor_enabled: monitorEnabled,
|
||||
scan_mode: scanMode,
|
||||
watcher_enabled: watcherEnabled,
|
||||
metadata_refresh_mode: newMetadataRefreshMode,
|
||||
}),
|
||||
}),
|
||||
fetch(`/api/libraries/${libraryId}/metadata-provider`, {
|
||||
@@ -85,11 +78,11 @@ export function LibraryActions({
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="relative" ref={dropdownRef}>
|
||||
<Button
|
||||
variant="ghost"
|
||||
<>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
onClick={() => setIsOpen(true)}
|
||||
className={isOpen ? "bg-accent" : ""}
|
||||
>
|
||||
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
@@ -98,107 +91,201 @@ export function LibraryActions({
|
||||
</svg>
|
||||
</Button>
|
||||
|
||||
{isOpen && (
|
||||
<div className="absolute right-0 top-full mt-2 w-72 bg-card rounded-xl shadow-md border border-border/60 p-4 z-50">
|
||||
<form action={handleSubmit}>
|
||||
<div className="space-y-4">
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
name="monitor_enabled"
|
||||
value="true"
|
||||
defaultChecked={monitorEnabled}
|
||||
className="w-4 h-4 rounded border-border text-primary focus:ring-ring"
|
||||
/>
|
||||
{t("libraryActions.autoScan")}
|
||||
</label>
|
||||
</div>
|
||||
{isOpen && createPortal(
|
||||
<>
|
||||
{/* Backdrop */}
|
||||
<div
|
||||
className="fixed inset-0 bg-black/30 backdrop-blur-sm z-50"
|
||||
onClick={() => setIsOpen(false)}
|
||||
/>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-2">
|
||||
<input
|
||||
type="checkbox"
|
||||
name="watcher_enabled"
|
||||
value="true"
|
||||
defaultChecked={watcherEnabled}
|
||||
className="w-4 h-4 rounded border-border text-primary focus:ring-ring"
|
||||
/>
|
||||
{t("libraryActions.fileWatch")}
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium text-foreground">{t("libraryActions.schedule")}</label>
|
||||
<select
|
||||
name="scan_mode"
|
||||
defaultValue={scanMode}
|
||||
className="text-sm border border-border rounded-lg px-2 py-1 bg-background"
|
||||
{/* Modal */}
|
||||
<div className="fixed inset-0 flex items-center justify-center z-50 p-4">
|
||||
<div className="bg-card border border-border/50 rounded-xl shadow-2xl w-full max-w-lg overflow-hidden animate-in fade-in zoom-in-95 duration-200">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-5 py-4 border-b border-border/50 bg-muted/30">
|
||||
<div className="flex items-center gap-2.5">
|
||||
<svg className="w-5 h-5 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z" />
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 12a3 3 0 11-6 0 3 3 0 016 0z" />
|
||||
</svg>
|
||||
<span className="font-semibold text-lg">{t("libraryActions.settingsTitle")}</span>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setIsOpen(false)}
|
||||
className="text-muted-foreground hover:text-foreground transition-colors p-1.5 hover:bg-accent rounded-lg"
|
||||
>
|
||||
<option value="manual">{t("monitoring.manual")}</option>
|
||||
<option value="hourly">{t("monitoring.hourly")}</option>
|
||||
<option value="daily">{t("monitoring.daily")}</option>
|
||||
<option value="weekly">{t("monitoring.weekly")}</option>
|
||||
</select>
|
||||
<svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-1.5">
|
||||
{metadataProvider && <ProviderIcon provider={metadataProvider} size={16} />}
|
||||
{t("libraryActions.provider")}
|
||||
</label>
|
||||
<select
|
||||
name="metadata_provider"
|
||||
defaultValue={metadataProvider || ""}
|
||||
className="text-sm border border-border rounded-lg px-2 py-1 bg-background"
|
||||
>
|
||||
<option value="">{t("libraryActions.default")}</option>
|
||||
<option value="none">{t("libraryActions.none")}</option>
|
||||
<option value="google_books">Google Books</option>
|
||||
<option value="comicvine">ComicVine</option>
|
||||
<option value="open_library">Open Library</option>
|
||||
<option value="anilist">AniList</option>
|
||||
<option value="bedetheque">Bédéthèque</option>
|
||||
</select>
|
||||
</div>
|
||||
{/* Form */}
|
||||
<form action={handleSubmit}>
|
||||
<div className="p-6 space-y-8 max-h-[70vh] overflow-y-auto">
|
||||
|
||||
<div className="flex items-center justify-between">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-1.5">
|
||||
{fallbackMetadataProvider && fallbackMetadataProvider !== "none" && <ProviderIcon provider={fallbackMetadataProvider} size={16} />}
|
||||
{t("libraryActions.fallback")}
|
||||
</label>
|
||||
<select
|
||||
name="fallback_metadata_provider"
|
||||
defaultValue={fallbackMetadataProvider || ""}
|
||||
className="text-sm border border-border rounded-lg px-2 py-1 bg-background"
|
||||
>
|
||||
<option value="">{t("libraryActions.none")}</option>
|
||||
<option value="google_books">Google Books</option>
|
||||
<option value="comicvine">ComicVine</option>
|
||||
<option value="open_library">Open Library</option>
|
||||
<option value="anilist">AniList</option>
|
||||
<option value="bedetheque">Bédéthèque</option>
|
||||
</select>
|
||||
</div>
|
||||
{/* Section: Indexation */}
|
||||
<div className="space-y-5">
|
||||
<h3 className="flex items-center gap-2 text-sm font-semibold text-foreground uppercase tracking-wide">
|
||||
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
|
||||
</svg>
|
||||
{t("libraryActions.sectionIndexation")}
|
||||
</h3>
|
||||
|
||||
{saveError && (
|
||||
<p className="text-xs text-destructive bg-destructive/10 px-2 py-1.5 rounded-lg break-all">
|
||||
{saveError}
|
||||
</p>
|
||||
)}
|
||||
{/* Auto scan */}
|
||||
<div className="flex items-start justify-between gap-4">
|
||||
<div className="flex-1">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
name="monitor_enabled"
|
||||
value="true"
|
||||
defaultChecked={monitorEnabled}
|
||||
className="w-4 h-4 rounded border-border text-primary focus:ring-ring"
|
||||
/>
|
||||
{t("libraryActions.autoScan")}
|
||||
</label>
|
||||
<p className="text-xs text-muted-foreground mt-1.5 ml-6">{t("libraryActions.autoScanDesc")}</p>
|
||||
</div>
|
||||
<select
|
||||
name="scan_mode"
|
||||
defaultValue={scanMode}
|
||||
className="text-sm border border-border rounded-lg px-3 py-1.5 bg-background min-w-[130px] shrink-0"
|
||||
>
|
||||
<option value="manual">{t("monitoring.manual")}</option>
|
||||
<option value="hourly">{t("monitoring.hourly")}</option>
|
||||
<option value="daily">{t("monitoring.daily")}</option>
|
||||
<option value="weekly">{t("monitoring.weekly")}</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
size="sm"
|
||||
className="w-full"
|
||||
disabled={isPending}
|
||||
>
|
||||
{isPending ? t("libraryActions.saving") : t("common.save")}
|
||||
</Button>
|
||||
{/* File watcher */}
|
||||
<div>
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-2 cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
name="watcher_enabled"
|
||||
value="true"
|
||||
defaultChecked={watcherEnabled}
|
||||
className="w-4 h-4 rounded border-border text-primary focus:ring-ring"
|
||||
/>
|
||||
{t("libraryActions.fileWatch")}
|
||||
</label>
|
||||
<p className="text-xs text-muted-foreground mt-1.5 ml-6">{t("libraryActions.fileWatchDesc")}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<hr className="border-border/40" />
|
||||
|
||||
{/* Section: Metadata */}
|
||||
<div className="space-y-5">
|
||||
<h3 className="flex items-center gap-2 text-sm font-semibold text-foreground uppercase tracking-wide">
|
||||
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z" />
|
||||
</svg>
|
||||
{t("libraryActions.sectionMetadata")}
|
||||
</h3>
|
||||
|
||||
{/* Provider */}
|
||||
<div>
|
||||
<div className="flex items-center justify-between gap-4">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-1.5">
|
||||
{metadataProvider && metadataProvider !== "none" && <ProviderIcon provider={metadataProvider} size={16} />}
|
||||
{t("libraryActions.provider")}
|
||||
</label>
|
||||
<select
|
||||
name="metadata_provider"
|
||||
defaultValue={metadataProvider || ""}
|
||||
className="text-sm border border-border rounded-lg px-3 py-1.5 bg-background min-w-[160px] shrink-0"
|
||||
>
|
||||
<option value="">{t("libraryActions.default")}</option>
|
||||
<option value="none">{t("libraryActions.none")}</option>
|
||||
<option value="google_books">Google Books</option>
|
||||
<option value="comicvine">ComicVine</option>
|
||||
<option value="open_library">Open Library</option>
|
||||
<option value="anilist">AniList</option>
|
||||
<option value="bedetheque">Bédéthèque</option>
|
||||
</select>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1.5">{t("libraryActions.providerDesc")}</p>
|
||||
</div>
|
||||
|
||||
{/* Fallback */}
|
||||
<div>
|
||||
<div className="flex items-center justify-between gap-4">
|
||||
<label className="text-sm font-medium text-foreground flex items-center gap-1.5">
|
||||
{fallbackMetadataProvider && fallbackMetadataProvider !== "none" && <ProviderIcon provider={fallbackMetadataProvider} size={16} />}
|
||||
{t("libraryActions.fallback")}
|
||||
</label>
|
||||
<select
|
||||
name="fallback_metadata_provider"
|
||||
defaultValue={fallbackMetadataProvider || ""}
|
||||
className="text-sm border border-border rounded-lg px-3 py-1.5 bg-background min-w-[160px] shrink-0"
|
||||
>
|
||||
<option value="">{t("libraryActions.none")}</option>
|
||||
<option value="google_books">Google Books</option>
|
||||
<option value="comicvine">ComicVine</option>
|
||||
<option value="open_library">Open Library</option>
|
||||
<option value="anilist">AniList</option>
|
||||
<option value="bedetheque">Bédéthèque</option>
|
||||
</select>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1.5">{t("libraryActions.fallbackDesc")}</p>
|
||||
</div>
|
||||
|
||||
{/* Metadata refresh */}
|
||||
<div>
|
||||
<div className="flex items-center justify-between gap-4">
|
||||
<label className="text-sm font-medium text-foreground">{t("libraryActions.metadataRefreshSchedule")}</label>
|
||||
<select
|
||||
name="metadata_refresh_mode"
|
||||
defaultValue={metadataRefreshMode}
|
||||
className="text-sm border border-border rounded-lg px-3 py-1.5 bg-background min-w-[160px] shrink-0"
|
||||
>
|
||||
<option value="manual">{t("monitoring.manual")}</option>
|
||||
<option value="hourly">{t("monitoring.hourly")}</option>
|
||||
<option value="daily">{t("monitoring.daily")}</option>
|
||||
<option value="weekly">{t("monitoring.weekly")}</option>
|
||||
</select>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1.5">{t("libraryActions.metadataRefreshDesc")}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{saveError && (
|
||||
<p className="text-sm text-destructive bg-destructive/10 px-3 py-2 rounded-lg break-all">
|
||||
{saveError}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Footer */}
|
||||
<div className="flex items-center justify-end gap-2 px-5 py-4 border-t border-border/50 bg-muted/30">
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => setIsOpen(false)}
|
||||
>
|
||||
{t("common.cancel")}
|
||||
</Button>
|
||||
<Button
|
||||
type="submit"
|
||||
size="sm"
|
||||
disabled={isPending}
|
||||
>
|
||||
{isPending ? t("libraryActions.saving") : t("common.save")}
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</>,
|
||||
document.body
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -4,6 +4,26 @@ import { useRef, useCallback, useEffect } from "react";
|
||||
import { useRouter, useSearchParams } from "next/navigation";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
|
||||
// SVG path data for filter icons, keyed by field name
|
||||
const FILTER_ICONS: Record<string, string> = {
|
||||
// Library - building/collection
|
||||
library: "M8 14v3m4-3v3m4-3v3M3 21h18M3 10h18M3 7l9-4 9 4M4 10h16v11H4V10z",
|
||||
// Reading status - open book
|
||||
status: "M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253",
|
||||
// Series status - signal/activity
|
||||
series_status: "M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z",
|
||||
// Missing books - warning triangle
|
||||
has_missing: "M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z",
|
||||
// Metadata provider - tag
|
||||
metadata_provider: "M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z",
|
||||
// Sort - arrows up/down
|
||||
sort: "M3 4h13M3 8h9m-9 4h6m4 0l4-4m0 0l4 4m-4-4v12",
|
||||
// Format - document/file
|
||||
format: "M7 21h10a2 2 0 002-2V9.414a1 1 0 00-.293-.707l-5.414-5.414A1 1 0 0012.586 3H7a2 2 0 00-2 2v14a2 2 0 002 2z",
|
||||
// Metadata - link/chain
|
||||
metadata: "M13.828 10.172a4 4 0 00-5.656 0l-4 4a4 4 0 105.656 5.656l1.102-1.101m-.758-4.899a4 4 0 005.656 0l4-4a4 4 0 00-5.656-5.656l-1.1 1.1",
|
||||
};
|
||||
|
||||
interface FieldDef {
|
||||
name: string;
|
||||
type: "text" | "select";
|
||||
@@ -19,12 +39,17 @@ interface LiveSearchFormProps {
|
||||
debounceMs?: number;
|
||||
}
|
||||
|
||||
const STORAGE_KEY_PREFIX = "filters:";
|
||||
|
||||
export function LiveSearchForm({ fields, basePath, debounceMs = 300 }: LiveSearchFormProps) {
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const { t } = useTranslation();
|
||||
const timerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
const formRef = useRef<HTMLFormElement>(null);
|
||||
const restoredRef = useRef(false);
|
||||
|
||||
const storageKey = `${STORAGE_KEY_PREFIX}${basePath}`;
|
||||
|
||||
const buildUrl = useCallback((): string => {
|
||||
if (!formRef.current) return basePath;
|
||||
@@ -38,16 +63,58 @@ export function LiveSearchForm({ fields, basePath, debounceMs = 300 }: LiveSearc
|
||||
return qs ? `${basePath}?${qs}` : basePath;
|
||||
}, [basePath]);
|
||||
|
||||
const saveFilters = useCallback(() => {
|
||||
if (!formRef.current) return;
|
||||
const formData = new FormData(formRef.current);
|
||||
const filters: Record<string, string> = {};
|
||||
for (const [key, value] of formData.entries()) {
|
||||
const str = value.toString().trim();
|
||||
if (str) filters[key] = str;
|
||||
}
|
||||
try {
|
||||
localStorage.setItem(storageKey, JSON.stringify(filters));
|
||||
} catch {}
|
||||
}, [storageKey]);
|
||||
|
||||
const navigate = useCallback((immediate: boolean) => {
|
||||
if (timerRef.current) clearTimeout(timerRef.current);
|
||||
if (immediate) {
|
||||
saveFilters();
|
||||
router.replace(buildUrl() as any);
|
||||
} else {
|
||||
timerRef.current = setTimeout(() => {
|
||||
saveFilters();
|
||||
router.replace(buildUrl() as any);
|
||||
}, debounceMs);
|
||||
}
|
||||
}, [router, buildUrl, debounceMs]);
|
||||
}, [router, buildUrl, debounceMs, saveFilters]);
|
||||
|
||||
// Restore filters from localStorage on mount if URL has no filters
|
||||
useEffect(() => {
|
||||
if (restoredRef.current) return;
|
||||
restoredRef.current = true;
|
||||
|
||||
const hasUrlFilters = fields.some((f) => {
|
||||
const val = searchParams.get(f.name);
|
||||
return val && val.trim() !== "";
|
||||
});
|
||||
if (hasUrlFilters) return;
|
||||
|
||||
try {
|
||||
const saved = localStorage.getItem(storageKey);
|
||||
if (!saved) return;
|
||||
const filters: Record<string, string> = JSON.parse(saved);
|
||||
const fieldNames = new Set(fields.map((f) => f.name));
|
||||
const params = new URLSearchParams();
|
||||
for (const [key, value] of Object.entries(filters)) {
|
||||
if (fieldNames.has(key) && value) params.set(key, value);
|
||||
}
|
||||
const qs = params.toString();
|
||||
if (qs) {
|
||||
router.replace(`${basePath}?${qs}` as any);
|
||||
}
|
||||
} catch {}
|
||||
}, []); // eslint-disable-line react-hooks/exhaustive-deps
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
@@ -60,70 +127,99 @@ export function LiveSearchForm({ fields, basePath, debounceMs = 300 }: LiveSearc
|
||||
return val && val.trim() !== "";
|
||||
});
|
||||
|
||||
const textFields = fields.filter((f) => f.type === "text");
|
||||
const selectFields = fields.filter((f) => f.type === "select");
|
||||
|
||||
return (
|
||||
<form
|
||||
ref={formRef}
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
if (timerRef.current) clearTimeout(timerRef.current);
|
||||
saveFilters();
|
||||
router.replace(buildUrl() as any);
|
||||
}}
|
||||
className="flex flex-col sm:flex-row sm:flex-wrap gap-3 items-start sm:items-end"
|
||||
className="space-y-4"
|
||||
>
|
||||
{fields.map((field) =>
|
||||
field.type === "text" ? (
|
||||
<div key={field.name} className={field.className || "flex-1 w-full"}>
|
||||
<label className="block text-sm font-medium text-foreground mb-1.5">
|
||||
{field.label}
|
||||
</label>
|
||||
<input
|
||||
name={field.name}
|
||||
type="text"
|
||||
placeholder={field.placeholder}
|
||||
defaultValue={searchParams.get(field.name) || ""}
|
||||
onChange={() => navigate(false)}
|
||||
className="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
|
||||
/>
|
||||
{/* Search input with icon */}
|
||||
{textFields.map((field) => (
|
||||
<div key={field.name} className="relative">
|
||||
<svg
|
||||
className="absolute left-3 top-1/2 -translate-y-1/2 w-5 h-5 text-muted-foreground pointer-events-none"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
>
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
<input
|
||||
name={field.name}
|
||||
type="text"
|
||||
placeholder={field.placeholder}
|
||||
defaultValue={searchParams.get(field.name) || ""}
|
||||
onChange={() => navigate(false)}
|
||||
className="flex h-11 w-full rounded-lg border border-input bg-background pl-10 pr-3 py-2 text-sm ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
|
||||
{/* Filters row */}
|
||||
{selectFields.length > 0 && (
|
||||
<>
|
||||
{textFields.length > 0 && (
|
||||
<div className="border-t border-border/60" />
|
||||
)}
|
||||
<div className="flex flex-wrap gap-3 items-center">
|
||||
{selectFields.map((field) => (
|
||||
<div key={field.name} className="flex items-center gap-1.5">
|
||||
{FILTER_ICONS[field.name] && (
|
||||
<svg className="w-3.5 h-3.5 text-muted-foreground shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d={FILTER_ICONS[field.name]} />
|
||||
</svg>
|
||||
)}
|
||||
<label className="text-xs font-medium text-muted-foreground whitespace-nowrap">
|
||||
{field.label}
|
||||
</label>
|
||||
<select
|
||||
name={field.name}
|
||||
defaultValue={searchParams.get(field.name) || ""}
|
||||
onChange={() => navigate(true)}
|
||||
className="h-8 rounded-md border border-input bg-background px-2 py-1 text-xs ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
|
||||
>
|
||||
{field.options?.map((opt) => (
|
||||
<option key={opt.value} value={opt.value}>
|
||||
{opt.label}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
))}
|
||||
{hasFilters && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => {
|
||||
formRef.current?.reset();
|
||||
try { localStorage.removeItem(storageKey); } catch {}
|
||||
router.replace(basePath as any);
|
||||
}}
|
||||
className="
|
||||
inline-flex items-center gap-1
|
||||
h-8 px-2.5
|
||||
text-xs font-medium
|
||||
text-muted-foreground
|
||||
rounded-md
|
||||
hover:bg-accent hover:text-accent-foreground
|
||||
transition-colors duration-200
|
||||
"
|
||||
>
|
||||
<svg className="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
|
||||
</svg>
|
||||
{t("common.clear")}
|
||||
</button>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div key={field.name} className={field.className || "w-full sm:w-48"}>
|
||||
<label className="block text-sm font-medium text-foreground mb-1.5">
|
||||
{field.label}
|
||||
</label>
|
||||
<select
|
||||
name={field.name}
|
||||
defaultValue={searchParams.get(field.name) || ""}
|
||||
onChange={() => navigate(true)}
|
||||
className="flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-sm ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2"
|
||||
>
|
||||
{field.options?.map((opt) => (
|
||||
<option key={opt.value} value={opt.value}>
|
||||
{opt.label}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
</div>
|
||||
)
|
||||
)}
|
||||
{hasFilters && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => router.replace(basePath as any)}
|
||||
className="
|
||||
inline-flex items-center justify-center
|
||||
h-10 px-4
|
||||
border border-input
|
||||
text-sm font-medium
|
||||
text-muted-foreground
|
||||
bg-background
|
||||
rounded-md
|
||||
hover:bg-accent hover:text-accent-foreground
|
||||
transition-colors duration-200
|
||||
w-full sm:w-auto
|
||||
"
|
||||
>
|
||||
{t("common.clear")}
|
||||
</button>
|
||||
</>
|
||||
)}
|
||||
</form>
|
||||
);
|
||||
|
||||
@@ -683,13 +683,6 @@ export function MetadataSearchModal({
|
||||
{existingLink && existingLink.status === "approved" ? t("metadata.metadataButton") : t("metadata.searchButton")}
|
||||
</button>
|
||||
|
||||
{/* Inline badge when linked */}
|
||||
{existingLink && existingLink.status === "approved" && initialMissing && initialMissing.missing_count > 0 && (
|
||||
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full bg-yellow-500/10 text-yellow-600 text-xs border border-yellow-500/30">
|
||||
{t("series.missingCount", { count: initialMissing.missing_count, plural: initialMissing.missing_count !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{existingLink && existingLink.status === "approved" && (
|
||||
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full bg-primary/10 text-primary text-xs border border-primary/30">
|
||||
<ProviderIcon provider={existingLink.provider} size={12} />
|
||||
|
||||
@@ -7,9 +7,9 @@ import { NavIcon } from "./ui";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
|
||||
type NavItem = {
|
||||
href: "/" | "/books" | "/series" | "/libraries" | "/jobs" | "/tokens" | "/settings";
|
||||
href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings";
|
||||
label: string;
|
||||
icon: "dashboard" | "books" | "series" | "libraries" | "jobs" | "tokens" | "settings";
|
||||
icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings";
|
||||
};
|
||||
|
||||
const HamburgerIcon = () => (
|
||||
|
||||
383
apps/backoffice/app/components/ProwlarrSearchModal.tsx
Normal file
383
apps/backoffice/app/components/ProwlarrSearchModal.tsx
Normal file
@@ -0,0 +1,383 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { createPortal } from "react-dom";
|
||||
import { Icon } from "./ui";
|
||||
import type { ProwlarrRelease, ProwlarrSearchResponse } from "../../lib/api";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
|
||||
interface MissingBookItem {
|
||||
title: string | null;
|
||||
volume_number: number | null;
|
||||
external_book_id: string | null;
|
||||
}
|
||||
|
||||
interface ProwlarrSearchModalProps {
|
||||
seriesName: string;
|
||||
missingBooks: MissingBookItem[] | null;
|
||||
}
|
||||
|
||||
function formatSize(bytes: number): string {
|
||||
if (bytes >= 1073741824) return (bytes / 1073741824).toFixed(1) + " GB";
|
||||
if (bytes >= 1048576) return (bytes / 1048576).toFixed(1) + " MB";
|
||||
if (bytes >= 1024) return (bytes / 1024).toFixed(0) + " KB";
|
||||
return bytes + " B";
|
||||
}
|
||||
|
||||
export function ProwlarrSearchModal({ seriesName, missingBooks }: ProwlarrSearchModalProps) {
|
||||
const { t } = useTranslation();
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const [isConfigured, setIsConfigured] = useState<boolean | null>(null);
|
||||
const [isSearching, setIsSearching] = useState(false);
|
||||
const [results, setResults] = useState<ProwlarrRelease[]>([]);
|
||||
const [query, setQuery] = useState("");
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
// qBittorrent state
|
||||
const [isQbConfigured, setIsQbConfigured] = useState(false);
|
||||
const [sendingGuid, setSendingGuid] = useState<string | null>(null);
|
||||
const [sentGuids, setSentGuids] = useState<Set<string>>(new Set());
|
||||
const [sendError, setSendError] = useState<string | null>(null);
|
||||
|
||||
// Check if Prowlarr and qBittorrent are configured on mount
|
||||
useEffect(() => {
|
||||
fetch("/api/settings/prowlarr")
|
||||
.then((r) => (r.ok ? r.json() : null))
|
||||
.then((data) => {
|
||||
setIsConfigured(!!(data && data.api_key && data.api_key.trim()));
|
||||
})
|
||||
.catch(() => setIsConfigured(false));
|
||||
fetch("/api/settings/qbittorrent")
|
||||
.then((r) => (r.ok ? r.json() : null))
|
||||
.then((data) => {
|
||||
setIsQbConfigured(!!(data && data.url && data.url.trim() && data.username && data.username.trim()));
|
||||
})
|
||||
.catch(() => setIsQbConfigured(false));
|
||||
}, []);
|
||||
|
||||
const [searchInput, setSearchInput] = useState(`"${seriesName}"`);
|
||||
|
||||
const doSearch = useCallback(async (queryOverride?: string) => {
|
||||
const searchQuery = queryOverride ?? searchInput;
|
||||
if (!searchQuery.trim()) return;
|
||||
setIsSearching(true);
|
||||
setError(null);
|
||||
setResults([]);
|
||||
try {
|
||||
const missing_volumes = missingBooks?.map((b) => ({
|
||||
volume_number: b.volume_number,
|
||||
title: b.title,
|
||||
})) ?? undefined;
|
||||
const body = { series_name: seriesName, custom_query: searchQuery.trim(), missing_volumes };
|
||||
const resp = await fetch("/api/prowlarr/search", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
const data = await resp.json();
|
||||
if (data.error) {
|
||||
setError(data.error);
|
||||
} else {
|
||||
const searchResp = data as ProwlarrSearchResponse;
|
||||
setResults(searchResp.results);
|
||||
setQuery(searchResp.query);
|
||||
}
|
||||
} catch {
|
||||
setError(t("prowlarr.searchError"));
|
||||
} finally {
|
||||
setIsSearching(false);
|
||||
}
|
||||
}, [t, seriesName, searchInput]);
|
||||
|
||||
const defaultQuery = `"${seriesName}"`;
|
||||
|
||||
function handleOpen() {
|
||||
setIsOpen(true);
|
||||
setResults([]);
|
||||
setError(null);
|
||||
setQuery("");
|
||||
setSearchInput(defaultQuery);
|
||||
// Auto-search the series on open
|
||||
doSearch(defaultQuery);
|
||||
}
|
||||
|
||||
function handleClose() {
|
||||
setIsOpen(false);
|
||||
}
|
||||
|
||||
async function handleSendToQbittorrent(downloadUrl: string, guid: string) {
|
||||
setSendingGuid(guid);
|
||||
setSendError(null);
|
||||
try {
|
||||
const resp = await fetch("/api/qbittorrent/add", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ url: downloadUrl }),
|
||||
});
|
||||
const data = await resp.json();
|
||||
if (data.error) {
|
||||
setSendError(data.error);
|
||||
} else if (data.success) {
|
||||
setSentGuids((prev) => new Set(prev).add(guid));
|
||||
} else {
|
||||
setSendError(data.message || t("prowlarr.sentError"));
|
||||
}
|
||||
} catch {
|
||||
setSendError(t("prowlarr.sentError"));
|
||||
} finally {
|
||||
setSendingGuid(null);
|
||||
}
|
||||
}
|
||||
|
||||
// Don't render button if not configured
|
||||
if (isConfigured === false) return null;
|
||||
if (isConfigured === null) return null;
|
||||
|
||||
const modal = isOpen
|
||||
? createPortal(
|
||||
<>
|
||||
<div
|
||||
className="fixed inset-0 bg-black/30 backdrop-blur-sm z-50"
|
||||
onClick={handleClose}
|
||||
/>
|
||||
<div className="fixed inset-0 flex items-center justify-center z-50 p-4">
|
||||
<div className="bg-card border border-border/50 rounded-xl shadow-2xl w-full max-w-5xl max-h-[90vh] overflow-y-auto animate-in fade-in zoom-in-95 duration-200">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between px-5 py-4 border-b border-border/50 bg-muted/30 sticky top-0 z-10">
|
||||
<h3 className="font-semibold text-foreground">{t("prowlarr.modalTitle")}</h3>
|
||||
<button type="button" onClick={handleClose}>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" className="text-muted-foreground hover:text-foreground">
|
||||
<path d="M4 4L12 12M12 4L4 12" stroke="currentColor" strokeWidth="2" strokeLinecap="round" />
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="p-5 space-y-4">
|
||||
{/* Search input */}
|
||||
<form
|
||||
onSubmit={(e) => {
|
||||
e.preventDefault();
|
||||
if (searchInput.trim()) doSearch(searchInput.trim());
|
||||
}}
|
||||
className="flex items-center gap-2"
|
||||
>
|
||||
<input
|
||||
type="text"
|
||||
value={searchInput}
|
||||
onChange={(e) => setSearchInput(e.target.value)}
|
||||
className="flex-1 px-3 py-2 rounded-lg border border-border bg-background text-sm text-foreground placeholder:text-muted-foreground focus:outline-none focus:ring-2 focus:ring-primary/50 focus:border-primary"
|
||||
placeholder={t("prowlarr.searchPlaceholder")}
|
||||
/>
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isSearching || !searchInput.trim()}
|
||||
className="inline-flex items-center gap-1.5 px-4 py-2 rounded-lg text-sm font-medium bg-primary text-primary-foreground hover:bg-primary/90 disabled:opacity-50 transition-colors"
|
||||
>
|
||||
<Icon name="search" size="sm" />
|
||||
{t("prowlarr.searchAction")}
|
||||
</button>
|
||||
</form>
|
||||
|
||||
{/* Quick search badges */}
|
||||
<div className="flex flex-wrap items-center gap-2 max-h-24 overflow-y-auto">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => { setSearchInput(defaultQuery); doSearch(defaultQuery); }}
|
||||
disabled={isSearching}
|
||||
className="inline-flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-xs font-medium border border-primary/50 bg-primary/10 text-primary hover:bg-primary/20 disabled:opacity-50 transition-colors"
|
||||
>
|
||||
{seriesName}
|
||||
</button>
|
||||
{missingBooks && missingBooks.length > 0 && missingBooks.map((book, i) => {
|
||||
const label = book.title || `Vol. ${book.volume_number}`;
|
||||
const q = book.volume_number != null ? `"${seriesName}" ${book.volume_number}` : `"${seriesName}" ${label}`;
|
||||
return (
|
||||
<button
|
||||
key={i}
|
||||
type="button"
|
||||
onClick={() => { setSearchInput(q); doSearch(q); }}
|
||||
disabled={isSearching}
|
||||
className="inline-flex items-center gap-1.5 px-3 py-1.5 rounded-lg text-xs font-medium border border-border bg-muted/30 hover:bg-muted/50 disabled:opacity-50 transition-colors"
|
||||
>
|
||||
{label}
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
{/* Error */}
|
||||
{error && (
|
||||
<div className="p-3 rounded-lg bg-destructive/10 text-destructive text-sm">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Searching indicator */}
|
||||
{isSearching && (
|
||||
<div className="flex items-center gap-2 text-muted-foreground text-sm">
|
||||
<Icon name="spinner" size="sm" className="animate-spin" />
|
||||
{t("prowlarr.searching")}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Results */}
|
||||
{!isSearching && results.length > 0 && (
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground mb-3">
|
||||
{t("prowlarr.resultCount", { count: results.length, plural: results.length !== 1 ? "s" : "" })}
|
||||
{query && <span className="ml-1 text-xs opacity-70">({query})</span>}
|
||||
</p>
|
||||
<div className="overflow-x-auto rounded-lg border border-border">
|
||||
<table className="w-full text-sm">
|
||||
<thead>
|
||||
<tr className="bg-muted/50 text-left">
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground">{t("prowlarr.columnTitle")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground">{t("prowlarr.columnIndexer")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground text-right">{t("prowlarr.columnSize")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground text-center">{t("prowlarr.columnSeeders")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground text-center">{t("prowlarr.columnLeechers")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground">{t("prowlarr.columnProtocol")}</th>
|
||||
<th className="px-3 py-2 font-medium text-muted-foreground text-right"></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody className="divide-y divide-border">
|
||||
{results.map((release, i) => {
|
||||
const hasMissing = release.matchedMissingVolumes && release.matchedMissingVolumes.length > 0;
|
||||
return (
|
||||
<tr key={release.guid || i} className={`transition-colors ${hasMissing ? "bg-green-500/10 hover:bg-green-500/20 border-l-2 border-l-green-500" : "hover:bg-muted/20"}`}>
|
||||
<td className="px-3 py-2 max-w-[400px]">
|
||||
<span className="truncate block" title={release.title}>
|
||||
{release.title}
|
||||
</span>
|
||||
{hasMissing && (
|
||||
<div className="flex items-center gap-1 mt-1">
|
||||
{release.matchedMissingVolumes!.map((vol) => (
|
||||
<span key={vol} className="inline-flex items-center px-1.5 py-0.5 rounded text-[10px] font-medium bg-green-500/20 text-green-600">
|
||||
{t("prowlarr.missingVol", { vol })}
|
||||
</span>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</td>
|
||||
<td className="px-3 py-2 text-muted-foreground whitespace-nowrap">
|
||||
{release.indexer || "—"}
|
||||
</td>
|
||||
<td className="px-3 py-2 text-right text-muted-foreground whitespace-nowrap">
|
||||
{release.size > 0 ? formatSize(release.size) : "—"}
|
||||
</td>
|
||||
<td className="px-3 py-2 text-center">
|
||||
{release.seeders != null ? (
|
||||
<span className={release.seeders > 0 ? "text-green-500 font-medium" : "text-muted-foreground"}>
|
||||
{release.seeders}
|
||||
</span>
|
||||
) : "—"}
|
||||
</td>
|
||||
<td className="px-3 py-2 text-center text-muted-foreground">
|
||||
{release.leechers != null ? release.leechers : "—"}
|
||||
</td>
|
||||
<td className="px-3 py-2">
|
||||
{release.protocol && (
|
||||
<span className={`inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium ${
|
||||
release.protocol === "torrent"
|
||||
? "bg-blue-500/15 text-blue-600"
|
||||
: "bg-amber-500/15 text-amber-600"
|
||||
}`}>
|
||||
{release.protocol}
|
||||
</span>
|
||||
)}
|
||||
</td>
|
||||
<td className="px-3 py-2">
|
||||
<div className="flex items-center justify-end gap-1.5">
|
||||
{isQbConfigured && release.downloadUrl && (
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => handleSendToQbittorrent(release.downloadUrl!, release.guid)}
|
||||
disabled={sendingGuid === release.guid || sentGuids.has(release.guid)}
|
||||
className={`inline-flex items-center justify-center w-7 h-7 rounded-md transition-colors disabled:opacity-50 ${
|
||||
sentGuids.has(release.guid)
|
||||
? "text-green-500"
|
||||
: "text-primary hover:bg-primary/10"
|
||||
}`}
|
||||
title={sentGuids.has(release.guid) ? t("prowlarr.sentSuccess") : t("prowlarr.sendToQbittorrent")}
|
||||
>
|
||||
{sendingGuid === release.guid ? (
|
||||
<Icon name="spinner" size="sm" className="animate-spin" />
|
||||
) : sentGuids.has(release.guid) ? (
|
||||
<svg width="14" height="14" viewBox="0 0 16 16" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||
<path d="M3 8l4 4 6-7" />
|
||||
</svg>
|
||||
) : (
|
||||
<svg width="14" height="14" viewBox="0 0 16 16" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||
<path d="M14 8V14H2V2H8M10 2H14V6M14 2L7 9" />
|
||||
</svg>
|
||||
)}
|
||||
</button>
|
||||
)}
|
||||
{release.downloadUrl && (
|
||||
<a
|
||||
href={release.downloadUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="inline-flex items-center justify-center w-7 h-7 rounded-md text-primary hover:bg-primary/10 transition-colors"
|
||||
title={t("prowlarr.download")}
|
||||
>
|
||||
<svg width="14" height="14" viewBox="0 0 16 16" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
|
||||
<path d="M8 2v8M4 7l4 4 4-4M2 13h12" />
|
||||
</svg>
|
||||
</a>
|
||||
)}
|
||||
{release.infoUrl && (
|
||||
<a
|
||||
href={release.infoUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="inline-flex items-center justify-center w-7 h-7 rounded-md text-muted-foreground hover:bg-muted/50 transition-colors"
|
||||
title={t("prowlarr.info")}
|
||||
>
|
||||
<Icon name="externalLink" size="sm" />
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
);
|
||||
})}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* qBittorrent send error */}
|
||||
{sendError && (
|
||||
<div className="p-3 rounded-lg bg-destructive/10 text-destructive text-sm">
|
||||
{sendError}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* No results */}
|
||||
{!isSearching && !error && query && results.length === 0 && (
|
||||
<p className="text-sm text-muted-foreground">{t("prowlarr.noResults")}</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>,
|
||||
document.body,
|
||||
)
|
||||
: null;
|
||||
|
||||
return (
|
||||
<>
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleOpen}
|
||||
className="inline-flex items-center gap-2 px-3 py-2 rounded-lg text-sm font-medium border border-border bg-card text-muted-foreground hover:text-foreground hover:border-primary/50 transition-colors"
|
||||
>
|
||||
<Icon name="search" size="sm" />
|
||||
{t("prowlarr.searchButton")}
|
||||
</button>
|
||||
{modal}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -93,6 +93,7 @@ export function StatusBadge({ status, className = "" }: StatusBadgeProps) {
|
||||
// Job type badge
|
||||
const jobTypeVariants: Record<string, BadgeVariant> = {
|
||||
rebuild: "primary",
|
||||
rescan: "primary",
|
||||
full_rebuild: "warning",
|
||||
thumbnail_rebuild: "secondary",
|
||||
thumbnail_regenerate: "warning",
|
||||
@@ -109,11 +110,13 @@ export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
|
||||
const variant = jobTypeVariants[key] || "default";
|
||||
const jobTypeLabels: Record<string, string> = {
|
||||
rebuild: t("jobType.rebuild"),
|
||||
rescan: t("jobType.rescan"),
|
||||
full_rebuild: t("jobType.full_rebuild"),
|
||||
thumbnail_rebuild: t("jobType.thumbnail_rebuild"),
|
||||
thumbnail_regenerate: t("jobType.thumbnail_regenerate"),
|
||||
cbr_to_cbz: t("jobType.cbr_to_cbz"),
|
||||
metadata_batch: t("jobType.metadata_batch"),
|
||||
metadata_refresh: t("jobType.metadata_refresh"),
|
||||
};
|
||||
const label = jobTypeLabels[key] ?? type;
|
||||
return <Badge variant={variant} className={className}>{label}</Badge>;
|
||||
|
||||
@@ -31,7 +31,11 @@ type IconName =
|
||||
| "play"
|
||||
| "stop"
|
||||
| "spinner"
|
||||
| "warning";
|
||||
| "warning"
|
||||
| "tag"
|
||||
| "document"
|
||||
| "authors"
|
||||
| "bell";
|
||||
|
||||
type IconSize = "sm" | "md" | "lg" | "xl";
|
||||
|
||||
@@ -82,6 +86,10 @@ const icons: Record<IconName, string> = {
|
||||
stop: "M21 12a9 9 0 11-18 0 9 9 0 0118 0z M9 10a1 1 0 011-1h4a1 1 0 011 1v4a1 1 0 01-1 1h-4a1 1 0 01-1-1v-4z",
|
||||
spinner: "M4 4v5h.582m15.582 0A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15",
|
||||
warning: "M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z",
|
||||
tag: "M7 7h.01M7 3h5a1.99 1.99 0 011.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z",
|
||||
document: "M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",
|
||||
authors: "M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",
|
||||
bell: "M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9",
|
||||
};
|
||||
|
||||
const colorClasses: Partial<Record<IconName, string>> = {
|
||||
@@ -95,6 +103,7 @@ const colorClasses: Partial<Record<IconName, string>> = {
|
||||
image: "text-primary",
|
||||
cache: "text-warning",
|
||||
performance: "text-success",
|
||||
authors: "text-violet-500",
|
||||
};
|
||||
|
||||
export function Icon({ name, size = "md", className = "" }: IconProps) {
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
import { notFound } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, MetadataBatchReportDto, MetadataBatchResultDto } from "../../../lib/api";
|
||||
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto } from "../../../lib/api";
|
||||
import {
|
||||
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
||||
StatusBadge, JobTypeBadge, StatBox, ProgressBar
|
||||
} from "../../components/ui";
|
||||
import { JobDetailLive } from "../../components/JobDetailLive";
|
||||
import { getServerTranslations } from "../../../lib/i18n/server";
|
||||
|
||||
interface JobDetailPageProps {
|
||||
@@ -99,6 +102,11 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
description: t("jobType.full_rebuildDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
rescan: {
|
||||
label: t("jobType.rescanLabel"),
|
||||
description: t("jobType.rescanDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
thumbnail_rebuild: {
|
||||
label: t("jobType.thumbnail_rebuildLabel"),
|
||||
description: t("jobType.thumbnail_rebuildDesc"),
|
||||
@@ -119,9 +127,15 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
description: t("jobType.metadata_batchDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
metadata_refresh: {
|
||||
label: t("jobType.metadata_refreshLabel"),
|
||||
description: t("jobType.metadata_refreshDesc"),
|
||||
isThumbnailOnly: false,
|
||||
},
|
||||
};
|
||||
|
||||
const isMetadataBatch = job.type === "metadata_batch";
|
||||
const isMetadataRefresh = job.type === "metadata_refresh";
|
||||
|
||||
// Fetch batch report & results for metadata_batch jobs
|
||||
let batchReport: MetadataBatchReportDto | null = null;
|
||||
@@ -133,6 +147,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
]);
|
||||
}
|
||||
|
||||
// Fetch refresh report for metadata_refresh jobs
|
||||
let refreshReport: MetadataRefreshReportDto | null = null;
|
||||
if (isMetadataRefresh) {
|
||||
refreshReport = await getMetadataRefreshReport(id).catch(() => null);
|
||||
}
|
||||
|
||||
const typeInfo = JOB_TYPE_INFO[job.type] ?? {
|
||||
label: job.type,
|
||||
description: null,
|
||||
@@ -146,6 +166,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
const isCompleted = job.status === "success";
|
||||
const isFailed = job.status === "failed";
|
||||
const isCancelled = job.status === "cancelled";
|
||||
const isTerminal = isCompleted || isFailed || isCancelled;
|
||||
const isExtractingPages = job.status === "extracting_pages";
|
||||
const isThumbnailPhase = job.status === "generating_thumbnails";
|
||||
const isPhase2 = isExtractingPages || isThumbnailPhase;
|
||||
@@ -154,6 +175,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
// Which label to use for the progress card
|
||||
const progressTitle = isMetadataBatch
|
||||
? t("jobDetail.metadataSearch")
|
||||
: isMetadataRefresh
|
||||
? t("jobDetail.metadataRefresh")
|
||||
: isThumbnailOnly
|
||||
? t("jobType.thumbnail_rebuild")
|
||||
: isExtractingPages
|
||||
@@ -164,6 +187,8 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
|
||||
const progressDescription = isMetadataBatch
|
||||
? t("jobDetail.metadataSearchDesc")
|
||||
: isMetadataRefresh
|
||||
? t("jobDetail.metadataRefreshDesc")
|
||||
: isThumbnailOnly
|
||||
? undefined
|
||||
: isExtractingPages
|
||||
@@ -183,6 +208,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
|
||||
return (
|
||||
<>
|
||||
<JobDetailLive jobId={id} isTerminal={isTerminal} />
|
||||
<div className="mb-6">
|
||||
<Link
|
||||
href="/jobs"
|
||||
@@ -209,7 +235,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
— {batchReport.auto_matched} {t("jobDetail.autoMatched").toLowerCase()}, {batchReport.already_linked} {t("jobDetail.alreadyLinked").toLowerCase()}, {batchReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {batchReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && job.stats_json && (
|
||||
{isMetadataRefresh && refreshReport && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {refreshReport.refreshed} {t("jobDetail.refreshed").toLowerCase()}, {refreshReport.unchanged} {t("jobDetail.unchanged").toLowerCase()}, {refreshReport.errors} {t("jobDetail.errors").toLowerCase()}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !isMetadataRefresh && job.stats_json && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
|
||||
{job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
|
||||
@@ -218,7 +249,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
{job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
|
||||
</span>
|
||||
)}
|
||||
{!isMetadataBatch && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
{!isMetadataBatch && !isMetadataRefresh && !job.stats_json && isThumbnailOnly && job.total_files != null && (
|
||||
<span className="ml-2 text-success/80">
|
||||
— {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
|
||||
</span>
|
||||
@@ -483,7 +514,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
)}
|
||||
|
||||
{/* Index Statistics — index jobs only */}
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && (
|
||||
{job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.indexStats")}</CardTitle>
|
||||
@@ -547,6 +578,132 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata refresh report */}
|
||||
{isMetadataRefresh && refreshReport && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.refreshReport")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.refreshReportDesc", { count: String(refreshReport.total_links) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4">
|
||||
<StatBox value={refreshReport.refreshed} label={t("jobDetail.refreshed")} variant="success" />
|
||||
<StatBox value={refreshReport.unchanged} label={t("jobDetail.unchanged")} />
|
||||
<StatBox value={refreshReport.errors} label={t("jobDetail.errors")} variant={refreshReport.errors > 0 ? "error" : "default"} />
|
||||
<StatBox value={refreshReport.total_links} label={t("jobDetail.total")} />
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata refresh changes detail */}
|
||||
{isMetadataRefresh && refreshReport && refreshReport.changes.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
<CardHeader>
|
||||
<CardTitle>{t("jobDetail.refreshChanges")}</CardTitle>
|
||||
<CardDescription>{t("jobDetail.refreshChangesDesc", { count: String(refreshReport.changes.length) })}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-3 max-h-[600px] overflow-y-auto">
|
||||
{refreshReport.changes.map((r, idx) => (
|
||||
<div
|
||||
key={idx}
|
||||
className={`p-3 rounded-lg border ${
|
||||
r.status === "updated" ? "bg-success/10 border-success/20" :
|
||||
r.status === "error" ? "bg-destructive/10 border-destructive/20" :
|
||||
"bg-muted/50 border-border/60"
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
{job.library_id ? (
|
||||
<Link
|
||||
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||
className="font-medium text-sm text-primary hover:underline truncate"
|
||||
>
|
||||
{r.series_name}
|
||||
</Link>
|
||||
) : (
|
||||
<span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
|
||||
)}
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-[10px] text-muted-foreground">{r.provider}</span>
|
||||
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
|
||||
r.status === "updated" ? "bg-success/20 text-success" :
|
||||
r.status === "error" ? "bg-destructive/20 text-destructive" :
|
||||
"bg-muted text-muted-foreground"
|
||||
}`}>
|
||||
{r.status === "updated" ? t("jobDetail.refreshed") :
|
||||
r.status === "error" ? t("common.error") :
|
||||
t("jobDetail.unchanged")}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{r.error && (
|
||||
<p className="text-xs text-destructive/80 mt-1">{r.error}</p>
|
||||
)}
|
||||
|
||||
{/* Series field changes */}
|
||||
{r.series_changes.length > 0 && (
|
||||
<div className="mt-2">
|
||||
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">{t("metadata.seriesLabel")}</span>
|
||||
<div className="mt-1 space-y-1">
|
||||
{r.series_changes.map((c, ci) => (
|
||||
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||
<span className="text-muted-foreground line-through truncate max-w-[200px]" title={String(c.old ?? "—")}>
|
||||
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old)) : "—"}
|
||||
</span>
|
||||
<span className="text-success shrink-0">→</span>
|
||||
<span className="text-success truncate max-w-[200px]" title={String(c.new ?? "—")}>
|
||||
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new)) : "—"}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Book field changes */}
|
||||
{r.book_changes.length > 0 && (
|
||||
<div className="mt-2">
|
||||
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">
|
||||
{t("metadata.booksLabel")} ({r.book_changes.length})
|
||||
</span>
|
||||
<div className="mt-1 space-y-2">
|
||||
{r.book_changes.map((b, bi) => (
|
||||
<div key={bi} className="pl-2 border-l-2 border-border/60">
|
||||
<Link
|
||||
href={`/books/${b.book_id}`}
|
||||
className="text-xs text-primary hover:underline font-medium"
|
||||
>
|
||||
{b.volume != null && <span className="text-muted-foreground mr-1">T.{b.volume}</span>}
|
||||
{b.title}
|
||||
</Link>
|
||||
<div className="mt-0.5 space-y-0.5">
|
||||
{b.changes.map((c, ci) => (
|
||||
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||
<span className="text-muted-foreground line-through truncate max-w-[150px]" title={String(c.old ?? "—")}>
|
||||
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old).substring(0, 60)) : "—"}
|
||||
</span>
|
||||
<span className="text-success shrink-0">→</span>
|
||||
<span className="text-success truncate max-w-[150px]" title={String(c.new ?? "—")}>
|
||||
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new).substring(0, 60)) : "—"}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{/* Metadata batch results */}
|
||||
{isMetadataBatch && batchResults.length > 0 && (
|
||||
<Card className="lg:col-span-2">
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { JobsList } from "../components/JobsList";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "../components/ui";
|
||||
import { getServerTranslations } from "../../lib/i18n/server";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
@@ -33,6 +33,14 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function triggerRescan(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
const result = await rebuildIndex(libraryId || undefined, false, true);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function triggerThumbnailsRebuild(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
@@ -52,10 +60,62 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
async function triggerMetadataBatch(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
if (!libraryId) return;
|
||||
const result = await startMetadataBatch(libraryId);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
if (libraryId) {
|
||||
let result;
|
||||
try {
|
||||
result = await startMetadataBatch(libraryId);
|
||||
} catch {
|
||||
// Library may have metadata disabled — ignore silently
|
||||
return;
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
} else {
|
||||
// All libraries — skip those with metadata disabled
|
||||
const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
|
||||
let lastId: string | undefined;
|
||||
for (const lib of allLibraries) {
|
||||
if (lib.metadata_provider === "none") continue;
|
||||
try {
|
||||
const result = await startMetadataBatch(lib.id);
|
||||
if (result.status !== "already_running") lastId = result.id;
|
||||
} catch {
|
||||
// Library may have metadata disabled or other issue — skip
|
||||
}
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
|
||||
}
|
||||
}
|
||||
|
||||
async function triggerMetadataRefresh(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
if (libraryId) {
|
||||
let result;
|
||||
try {
|
||||
result = await startMetadataRefresh(libraryId);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
} else {
|
||||
// All libraries — skip those with metadata disabled
|
||||
const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
|
||||
let lastId: string | undefined;
|
||||
for (const lib of allLibraries) {
|
||||
if (lib.metadata_provider === "none") continue;
|
||||
try {
|
||||
const result = await startMetadataRefresh(lib.id);
|
||||
if (result.status !== "already_running") lastId = result.id;
|
||||
} catch {
|
||||
// Library may have metadata disabled or no approved links — skip
|
||||
}
|
||||
}
|
||||
revalidatePath("/jobs");
|
||||
redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
@@ -76,8 +136,8 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<form>
|
||||
<FormRow>
|
||||
<FormField className="flex-1 max-w-xs">
|
||||
<div className="mb-6">
|
||||
<FormField className="max-w-xs">
|
||||
<FormSelect name="library_id" defaultValue="">
|
||||
<option value="">{t("jobs.allLibraries")}</option>
|
||||
{libraries.map((lib) => (
|
||||
@@ -85,106 +145,117 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
))}
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
<Button type="submit" formAction={triggerRebuild}>
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</div>
|
||||
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||
|
||||
{/* Indexation group */}
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
|
||||
</svg>
|
||||
{t("jobs.rebuild")}
|
||||
</Button>
|
||||
<Button type="submit" formAction={triggerFullRebuild} variant="warning">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||
</svg>
|
||||
{t("jobs.fullRebuild")}
|
||||
</Button>
|
||||
<Button type="submit" formAction={triggerThumbnailsRebuild} variant="secondary">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
{t("jobs.groupIndexation")}
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<button type="submit" formAction={triggerRebuild}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.rebuild")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rebuildShort")}</p>
|
||||
</button>
|
||||
<button type="submit" formAction={triggerRescan}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.rescan")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rescanShort")}</p>
|
||||
</button>
|
||||
<button type="submit" formAction={triggerFullRebuild}
|
||||
className="w-full text-left rounded-lg border border-destructive/30 bg-destructive/5 p-3 hover:bg-destructive/10 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-destructive shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-destructive">{t("jobs.fullRebuild")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.fullRebuildShort")}</p>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Thumbnails group */}
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||
</svg>
|
||||
{t("jobs.generateThumbnails")}
|
||||
</Button>
|
||||
<Button type="submit" formAction={triggerThumbnailsRegenerate} variant="warning">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
{t("jobs.regenerateThumbnails")}
|
||||
</Button>
|
||||
<Button type="submit" formAction={triggerMetadataBatch} variant="secondary">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
{t("jobs.batchMetadata")}
|
||||
</Button>
|
||||
{t("jobs.groupThumbnails")}
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<button type="submit" formAction={triggerThumbnailsRebuild}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6v6m0 0v6m0-6h6m-6 0H6" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.generateThumbnails")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.generateThumbnailsShort")}</p>
|
||||
</button>
|
||||
<button type="submit" formAction={triggerThumbnailsRegenerate}
|
||||
className="w-full text-left rounded-lg border border-warning/30 bg-warning/5 p-3 hover:bg-warning/10 transition-colors group cursor-pointer">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-warning shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-warning">{t("jobs.regenerateThumbnails")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.regenerateThumbnailsShort")}</p>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</FormRow>
|
||||
</form>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Job types legend */}
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="text-base">{t("jobs.referenceTitle")}</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 text-sm">
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.rebuild")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.rebuildDescription") }} />
|
||||
{/* Metadata group */}
|
||||
<div className="space-y-3">
|
||||
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z" />
|
||||
</svg>
|
||||
{t("jobs.groupMetadata")}
|
||||
</div>
|
||||
<div className="space-y-2">
|
||||
<button type="submit" formAction={triggerMetadataBatch}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.batchMetadata")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.batchMetadataShort")}</p>
|
||||
</button>
|
||||
<button type="submit" formAction={triggerMetadataRefresh}
|
||||
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
|
||||
<div className="flex items-center gap-2">
|
||||
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
<span className="font-medium text-sm text-foreground">{t("jobs.refreshMetadata")}</span>
|
||||
</div>
|
||||
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.refreshMetadataShort")}</p>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.fullRebuild")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.fullRebuildDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-muted-foreground" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.generateThumbnails")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.generateThumbnailsDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.regenerateThumbnails")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.regenerateThumbnailsDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex gap-3">
|
||||
<div className="shrink-0 mt-0.5">
|
||||
<svg className="w-5 h-5 text-muted-foreground" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium text-foreground">{t("jobs.batchMetadata")}</span>
|
||||
<p className="text-muted-foreground text-xs mt-0.5" dangerouslySetInnerHTML={{ __html: t("jobs.batchMetadataDescription") }} />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
|
||||
@@ -18,15 +18,16 @@ export const metadata: Metadata = {
|
||||
};
|
||||
|
||||
type NavItem = {
|
||||
href: "/" | "/books" | "/series" | "/libraries" | "/jobs" | "/tokens" | "/settings";
|
||||
href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings";
|
||||
labelKey: TranslationKey;
|
||||
icon: "dashboard" | "books" | "series" | "libraries" | "jobs" | "tokens" | "settings";
|
||||
icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings";
|
||||
};
|
||||
|
||||
const navItems: NavItem[] = [
|
||||
{ href: "/", labelKey: "nav.dashboard", icon: "dashboard" },
|
||||
{ href: "/books", labelKey: "nav.books", icon: "books" },
|
||||
{ href: "/series", labelKey: "nav.series", icon: "series" },
|
||||
{ href: "/authors", labelKey: "nav.authors", icon: "authors" },
|
||||
{ href: "/libraries", labelKey: "nav.libraries", icon: "libraries" },
|
||||
{ href: "/jobs", labelKey: "nav.jobs", icon: "jobs" },
|
||||
{ href: "/tokens", labelKey: "nav.tokens", icon: "tokens" },
|
||||
|
||||
@@ -2,12 +2,21 @@ import { fetchLibraries, fetchBooks, fetchSeriesMetadata, getBookCoverUrl, getMe
|
||||
import { BooksGrid, EmptyState } from "../../../../components/BookCard";
|
||||
import { MarkSeriesReadButton } from "../../../../components/MarkSeriesReadButton";
|
||||
import { MarkBookReadButton } from "../../../../components/MarkBookReadButton";
|
||||
import { EditSeriesForm } from "../../../../components/EditSeriesForm";
|
||||
import { MetadataSearchModal } from "../../../../components/MetadataSearchModal";
|
||||
import nextDynamic from "next/dynamic";
|
||||
import { OffsetPagination } from "../../../../components/ui";
|
||||
import { SafeHtml } from "../../../../components/SafeHtml";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
|
||||
const EditSeriesForm = nextDynamic(
|
||||
() => import("../../../../components/EditSeriesForm").then(m => m.EditSeriesForm)
|
||||
);
|
||||
const MetadataSearchModal = nextDynamic(
|
||||
() => import("../../../../components/MetadataSearchModal").then(m => m.MetadataSearchModal)
|
||||
);
|
||||
const ProwlarrSearchModal = nextDynamic(
|
||||
() => import("../../../../components/ProwlarrSearchModal").then(m => m.ProwlarrSearchModal)
|
||||
);
|
||||
import { notFound } from "next/navigation";
|
||||
import { getServerTranslations } from "../../../../../lib/i18n/server";
|
||||
|
||||
@@ -93,7 +102,7 @@ export default async function SeriesDetailPage({
|
||||
alt={t("books.coverOf", { name: displayName })}
|
||||
fill
|
||||
className="object-cover"
|
||||
unoptimized
|
||||
sizes="160px"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
@@ -138,10 +147,10 @@ export default async function SeriesDetailPage({
|
||||
</span>
|
||||
<span className="w-px h-4 bg-border" />
|
||||
<span className="text-muted-foreground">
|
||||
{t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total) })}
|
||||
{t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
|
||||
{/* Progress bar */}
|
||||
{/* Reading progress bar */}
|
||||
<div className="flex items-center gap-2 flex-1 min-w-[120px] max-w-[200px]">
|
||||
<div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
|
||||
<div
|
||||
@@ -150,6 +159,22 @@ export default async function SeriesDetailPage({
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Collection progress bar (owned / expected) */}
|
||||
{missingData && missingData.total_external > 0 && (
|
||||
<>
|
||||
<span className="w-px h-4 bg-border" />
|
||||
<span className="text-muted-foreground">
|
||||
{booksPage.total}/{missingData.total_external} — {t("series.missingCount", { count: missingData.missing_count, plural: missingData.missing_count !== 1 ? "s" : "" })}
|
||||
</span>
|
||||
<div className="w-[150px] h-2 bg-muted rounded-full overflow-hidden">
|
||||
<div
|
||||
className="h-full bg-amber-500 rounded-full transition-all"
|
||||
style={{ width: `${Math.round((booksPage.total / missingData.total_external) * 100)}%` }}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex flex-wrap items-center gap-3">
|
||||
@@ -171,6 +196,10 @@ export default async function SeriesDetailPage({
|
||||
currentStatus={seriesMeta?.status ?? null}
|
||||
currentLockedFields={seriesMeta?.locked_fields ?? {}}
|
||||
/>
|
||||
<ProwlarrSearchModal
|
||||
seriesName={seriesName}
|
||||
missingBooks={missingData?.missing_books ?? null}
|
||||
/>
|
||||
<MetadataSearchModal
|
||||
libraryId={id}
|
||||
seriesName={seriesName}
|
||||
|
||||
@@ -86,7 +86,7 @@ export default async function LibrarySeriesPage({
|
||||
alt={t("books.coverOf", { name: s.name })}
|
||||
fill
|
||||
className="object-cover"
|
||||
unoptimized
|
||||
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 20vw"
|
||||
/>
|
||||
</div>
|
||||
<div className="p-3">
|
||||
@@ -95,7 +95,7 @@ export default async function LibrarySeriesPage({
|
||||
</h3>
|
||||
<div className="flex items-center justify-between mt-1">
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count) })}
|
||||
{t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
|
||||
</p>
|
||||
<MarkSeriesReadButton
|
||||
seriesName={s.name}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import Image from "next/image";
|
||||
import Link from "next/link";
|
||||
import { listFolders, createLibrary, deleteLibrary, fetchLibraries, fetchSeries, scanLibrary, startMetadataBatch, LibraryDto, FolderItem } from "../../lib/api";
|
||||
import { listFolders, createLibrary, deleteLibrary, fetchLibraries, getBookCoverUrl, LibraryDto, FolderItem } from "../../lib/api";
|
||||
import type { TranslationKey } from "../../lib/i18n/fr";
|
||||
import { getServerTranslations } from "../../lib/i18n/server";
|
||||
import { LibraryActions } from "../components/LibraryActions";
|
||||
import { LibraryForm } from "../components/LibraryForm";
|
||||
import { ProviderIcon } from "../components/ProviderIcon";
|
||||
import {
|
||||
Card, CardHeader, CardTitle, CardDescription, CardContent,
|
||||
Button, Badge
|
||||
@@ -31,18 +34,12 @@ export default async function LibrariesPage() {
|
||||
listFolders().catch(() => [] as FolderItem[])
|
||||
]);
|
||||
|
||||
const seriesCounts = await Promise.all(
|
||||
libraries.map(async (lib) => {
|
||||
try {
|
||||
const seriesPage = await fetchSeries(lib.id);
|
||||
return { id: lib.id, count: seriesPage.items.length };
|
||||
} catch {
|
||||
return { id: lib.id, count: 0 };
|
||||
}
|
||||
})
|
||||
const thumbnailMap = new Map(
|
||||
libraries.map(lib => [
|
||||
lib.id,
|
||||
(lib.thumbnail_book_ids || []).map(bookId => getBookCoverUrl(bookId)),
|
||||
])
|
||||
);
|
||||
|
||||
const seriesCountMap = new Map(seriesCounts.map(s => [s.id, s.count]));
|
||||
|
||||
async function addLibrary(formData: FormData) {
|
||||
"use server";
|
||||
@@ -61,30 +58,6 @@ export default async function LibrariesPage() {
|
||||
revalidatePath("/libraries");
|
||||
}
|
||||
|
||||
async function scanLibraryAction(formData: FormData) {
|
||||
"use server";
|
||||
const id = formData.get("id") as string;
|
||||
await scanLibrary(id);
|
||||
revalidatePath("/libraries");
|
||||
revalidatePath("/jobs");
|
||||
}
|
||||
|
||||
async function scanLibraryFullAction(formData: FormData) {
|
||||
"use server";
|
||||
const id = formData.get("id") as string;
|
||||
await scanLibrary(id, true);
|
||||
revalidatePath("/libraries");
|
||||
revalidatePath("/jobs");
|
||||
}
|
||||
|
||||
async function batchMetadataAction(formData: FormData) {
|
||||
"use server";
|
||||
const id = formData.get("id") as string;
|
||||
await startMetadataBatch(id);
|
||||
revalidatePath("/libraries");
|
||||
revalidatePath("/jobs");
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="mb-6">
|
||||
@@ -95,7 +68,7 @@ export default async function LibrariesPage() {
|
||||
{t("libraries.title")}
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
|
||||
{/* Add Library Form */}
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
@@ -110,101 +83,140 @@ export default async function LibrariesPage() {
|
||||
{/* Libraries Grid */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
{libraries.map((lib) => {
|
||||
const seriesCount = seriesCountMap.get(lib.id) || 0;
|
||||
const thumbnails = thumbnailMap.get(lib.id) || [];
|
||||
return (
|
||||
<Card key={lib.id} className="flex flex-col">
|
||||
<Card key={lib.id} className="flex flex-col overflow-hidden">
|
||||
{/* Thumbnail fan */}
|
||||
{thumbnails.length > 0 ? (
|
||||
<Link href={`/libraries/${lib.id}/series`} className="block relative h-48 overflow-hidden bg-muted/10">
|
||||
<Image
|
||||
src={thumbnails[0]}
|
||||
alt=""
|
||||
fill
|
||||
className="object-cover blur-xl scale-110 opacity-40"
|
||||
sizes="(max-width: 768px) 100vw, 33vw"
|
||||
loading="lazy"
|
||||
/>
|
||||
<div className="absolute inset-0 flex items-end justify-center">
|
||||
{thumbnails.map((url, i) => {
|
||||
const count = thumbnails.length;
|
||||
const mid = (count - 1) / 2;
|
||||
const angle = (i - mid) * 12;
|
||||
const radius = 220;
|
||||
const rad = ((angle - 90) * Math.PI) / 180;
|
||||
const cx = Math.cos(rad) * radius;
|
||||
const cy = Math.sin(rad) * radius;
|
||||
return (
|
||||
<Image
|
||||
key={i}
|
||||
src={url}
|
||||
alt=""
|
||||
width={96}
|
||||
height={144}
|
||||
className="absolute object-cover shadow-lg"
|
||||
style={{
|
||||
transform: `translate(${cx}px, ${cy}px) rotate(${angle}deg)`,
|
||||
transformOrigin: 'bottom center',
|
||||
zIndex: count - Math.abs(Math.round(i - mid)),
|
||||
bottom: '-185px',
|
||||
}}
|
||||
sizes="96px"
|
||||
loading="lazy"
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</Link>
|
||||
) : (
|
||||
<div className="h-8 bg-muted/10" />
|
||||
)}
|
||||
|
||||
<CardHeader className="pb-2">
|
||||
<div className="flex items-start justify-between">
|
||||
<div>
|
||||
<CardTitle className="text-lg">{lib.name}</CardTitle>
|
||||
{!lib.enabled && <Badge variant="muted" className="mt-1">{t("libraries.disabled")}</Badge>}
|
||||
</div>
|
||||
<LibraryActions
|
||||
libraryId={lib.id}
|
||||
monitorEnabled={lib.monitor_enabled}
|
||||
scanMode={lib.scan_mode}
|
||||
watcherEnabled={lib.watcher_enabled}
|
||||
metadataProvider={lib.metadata_provider}
|
||||
fallbackMetadataProvider={lib.fallback_metadata_provider}
|
||||
/>
|
||||
<div className="flex items-center gap-1">
|
||||
<LibraryActions
|
||||
libraryId={lib.id}
|
||||
monitorEnabled={lib.monitor_enabled}
|
||||
scanMode={lib.scan_mode}
|
||||
watcherEnabled={lib.watcher_enabled}
|
||||
metadataProvider={lib.metadata_provider}
|
||||
fallbackMetadataProvider={lib.fallback_metadata_provider}
|
||||
metadataRefreshMode={lib.metadata_refresh_mode}
|
||||
/>
|
||||
<form>
|
||||
<input type="hidden" name="id" value={lib.id} />
|
||||
<Button type="submit" variant="ghost" size="sm" formAction={removeLibrary} className="text-muted-foreground hover:text-destructive">
|
||||
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||
</svg>
|
||||
</Button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
<code className="text-xs font-mono text-muted-foreground break-all">{lib.root_path}</code>
|
||||
</CardHeader>
|
||||
<CardContent className="flex-1 pt-0">
|
||||
{/* Path */}
|
||||
<code className="text-xs font-mono text-muted-foreground mb-4 break-all block">{lib.root_path}</code>
|
||||
|
||||
{/* Stats */}
|
||||
<div className="grid grid-cols-2 gap-3 mb-4">
|
||||
<Link
|
||||
href={`/libraries/${lib.id}/books`}
|
||||
className="text-center p-3 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
|
||||
<div className="grid grid-cols-2 gap-3 mb-3">
|
||||
<Link
|
||||
href={`/libraries/${lib.id}/books`}
|
||||
className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
|
||||
>
|
||||
<span className="block text-2xl font-bold text-primary">{lib.book_count}</span>
|
||||
<span className="text-xs text-muted-foreground">{t("libraries.books")}</span>
|
||||
</Link>
|
||||
<Link
|
||||
href={`/libraries/${lib.id}/series`}
|
||||
className="text-center p-3 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
|
||||
<Link
|
||||
href={`/libraries/${lib.id}/series`}
|
||||
className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
|
||||
>
|
||||
<span className="block text-2xl font-bold text-foreground">{seriesCount}</span>
|
||||
<span className="block text-2xl font-bold text-foreground">{lib.series_count}</span>
|
||||
<span className="text-xs text-muted-foreground">{t("libraries.series")}</span>
|
||||
</Link>
|
||||
</div>
|
||||
|
||||
{/* Status */}
|
||||
<div className="flex items-center gap-3 mb-4 text-sm">
|
||||
<span className={`flex items-center gap-1 ${lib.monitor_enabled ? 'text-success' : 'text-muted-foreground'}`}>
|
||||
{lib.monitor_enabled ? '●' : '○'} {lib.monitor_enabled ? t("libraries.auto") : t("libraries.manual")}
|
||||
{/* Configuration tags */}
|
||||
<div className="flex flex-wrap gap-1.5">
|
||||
<span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
|
||||
lib.monitor_enabled
|
||||
? 'bg-success/10 text-success'
|
||||
: 'bg-muted/50 text-muted-foreground'
|
||||
}`}>
|
||||
<span className="text-[9px]">{lib.monitor_enabled ? '●' : '○'}</span>
|
||||
{t("libraries.scanLabel", { mode: t(`monitoring.${lib.scan_mode}` as TranslationKey) })}
|
||||
</span>
|
||||
{lib.watcher_enabled && (
|
||||
<span className="text-warning" title="Surveillance de fichiers active">⚡</span>
|
||||
|
||||
<span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
|
||||
lib.watcher_enabled
|
||||
? 'bg-warning/10 text-warning'
|
||||
: 'bg-muted/50 text-muted-foreground'
|
||||
}`}>
|
||||
<span>{lib.watcher_enabled ? '⚡' : '○'}</span>
|
||||
<span>{t("libraries.watcherLabel")}</span>
|
||||
</span>
|
||||
|
||||
{lib.metadata_provider && lib.metadata_provider !== "none" && (
|
||||
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-primary/10 text-primary">
|
||||
<ProviderIcon provider={lib.metadata_provider} size={11} />
|
||||
{lib.metadata_provider.replace('_', ' ')}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{lib.metadata_refresh_mode !== "manual" && (
|
||||
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
|
||||
{t("libraries.metaRefreshLabel", { mode: t(`monitoring.${lib.metadata_refresh_mode}` as TranslationKey) })}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{lib.monitor_enabled && lib.next_scan_at && (
|
||||
<span className="text-xs text-muted-foreground ml-auto">
|
||||
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
|
||||
{t("libraries.nextScan", { time: formatNextScan(lib.next_scan_at, t("libraries.imminent")) })}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex items-center gap-2">
|
||||
<form className="flex-1">
|
||||
<input type="hidden" name="id" value={lib.id} />
|
||||
<Button type="submit" variant="default" size="sm" className="w-full" formAction={scanLibraryAction}>
|
||||
<svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
{t("libraries.index")}
|
||||
</Button>
|
||||
</form>
|
||||
<form className="flex-1">
|
||||
<input type="hidden" name="id" value={lib.id} />
|
||||
<Button type="submit" variant="secondary" size="sm" className="w-full" formAction={scanLibraryFullAction}>
|
||||
<svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
{t("libraries.fullIndex")}
|
||||
</Button>
|
||||
</form>
|
||||
{lib.metadata_provider !== "none" && (
|
||||
<form>
|
||||
<input type="hidden" name="id" value={lib.id} />
|
||||
<Button type="submit" variant="secondary" size="sm" formAction={batchMetadataAction} title={t("libraries.batchMetadata")}>
|
||||
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||
</svg>
|
||||
</Button>
|
||||
</form>
|
||||
)}
|
||||
<form>
|
||||
<input type="hidden" name="id" value={lib.id} />
|
||||
<Button type="submit" variant="destructive" size="sm" formAction={removeLibrary}>
|
||||
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||
</svg>
|
||||
</Button>
|
||||
</form>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
|
||||
@@ -137,7 +137,7 @@ export default async function DashboardPage() {
|
||||
);
|
||||
}
|
||||
|
||||
const { overview, reading_status, by_format, by_language, by_library, top_series, additions_over_time } = stats;
|
||||
const { overview, reading_status, by_format, by_language, by_library, top_series, additions_over_time, metadata } = stats;
|
||||
|
||||
const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
|
||||
const formatColors = [
|
||||
@@ -231,6 +231,69 @@ export default async function DashboardPage() {
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
{/* Metadata row */}
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||
{/* Series metadata coverage donut */}
|
||||
<Card hover={false}>
|
||||
<CardHeader>
|
||||
<CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<DonutChart
|
||||
locale={locale}
|
||||
noDataLabel={noDataLabel}
|
||||
data={[
|
||||
{ label: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
|
||||
{ label: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
|
||||
]}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* By provider donut */}
|
||||
<Card hover={false}>
|
||||
<CardHeader>
|
||||
<CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<DonutChart
|
||||
locale={locale}
|
||||
noDataLabel={noDataLabel}
|
||||
data={metadata.by_provider.map((p, i) => ({
|
||||
label: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
|
||||
value: p.count,
|
||||
color: formatColors[i % formatColors.length],
|
||||
}))}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Book metadata quality */}
|
||||
<Card hover={false}>
|
||||
<CardHeader>
|
||||
<CardTitle className="text-base">{t("dashboard.bookMetadata")}</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
<HorizontalBar
|
||||
label={t("dashboard.withSummary")}
|
||||
value={metadata.books_with_summary}
|
||||
max={overview.total_books}
|
||||
subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_summary / overview.total_books) * 100)}%` : "0%"}
|
||||
color="hsl(198 78% 37%)"
|
||||
/>
|
||||
<HorizontalBar
|
||||
label={t("dashboard.withIsbn")}
|
||||
value={metadata.books_with_isbn}
|
||||
max={overview.total_books}
|
||||
subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_isbn / overview.total_books) * 100)}%` : "0%"}
|
||||
color="hsl(280 60% 50%)"
|
||||
/>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
{/* Second row */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||
{/* Monthly additions bar chart */}
|
||||
|
||||
@@ -99,13 +99,13 @@ export default async function SeriesPage({
|
||||
<LiveSearchForm
|
||||
basePath="/series"
|
||||
fields={[
|
||||
{ name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder"), className: "flex-1 w-full" },
|
||||
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions, className: "w-full sm:w-44" },
|
||||
{ name: "status", type: "select", label: t("series.reading"), options: statusOptions, className: "w-full sm:w-32" },
|
||||
{ name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions, className: "w-full sm:w-36" },
|
||||
{ name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions, className: "w-full sm:w-36" },
|
||||
{ name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions, className: "w-full sm:w-36" },
|
||||
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions, className: "w-full sm:w-32" },
|
||||
{ name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder") },
|
||||
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions },
|
||||
{ name: "status", type: "select", label: t("series.reading"), options: statusOptions },
|
||||
{ name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions },
|
||||
{ name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions },
|
||||
{ name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions },
|
||||
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
|
||||
]}
|
||||
/>
|
||||
</CardContent>
|
||||
@@ -138,7 +138,7 @@ export default async function SeriesPage({
|
||||
alt={t("books.coverOf", { name: s.name })}
|
||||
fill
|
||||
className="object-cover"
|
||||
unoptimized
|
||||
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
|
||||
/>
|
||||
</div>
|
||||
<div className="p-3">
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import { useState, useEffect, useCallback, useMemo } from "react";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui";
|
||||
import { ProviderIcon } from "../components/ProviderIcon";
|
||||
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, KomgaSyncResponse, KomgaSyncReportSummary } from "../../lib/api";
|
||||
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, KomgaSyncResponse, KomgaSyncReportSummary, StatusMappingDto } from "../../lib/api";
|
||||
import { useTranslation } from "../../lib/i18n/context";
|
||||
import type { Locale } from "../../lib/i18n/types";
|
||||
|
||||
@@ -150,11 +150,12 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
||||
}
|
||||
}
|
||||
|
||||
const [activeTab, setActiveTab] = useState<"general" | "integrations">("general");
|
||||
const [activeTab, setActiveTab] = useState<"general" | "integrations" | "notifications">("general");
|
||||
|
||||
const tabs = [
|
||||
{ id: "general" as const, label: t("settings.general"), icon: "settings" as const },
|
||||
{ id: "integrations" as const, label: t("settings.integrations"), icon: "refresh" as const },
|
||||
{ id: "notifications" as const, label: t("settings.notifications"), icon: "bell" as const },
|
||||
];
|
||||
|
||||
return (
|
||||
@@ -577,6 +578,15 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
||||
{/* Metadata Providers */}
|
||||
<MetadataProvidersCard handleUpdateSetting={handleUpdateSetting} />
|
||||
|
||||
{/* Status Mappings */}
|
||||
<StatusMappingsCard />
|
||||
|
||||
{/* Prowlarr */}
|
||||
<ProwlarrCard handleUpdateSetting={handleUpdateSetting} />
|
||||
|
||||
{/* qBittorrent */}
|
||||
<QBittorrentCard handleUpdateSetting={handleUpdateSetting} />
|
||||
|
||||
{/* Komga Sync */}
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
@@ -725,7 +735,7 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
{new Date(r.created_at).toLocaleString()}
|
||||
{new Date(r.created_at).toLocaleString(locale)}
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground truncate ml-2" title={r.komga_url}>
|
||||
{r.komga_url}
|
||||
@@ -817,6 +827,11 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>)}
|
||||
|
||||
{activeTab === "notifications" && (<>
|
||||
{/* Telegram Notifications */}
|
||||
<TelegramCard handleUpdateSetting={handleUpdateSetting} />
|
||||
</>)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -988,3 +1003,737 @@ function MetadataProvidersCard({ handleUpdateSetting }: { handleUpdateSetting: (
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Status Mappings sub-component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function StatusMappingsCard() {
|
||||
const { t } = useTranslation();
|
||||
const [mappings, setMappings] = useState<StatusMappingDto[]>([]);
|
||||
const [targetStatuses, setTargetStatuses] = useState<string[]>([]);
|
||||
const [providerStatuses, setProviderStatuses] = useState<string[]>([]);
|
||||
const [newTargetName, setNewTargetName] = useState("");
|
||||
const [loading, setLoading] = useState(true);
|
||||
|
||||
const loadData = useCallback(async () => {
|
||||
try {
|
||||
const [mRes, sRes, pRes] = await Promise.all([
|
||||
fetch("/api/settings/status-mappings").then((r) => r.ok ? r.json() : []),
|
||||
fetch("/api/series/statuses").then((r) => r.ok ? r.json() : []),
|
||||
fetch("/api/series/provider-statuses").then((r) => r.ok ? r.json() : []),
|
||||
]);
|
||||
setMappings(mRes);
|
||||
setTargetStatuses(sRes);
|
||||
setProviderStatuses(pRes);
|
||||
} catch {
|
||||
// ignore
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => { loadData(); }, [loadData]);
|
||||
|
||||
// Group mappings by target status (only those with a non-null mapped_status)
|
||||
const grouped = useMemo(() => {
|
||||
const map = new Map<string, StatusMappingDto[]>();
|
||||
for (const m of mappings) {
|
||||
if (m.mapped_status) {
|
||||
const list = map.get(m.mapped_status) || [];
|
||||
list.push(m);
|
||||
map.set(m.mapped_status, list);
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}, [mappings]);
|
||||
|
||||
// Unmapped = mappings with null mapped_status + provider statuses not in status_mappings at all
|
||||
const knownProviderStatuses = useMemo(
|
||||
() => new Set(mappings.map((m) => m.provider_status)),
|
||||
[mappings],
|
||||
);
|
||||
const unmappedMappings = useMemo(
|
||||
() => mappings.filter((m) => !m.mapped_status),
|
||||
[mappings],
|
||||
);
|
||||
const newProviderStatuses = useMemo(
|
||||
() => providerStatuses.filter((ps) => !knownProviderStatuses.has(ps)),
|
||||
[providerStatuses, knownProviderStatuses],
|
||||
);
|
||||
|
||||
// All possible targets = existing statuses from DB + custom ones added locally
|
||||
const [customTargets, setCustomTargets] = useState<string[]>([]);
|
||||
const allTargets = useMemo(() => {
|
||||
const set = new Set([...targetStatuses, ...customTargets]);
|
||||
return [...set].sort();
|
||||
}, [targetStatuses, customTargets]);
|
||||
|
||||
async function handleAssign(providerStatus: string, targetStatus: string) {
|
||||
if (!providerStatus || !targetStatus) return;
|
||||
try {
|
||||
const res = await fetch("/api/settings/status-mappings", {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ provider_status: providerStatus, mapped_status: targetStatus }),
|
||||
});
|
||||
if (res.ok) {
|
||||
const created: StatusMappingDto = await res.json();
|
||||
setMappings((prev) => [...prev.filter((m) => m.provider_status !== created.provider_status), created]);
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
async function handleUnmap(id: string) {
|
||||
try {
|
||||
const res = await fetch(`/api/settings/status-mappings/${id}`, { method: "DELETE" });
|
||||
if (res.ok) {
|
||||
const updated: StatusMappingDto = await res.json();
|
||||
setMappings((prev) => prev.map((m) => (m.id === id ? updated : m)));
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
function handleCreateTarget() {
|
||||
const name = newTargetName.trim().toLowerCase();
|
||||
if (!name || allTargets.includes(name)) return;
|
||||
setCustomTargets((prev) => [...prev, name]);
|
||||
setNewTargetName("");
|
||||
}
|
||||
|
||||
function statusLabel(status: string) {
|
||||
const key = `seriesStatus.${status}` as Parameters<typeof t>[0];
|
||||
const translated = t(key);
|
||||
return translated !== key ? translated : status;
|
||||
}
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<Card className="mb-6">
|
||||
<CardContent><p className="text-muted-foreground py-4">{t("common.loading")}</p></CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="settings" size="md" />
|
||||
{t("settings.statusMappings")}
|
||||
</CardTitle>
|
||||
<CardDescription>{t("settings.statusMappingsDesc")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
{/* Create new target status */}
|
||||
<div className="flex gap-2 items-center">
|
||||
<FormInput
|
||||
placeholder={t("settings.newTargetPlaceholder")}
|
||||
value={newTargetName}
|
||||
onChange={(e) => setNewTargetName(e.target.value)}
|
||||
onKeyDown={(e) => { if (e.key === "Enter") handleCreateTarget(); }}
|
||||
className="max-w-[250px]"
|
||||
/>
|
||||
<Button
|
||||
onClick={handleCreateTarget}
|
||||
disabled={!newTargetName.trim() || allTargets.includes(newTargetName.trim().toLowerCase())}
|
||||
>
|
||||
<Icon name="plus" size="sm" />
|
||||
{t("settings.createTargetStatus")}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Grouped by target status */}
|
||||
{allTargets.map((target) => {
|
||||
const items = grouped.get(target) || [];
|
||||
return (
|
||||
<div key={target} className="border border-border/50 rounded-lg p-3">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className="text-sm font-medium text-foreground">
|
||||
{statusLabel(target)}
|
||||
</span>
|
||||
<span className="text-xs text-muted-foreground font-mono">({target})</span>
|
||||
</div>
|
||||
<div className="flex flex-wrap gap-2">
|
||||
{items.map((m) => (
|
||||
<span
|
||||
key={m.id}
|
||||
className="inline-flex items-center gap-1 px-2 py-1 rounded-md bg-muted/50 text-sm font-mono"
|
||||
>
|
||||
{m.provider_status}
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => handleUnmap(m.id)}
|
||||
className="ml-1 text-muted-foreground hover:text-destructive transition-colors"
|
||||
title={t("common.delete")}
|
||||
>
|
||||
<Icon name="x" size="sm" />
|
||||
</button>
|
||||
</span>
|
||||
))}
|
||||
{items.length === 0 && (
|
||||
<span className="text-xs text-muted-foreground italic">{t("settings.noMappings")}</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Unmapped provider statuses (null mapped_status + brand new from providers) */}
|
||||
{(unmappedMappings.length > 0 || newProviderStatuses.length > 0) && (
|
||||
<div className="border-t border-border/50 pt-4">
|
||||
<h4 className="text-sm font-medium text-foreground mb-3">{t("settings.unmappedSection")}</h4>
|
||||
<div className="space-y-2">
|
||||
{unmappedMappings.map((m) => (
|
||||
<div key={m.id} className="flex items-center gap-2">
|
||||
<span className="text-sm font-mono bg-muted/50 px-2 py-1 rounded-md min-w-[120px]">{m.provider_status}</span>
|
||||
<Icon name="chevronRight" size="sm" />
|
||||
<FormSelect
|
||||
className="w-auto"
|
||||
value=""
|
||||
onChange={(e) => { if (e.target.value) handleAssign(m.provider_status, e.target.value); }}
|
||||
>
|
||||
<option value="">{t("settings.selectTargetStatus")}</option>
|
||||
{allTargets.map((s) => (
|
||||
<option key={s} value={s}>{statusLabel(s)}</option>
|
||||
))}
|
||||
</FormSelect>
|
||||
</div>
|
||||
))}
|
||||
{newProviderStatuses.map((ps) => (
|
||||
<div key={ps} className="flex items-center gap-2">
|
||||
<span className="text-sm font-mono bg-muted/50 px-2 py-1 rounded-md min-w-[120px]">{ps}</span>
|
||||
<Icon name="chevronRight" size="sm" />
|
||||
<FormSelect
|
||||
className="w-auto"
|
||||
value=""
|
||||
onChange={(e) => { if (e.target.value) handleAssign(ps, e.target.value); }}
|
||||
>
|
||||
<option value="">{t("settings.selectTargetStatus")}</option>
|
||||
{allTargets.map((s) => (
|
||||
<option key={s} value={s}>{statusLabel(s)}</option>
|
||||
))}
|
||||
</FormSelect>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Prowlarr sub-component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function ProwlarrCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
||||
const { t } = useTranslation();
|
||||
const [prowlarrUrl, setProwlarrUrl] = useState("");
|
||||
const [prowlarrApiKey, setProwlarrApiKey] = useState("");
|
||||
const [prowlarrCategories, setProwlarrCategories] = useState("7030, 7020");
|
||||
const [isTesting, setIsTesting] = useState(false);
|
||||
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
fetch("/api/settings/prowlarr")
|
||||
.then((r) => (r.ok ? r.json() : null))
|
||||
.then((data) => {
|
||||
if (data) {
|
||||
if (data.url) setProwlarrUrl(data.url);
|
||||
if (data.api_key) setProwlarrApiKey(data.api_key);
|
||||
if (data.categories) setProwlarrCategories(data.categories.join(", "));
|
||||
}
|
||||
})
|
||||
.catch(() => {});
|
||||
}, []);
|
||||
|
||||
function saveProwlarr(url?: string, apiKey?: string, cats?: string) {
|
||||
const categories = (cats ?? prowlarrCategories)
|
||||
.split(",")
|
||||
.map((s) => parseInt(s.trim()))
|
||||
.filter((n) => !isNaN(n));
|
||||
handleUpdateSetting("prowlarr", {
|
||||
url: url ?? prowlarrUrl,
|
||||
api_key: apiKey ?? prowlarrApiKey,
|
||||
categories,
|
||||
});
|
||||
}
|
||||
|
||||
async function handleTestConnection() {
|
||||
setIsTesting(true);
|
||||
setTestResult(null);
|
||||
try {
|
||||
const resp = await fetch("/api/prowlarr/test");
|
||||
const data = await resp.json();
|
||||
if (data.error) {
|
||||
setTestResult({ success: false, message: data.error });
|
||||
} else {
|
||||
setTestResult(data);
|
||||
}
|
||||
} catch {
|
||||
setTestResult({ success: false, message: "Failed to connect" });
|
||||
} finally {
|
||||
setIsTesting(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="search" size="md" />
|
||||
{t("settings.prowlarr")}
|
||||
</CardTitle>
|
||||
<CardDescription>{t("settings.prowlarrDesc")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrUrl")}</label>
|
||||
<FormInput
|
||||
type="url"
|
||||
placeholder={t("settings.prowlarrUrlPlaceholder")}
|
||||
value={prowlarrUrl}
|
||||
onChange={(e) => setProwlarrUrl(e.target.value)}
|
||||
onBlur={() => saveProwlarr()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrApiKey")}</label>
|
||||
<FormInput
|
||||
type="password"
|
||||
placeholder={t("settings.prowlarrApiKeyPlaceholder")}
|
||||
value={prowlarrApiKey}
|
||||
onChange={(e) => setProwlarrApiKey(e.target.value)}
|
||||
onBlur={() => saveProwlarr()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrCategories")}</label>
|
||||
<FormInput
|
||||
type="text"
|
||||
placeholder="7030, 7020"
|
||||
value={prowlarrCategories}
|
||||
onChange={(e) => setProwlarrCategories(e.target.value)}
|
||||
onBlur={() => saveProwlarr()}
|
||||
/>
|
||||
<p className="text-xs text-muted-foreground mt-1">{t("settings.prowlarrCategoriesHelp")}</p>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<Button
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTesting || !prowlarrUrl || !prowlarrApiKey}
|
||||
>
|
||||
{isTesting ? (
|
||||
<>
|
||||
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
|
||||
{t("settings.testing")}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Icon name="refresh" size="sm" className="mr-2" />
|
||||
{t("settings.testConnection")}
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
{testResult && (
|
||||
<span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
|
||||
{testResult.message}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// qBittorrent sub-component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
||||
const { t } = useTranslation();
|
||||
const [qbUrl, setQbUrl] = useState("");
|
||||
const [qbUsername, setQbUsername] = useState("");
|
||||
const [qbPassword, setQbPassword] = useState("");
|
||||
const [isTesting, setIsTesting] = useState(false);
|
||||
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
fetch("/api/settings/qbittorrent")
|
||||
.then((r) => (r.ok ? r.json() : null))
|
||||
.then((data) => {
|
||||
if (data) {
|
||||
if (data.url) setQbUrl(data.url);
|
||||
if (data.username) setQbUsername(data.username);
|
||||
if (data.password) setQbPassword(data.password);
|
||||
}
|
||||
})
|
||||
.catch(() => {});
|
||||
}, []);
|
||||
|
||||
function saveQbittorrent() {
|
||||
handleUpdateSetting("qbittorrent", {
|
||||
url: qbUrl,
|
||||
username: qbUsername,
|
||||
password: qbPassword,
|
||||
});
|
||||
}
|
||||
|
||||
async function handleTestConnection() {
|
||||
setIsTesting(true);
|
||||
setTestResult(null);
|
||||
try {
|
||||
const resp = await fetch("/api/qbittorrent/test");
|
||||
const data = await resp.json();
|
||||
if (data.error) {
|
||||
setTestResult({ success: false, message: data.error });
|
||||
} else {
|
||||
setTestResult(data);
|
||||
}
|
||||
} catch {
|
||||
setTestResult({ success: false, message: "Failed to connect" });
|
||||
} finally {
|
||||
setIsTesting(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="settings" size="md" />
|
||||
{t("settings.qbittorrent")}
|
||||
</CardTitle>
|
||||
<CardDescription>{t("settings.qbittorrentDesc")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentUrl")}</label>
|
||||
<FormInput
|
||||
type="url"
|
||||
placeholder={t("settings.qbittorrentUrlPlaceholder")}
|
||||
value={qbUrl}
|
||||
onChange={(e) => setQbUrl(e.target.value)}
|
||||
onBlur={() => saveQbittorrent()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentUsername")}</label>
|
||||
<FormInput
|
||||
type="text"
|
||||
value={qbUsername}
|
||||
onChange={(e) => setQbUsername(e.target.value)}
|
||||
onBlur={() => saveQbittorrent()}
|
||||
/>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentPassword")}</label>
|
||||
<FormInput
|
||||
type="password"
|
||||
value={qbPassword}
|
||||
onChange={(e) => setQbPassword(e.target.value)}
|
||||
onBlur={() => saveQbittorrent()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<Button
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTesting || !qbUrl || !qbUsername}
|
||||
>
|
||||
{isTesting ? (
|
||||
<>
|
||||
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
|
||||
{t("settings.testing")}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Icon name="refresh" size="sm" className="mr-2" />
|
||||
{t("settings.testConnection")}
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
{testResult && (
|
||||
<span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
|
||||
{testResult.message}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Telegram Notifications sub-component
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
const DEFAULT_EVENTS = {
|
||||
scan_completed: true,
|
||||
scan_failed: true,
|
||||
scan_cancelled: true,
|
||||
thumbnail_completed: true,
|
||||
thumbnail_failed: true,
|
||||
conversion_completed: true,
|
||||
conversion_failed: true,
|
||||
metadata_approved: true,
|
||||
metadata_batch_completed: true,
|
||||
metadata_batch_failed: true,
|
||||
metadata_refresh_completed: true,
|
||||
metadata_refresh_failed: true,
|
||||
};
|
||||
|
||||
function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
||||
const { t } = useTranslation();
|
||||
const [botToken, setBotToken] = useState("");
|
||||
const [chatId, setChatId] = useState("");
|
||||
const [enabled, setEnabled] = useState(false);
|
||||
const [events, setEvents] = useState(DEFAULT_EVENTS);
|
||||
const [isTesting, setIsTesting] = useState(false);
|
||||
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
|
||||
const [showHelp, setShowHelp] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
fetch("/api/settings/telegram")
|
||||
.then((r) => (r.ok ? r.json() : null))
|
||||
.then((data) => {
|
||||
if (data) {
|
||||
if (data.bot_token) setBotToken(data.bot_token);
|
||||
if (data.chat_id) setChatId(data.chat_id);
|
||||
if (data.enabled !== undefined) setEnabled(data.enabled);
|
||||
if (data.events) setEvents({ ...DEFAULT_EVENTS, ...data.events });
|
||||
}
|
||||
})
|
||||
.catch(() => {});
|
||||
}, []);
|
||||
|
||||
function saveTelegram(token?: string, chat?: string, en?: boolean, ev?: typeof events) {
|
||||
handleUpdateSetting("telegram", {
|
||||
bot_token: token ?? botToken,
|
||||
chat_id: chat ?? chatId,
|
||||
enabled: en ?? enabled,
|
||||
events: ev ?? events,
|
||||
});
|
||||
}
|
||||
|
||||
async function handleTestConnection() {
|
||||
setIsTesting(true);
|
||||
setTestResult(null);
|
||||
try {
|
||||
const resp = await fetch("/api/telegram/test");
|
||||
const data = await resp.json();
|
||||
if (data.error) {
|
||||
setTestResult({ success: false, message: data.error });
|
||||
} else {
|
||||
setTestResult(data);
|
||||
}
|
||||
} catch {
|
||||
setTestResult({ success: false, message: "Failed to connect" });
|
||||
} finally {
|
||||
setIsTesting(false);
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="bell" size="md" />
|
||||
{t("settings.telegram")}
|
||||
</CardTitle>
|
||||
<CardDescription>{t("settings.telegramDesc")}</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
{/* Setup guide */}
|
||||
<div>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setShowHelp(!showHelp)}
|
||||
className="text-sm text-primary hover:text-primary/80 flex items-center gap-1 transition-colors"
|
||||
>
|
||||
<Icon name={showHelp ? "chevronDown" : "chevronRight"} size="sm" />
|
||||
{t("settings.telegramHelp")}
|
||||
</button>
|
||||
{showHelp && (
|
||||
<div className="mt-3 p-4 rounded-lg bg-muted/30 space-y-3 text-sm text-foreground">
|
||||
<div>
|
||||
<p className="font-medium mb-1">1. Bot Token</p>
|
||||
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpBot") }} />
|
||||
</div>
|
||||
<div>
|
||||
<p className="font-medium mb-1">2. Chat ID</p>
|
||||
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpChat") }} />
|
||||
</div>
|
||||
<div>
|
||||
<p className="font-medium mb-1">3. Group chat</p>
|
||||
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpGroup") }} />
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<label className="relative inline-flex items-center cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={enabled}
|
||||
onChange={(e) => {
|
||||
setEnabled(e.target.checked);
|
||||
saveTelegram(undefined, undefined, e.target.checked);
|
||||
}}
|
||||
className="sr-only peer"
|
||||
/>
|
||||
<div className="w-11 h-6 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-primary"></div>
|
||||
</label>
|
||||
<span className="text-sm font-medium text-foreground">{t("settings.telegramEnabled")}</span>
|
||||
</div>
|
||||
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.botToken")}</label>
|
||||
<FormInput
|
||||
type="password"
|
||||
placeholder={t("settings.botTokenPlaceholder")}
|
||||
value={botToken}
|
||||
onChange={(e) => setBotToken(e.target.value)}
|
||||
onBlur={() => saveTelegram()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.chatId")}</label>
|
||||
<FormInput
|
||||
type="text"
|
||||
placeholder={t("settings.chatIdPlaceholder")}
|
||||
value={chatId}
|
||||
onChange={(e) => setChatId(e.target.value)}
|
||||
onBlur={() => saveTelegram()}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
|
||||
{/* Event toggles grouped by category */}
|
||||
<div className="border-t border-border/50 pt-4">
|
||||
<h4 className="text-sm font-medium text-foreground mb-4">{t("settings.telegramEvents")}</h4>
|
||||
<div className="grid grid-cols-2 gap-x-6 gap-y-5">
|
||||
{([
|
||||
{
|
||||
category: t("settings.eventCategoryScan"),
|
||||
icon: "search" as const,
|
||||
items: [
|
||||
{ key: "scan_completed" as const, label: t("settings.eventCompleted") },
|
||||
{ key: "scan_failed" as const, label: t("settings.eventFailed") },
|
||||
{ key: "scan_cancelled" as const, label: t("settings.eventCancelled") },
|
||||
],
|
||||
},
|
||||
{
|
||||
category: t("settings.eventCategoryThumbnail"),
|
||||
icon: "image" as const,
|
||||
items: [
|
||||
{ key: "thumbnail_completed" as const, label: t("settings.eventCompleted") },
|
||||
{ key: "thumbnail_failed" as const, label: t("settings.eventFailed") },
|
||||
],
|
||||
},
|
||||
{
|
||||
category: t("settings.eventCategoryConversion"),
|
||||
icon: "refresh" as const,
|
||||
items: [
|
||||
{ key: "conversion_completed" as const, label: t("settings.eventCompleted") },
|
||||
{ key: "conversion_failed" as const, label: t("settings.eventFailed") },
|
||||
],
|
||||
},
|
||||
{
|
||||
category: t("settings.eventCategoryMetadata"),
|
||||
icon: "tag" as const,
|
||||
items: [
|
||||
{ key: "metadata_approved" as const, label: t("settings.eventLinked") },
|
||||
{ key: "metadata_batch_completed" as const, label: t("settings.eventBatchCompleted") },
|
||||
{ key: "metadata_batch_failed" as const, label: t("settings.eventBatchFailed") },
|
||||
{ key: "metadata_refresh_completed" as const, label: t("settings.eventRefreshCompleted") },
|
||||
{ key: "metadata_refresh_failed" as const, label: t("settings.eventRefreshFailed") },
|
||||
],
|
||||
},
|
||||
]).map(({ category, icon, items }) => (
|
||||
<div key={category}>
|
||||
<p className="text-xs font-medium text-muted-foreground uppercase tracking-wide mb-2 flex items-center gap-1.5">
|
||||
<Icon name={icon} size="sm" className="text-muted-foreground" />
|
||||
{category}
|
||||
</p>
|
||||
<div className="space-y-1">
|
||||
{items.map(({ key, label }) => (
|
||||
<label key={key} className="flex items-center justify-between py-1.5 cursor-pointer group">
|
||||
<span className="text-sm text-foreground group-hover:text-foreground/80">{label}</span>
|
||||
<div className="relative">
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={events[key]}
|
||||
onChange={(e) => {
|
||||
const updated = { ...events, [key]: e.target.checked };
|
||||
setEvents(updated);
|
||||
saveTelegram(undefined, undefined, undefined, updated);
|
||||
}}
|
||||
className="sr-only peer"
|
||||
/>
|
||||
<div className="w-9 h-5 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-4 after:w-4 after:transition-all peer-checked:bg-primary" />
|
||||
</div>
|
||||
</label>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<Button
|
||||
onClick={handleTestConnection}
|
||||
disabled={isTesting || !botToken || !chatId || !enabled}
|
||||
>
|
||||
{isTesting ? (
|
||||
<>
|
||||
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
|
||||
{t("settings.testing")}
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Icon name="refresh" size="sm" className="mr-2" />
|
||||
{t("settings.testConnection")}
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
{testResult && (
|
||||
<span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
|
||||
{testResult.message}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,6 +10,10 @@ export type LibraryDto = {
|
||||
watcher_enabled: boolean;
|
||||
metadata_provider: string | null;
|
||||
fallback_metadata_provider: string | null;
|
||||
metadata_refresh_mode: string;
|
||||
next_metadata_refresh_at: string | null;
|
||||
series_count: number;
|
||||
thumbnail_book_ids: string[];
|
||||
};
|
||||
|
||||
export type IndexJobDto = {
|
||||
@@ -137,7 +141,7 @@ export function config() {
|
||||
|
||||
export async function apiFetch<T>(
|
||||
path: string,
|
||||
init?: RequestInit,
|
||||
init?: RequestInit & { next?: { revalidate?: number; tags?: string[] } },
|
||||
): Promise<T> {
|
||||
const { baseUrl, token } = config();
|
||||
const headers = new Headers(init?.headers || {});
|
||||
@@ -146,10 +150,12 @@ export async function apiFetch<T>(
|
||||
headers.set("Content-Type", "application/json");
|
||||
}
|
||||
|
||||
const { next: nextOptions, ...restInit } = init ?? {};
|
||||
|
||||
const res = await fetch(`${baseUrl}${path}`, {
|
||||
...init,
|
||||
...restInit,
|
||||
headers,
|
||||
cache: "no-store",
|
||||
...(nextOptions ? { next: nextOptions } : { cache: "no-store" as const }),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -164,7 +170,7 @@ export async function apiFetch<T>(
|
||||
}
|
||||
|
||||
export async function fetchLibraries() {
|
||||
return apiFetch<LibraryDto[]>("/libraries");
|
||||
return apiFetch<LibraryDto[]>("/libraries", { next: { revalidate: 30 } });
|
||||
}
|
||||
|
||||
export async function createLibrary(name: string, rootPath: string) {
|
||||
@@ -192,11 +198,13 @@ export async function updateLibraryMonitoring(
|
||||
monitorEnabled: boolean,
|
||||
scanMode: string,
|
||||
watcherEnabled?: boolean,
|
||||
metadataRefreshMode?: string,
|
||||
) {
|
||||
const body: {
|
||||
monitor_enabled: boolean;
|
||||
scan_mode: string;
|
||||
watcher_enabled?: boolean;
|
||||
metadata_refresh_mode?: string;
|
||||
} = {
|
||||
monitor_enabled: monitorEnabled,
|
||||
scan_mode: scanMode,
|
||||
@@ -204,6 +212,9 @@ export async function updateLibraryMonitoring(
|
||||
if (watcherEnabled !== undefined) {
|
||||
body.watcher_enabled = watcherEnabled;
|
||||
}
|
||||
if (metadataRefreshMode !== undefined) {
|
||||
body.metadata_refresh_mode = metadataRefreshMode;
|
||||
}
|
||||
return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, {
|
||||
method: "PATCH",
|
||||
body: JSON.stringify(body),
|
||||
@@ -214,10 +225,11 @@ export async function listJobs() {
|
||||
return apiFetch<IndexJobDto[]>("/index/status");
|
||||
}
|
||||
|
||||
export async function rebuildIndex(libraryId?: string, full?: boolean) {
|
||||
const body: { library_id?: string; full?: boolean } = {};
|
||||
export async function rebuildIndex(libraryId?: string, full?: boolean, rescan?: boolean) {
|
||||
const body: { library_id?: string; full?: boolean; rescan?: boolean } = {};
|
||||
if (libraryId) body.library_id = libraryId;
|
||||
if (full) body.full = true;
|
||||
if (rescan) body.rescan = true;
|
||||
return apiFetch<IndexJobDto>("/index/rebuild", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
@@ -277,12 +289,18 @@ export async function fetchBooks(
|
||||
limit: number = 50,
|
||||
readingStatus?: string,
|
||||
sort?: string,
|
||||
author?: string,
|
||||
format?: string,
|
||||
metadataProvider?: string,
|
||||
): Promise<BooksPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
if (series) params.set("series", series);
|
||||
if (readingStatus) params.set("reading_status", readingStatus);
|
||||
if (sort) params.set("sort", sort);
|
||||
if (author) params.set("author", author);
|
||||
if (format) params.set("format", format);
|
||||
if (metadataProvider) params.set("metadata_provider", metadataProvider);
|
||||
params.set("page", page.toString());
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
@@ -324,6 +342,7 @@ export async function fetchAllSeries(
|
||||
seriesStatus?: string,
|
||||
hasMissing?: boolean,
|
||||
metadataProvider?: string,
|
||||
author?: string,
|
||||
): Promise<SeriesPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
@@ -333,6 +352,7 @@ export async function fetchAllSeries(
|
||||
if (seriesStatus) params.set("series_status", seriesStatus);
|
||||
if (hasMissing) params.set("has_missing", "true");
|
||||
if (metadataProvider) params.set("metadata_provider", metadataProvider);
|
||||
if (author) params.set("author", author);
|
||||
params.set("page", page.toString());
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
@@ -340,7 +360,7 @@ export async function fetchAllSeries(
|
||||
}
|
||||
|
||||
export async function fetchSeriesStatuses(): Promise<string[]> {
|
||||
return apiFetch<string[]>("/series/statuses");
|
||||
return apiFetch<string[]>("/series/statuses", { next: { revalidate: 300 } });
|
||||
}
|
||||
|
||||
export async function searchBooks(
|
||||
@@ -405,7 +425,7 @@ export type ThumbnailStats = {
|
||||
};
|
||||
|
||||
export async function getSettings() {
|
||||
return apiFetch<Settings>("/settings");
|
||||
return apiFetch<Settings>("/settings", { next: { revalidate: 60 } });
|
||||
}
|
||||
|
||||
export async function updateSetting(key: string, value: unknown) {
|
||||
@@ -416,7 +436,7 @@ export async function updateSetting(key: string, value: unknown) {
|
||||
}
|
||||
|
||||
export async function getCacheStats() {
|
||||
return apiFetch<CacheStats>("/settings/cache/stats");
|
||||
return apiFetch<CacheStats>("/settings/cache/stats", { next: { revalidate: 30 } });
|
||||
}
|
||||
|
||||
export async function clearCache() {
|
||||
@@ -426,7 +446,29 @@ export async function clearCache() {
|
||||
}
|
||||
|
||||
export async function getThumbnailStats() {
|
||||
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
|
||||
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats", { next: { revalidate: 30 } });
|
||||
}
|
||||
|
||||
// Status mappings
|
||||
export type StatusMappingDto = {
|
||||
id: string;
|
||||
provider_status: string;
|
||||
mapped_status: string | null;
|
||||
};
|
||||
|
||||
export async function fetchStatusMappings(): Promise<StatusMappingDto[]> {
|
||||
return apiFetch<StatusMappingDto[]>("/settings/status-mappings", { next: { revalidate: 60 } });
|
||||
}
|
||||
|
||||
export async function upsertStatusMapping(provider_status: string, mapped_status: string): Promise<StatusMappingDto> {
|
||||
return apiFetch<StatusMappingDto>("/settings/status-mappings", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ provider_status, mapped_status }),
|
||||
});
|
||||
}
|
||||
|
||||
export async function deleteStatusMapping(id: string): Promise<void> {
|
||||
await apiFetch<unknown>(`/settings/status-mappings/${id}`, { method: "DELETE" });
|
||||
}
|
||||
|
||||
export async function convertBook(bookId: string) {
|
||||
@@ -494,6 +536,20 @@ export type MonthlyAdditions = {
|
||||
books_added: number;
|
||||
};
|
||||
|
||||
export type ProviderCount = {
|
||||
provider: string;
|
||||
count: number;
|
||||
};
|
||||
|
||||
export type MetadataStats = {
|
||||
total_series: number;
|
||||
series_linked: number;
|
||||
series_unlinked: number;
|
||||
books_with_summary: number;
|
||||
books_with_isbn: number;
|
||||
by_provider: ProviderCount[];
|
||||
};
|
||||
|
||||
export type StatsResponse = {
|
||||
overview: StatsOverview;
|
||||
reading_status: ReadingStatusStats;
|
||||
@@ -502,10 +558,43 @@ export type StatsResponse = {
|
||||
by_library: LibraryStatsItem[];
|
||||
top_series: TopSeriesItem[];
|
||||
additions_over_time: MonthlyAdditions[];
|
||||
metadata: MetadataStats;
|
||||
};
|
||||
|
||||
export async function fetchStats() {
|
||||
return apiFetch<StatsResponse>("/stats");
|
||||
return apiFetch<StatsResponse>("/stats", { next: { revalidate: 30 } });
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Authors
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export type AuthorDto = {
|
||||
name: string;
|
||||
book_count: number;
|
||||
series_count: number;
|
||||
};
|
||||
|
||||
export type AuthorsPageDto = {
|
||||
items: AuthorDto[];
|
||||
total: number;
|
||||
page: number;
|
||||
limit: number;
|
||||
};
|
||||
|
||||
export async function fetchAuthors(
|
||||
q?: string,
|
||||
page: number = 1,
|
||||
limit: number = 20,
|
||||
sort?: string,
|
||||
): Promise<AuthorsPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (q) params.set("q", q);
|
||||
if (sort) params.set("sort", sort);
|
||||
params.set("page", page.toString());
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
return apiFetch<AuthorsPageDto>(`/authors?${params.toString()}`);
|
||||
}
|
||||
|
||||
export type UpdateBookRequest = {
|
||||
@@ -788,6 +877,49 @@ export async function startMetadataBatch(libraryId: string) {
|
||||
});
|
||||
}
|
||||
|
||||
export async function startMetadataRefresh(libraryId: string) {
|
||||
return apiFetch<{ id: string; status: string }>("/metadata/refresh", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ library_id: libraryId }),
|
||||
});
|
||||
}
|
||||
|
||||
export type RefreshFieldDiff = {
|
||||
field: string;
|
||||
old?: unknown;
|
||||
new?: unknown;
|
||||
};
|
||||
|
||||
export type RefreshBookDiff = {
|
||||
book_id: string;
|
||||
title: string;
|
||||
volume: number | null;
|
||||
changes: RefreshFieldDiff[];
|
||||
};
|
||||
|
||||
export type RefreshSeriesResult = {
|
||||
series_name: string;
|
||||
provider: string;
|
||||
status: string; // "updated" | "unchanged" | "error"
|
||||
series_changes: RefreshFieldDiff[];
|
||||
book_changes: RefreshBookDiff[];
|
||||
error?: string;
|
||||
};
|
||||
|
||||
export type MetadataRefreshReportDto = {
|
||||
job_id: string;
|
||||
status: string;
|
||||
total_links: number;
|
||||
refreshed: number;
|
||||
unchanged: number;
|
||||
errors: number;
|
||||
changes: RefreshSeriesResult[];
|
||||
};
|
||||
|
||||
export async function getMetadataRefreshReport(jobId: string) {
|
||||
return apiFetch<MetadataRefreshReportDto>(`/metadata/refresh/${jobId}/report`);
|
||||
}
|
||||
|
||||
export async function getMetadataBatchReport(jobId: string) {
|
||||
return apiFetch<MetadataBatchReportDto>(`/metadata/batch/${jobId}/report`);
|
||||
}
|
||||
@@ -796,3 +928,53 @@ export async function getMetadataBatchResults(jobId: string, status?: string) {
|
||||
const params = status ? `?status=${status}` : "";
|
||||
return apiFetch<MetadataBatchResultDto[]>(`/metadata/batch/${jobId}/results${params}`);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Prowlarr
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export type ProwlarrCategory = {
|
||||
id: number;
|
||||
name: string | null;
|
||||
};
|
||||
|
||||
export type ProwlarrRelease = {
|
||||
guid: string;
|
||||
title: string;
|
||||
size: number;
|
||||
downloadUrl: string | null;
|
||||
indexer: string | null;
|
||||
seeders: number | null;
|
||||
leechers: number | null;
|
||||
publishDate: string | null;
|
||||
protocol: string | null;
|
||||
infoUrl: string | null;
|
||||
categories: ProwlarrCategory[] | null;
|
||||
matchedMissingVolumes: number[] | null;
|
||||
};
|
||||
|
||||
export type ProwlarrSearchResponse = {
|
||||
results: ProwlarrRelease[];
|
||||
query: string;
|
||||
};
|
||||
|
||||
export type ProwlarrTestResponse = {
|
||||
success: boolean;
|
||||
message: string;
|
||||
indexer_count: number | null;
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// qBittorrent
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export type QBittorrentAddResponse = {
|
||||
success: boolean;
|
||||
message: string;
|
||||
};
|
||||
|
||||
export type QBittorrentTestResponse = {
|
||||
success: boolean;
|
||||
message: string;
|
||||
version: string | null;
|
||||
};
|
||||
|
||||
@@ -75,6 +75,13 @@ const en: Record<TranslationKey, string> = {
|
||||
"dashboard.noSeries": "No series yet",
|
||||
"dashboard.unknown": "Unknown",
|
||||
"dashboard.readCount": "{{read}}/{{total}} read",
|
||||
"dashboard.metadataCoverage": "Metadata coverage",
|
||||
"dashboard.seriesLinked": "Linked series",
|
||||
"dashboard.seriesUnlinked": "Unlinked series",
|
||||
"dashboard.byProvider": "By provider",
|
||||
"dashboard.bookMetadata": "Book metadata",
|
||||
"dashboard.withSummary": "With summary",
|
||||
"dashboard.withIsbn": "With ISBN",
|
||||
|
||||
// Books page
|
||||
"books.title": "Books",
|
||||
@@ -93,6 +100,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"books.noResults": "No books found for \"{{query}}\"",
|
||||
"books.noBooks": "No books available",
|
||||
"books.coverOf": "Cover of {{name}}",
|
||||
"books.format": "Format",
|
||||
"books.allFormats": "All formats",
|
||||
|
||||
// Series page
|
||||
"series.title": "Series",
|
||||
@@ -106,6 +115,20 @@ const en: Record<TranslationKey, string> = {
|
||||
"series.missingCount": "{{count}} missing",
|
||||
"series.readCount": "{{read}}/{{total}} read",
|
||||
|
||||
// Authors page
|
||||
"nav.authors": "Authors",
|
||||
"authors.title": "Authors",
|
||||
"authors.searchPlaceholder": "Search by author name...",
|
||||
"authors.bookCount": "{{count}} book{{plural}}",
|
||||
"authors.seriesCount": "{{count}} serie{{plural}}",
|
||||
"authors.noResults": "No authors found matching your filters",
|
||||
"authors.noAuthors": "No authors available",
|
||||
"authors.matchingQuery": "matching",
|
||||
"authors.sortName": "Name",
|
||||
"authors.sortBooks": "Book count",
|
||||
"authors.booksBy": "Books by {{name}}",
|
||||
"authors.seriesBy": "Series by {{name}}",
|
||||
|
||||
// Libraries page
|
||||
"libraries.title": "Libraries",
|
||||
"libraries.addLibrary": "Add a library",
|
||||
@@ -117,6 +140,11 @@ const en: Record<TranslationKey, string> = {
|
||||
"libraries.manual": "Manual",
|
||||
"libraries.nextScan": "Next: {{time}}",
|
||||
"libraries.imminent": "Imminent",
|
||||
"libraries.nextMetadataRefresh": "Next metadata refresh: {{time}}",
|
||||
"libraries.nextMetadataRefreshShort": "Meta.: {{time}}",
|
||||
"libraries.scanLabel": "Scan: {{mode}}",
|
||||
"libraries.watcherLabel": "File watch",
|
||||
"libraries.metaRefreshLabel": "Meta refresh: {{mode}}",
|
||||
"libraries.index": "Index",
|
||||
"libraries.fullIndex": "Full",
|
||||
"libraries.batchMetadata": "Batch metadata",
|
||||
@@ -134,13 +162,22 @@ const en: Record<TranslationKey, string> = {
|
||||
"librarySeries.noBooksInSeries": "No books in this series",
|
||||
|
||||
// Library actions
|
||||
"libraryActions.autoScan": "Auto scan",
|
||||
"libraryActions.fileWatch": "File watch ⚡",
|
||||
"libraryActions.schedule": "📅 Schedule",
|
||||
"libraryActions.settingsTitle": "Library settings",
|
||||
"libraryActions.sectionIndexation": "Indexation",
|
||||
"libraryActions.sectionMetadata": "Metadata",
|
||||
"libraryActions.autoScan": "Scheduled scan",
|
||||
"libraryActions.autoScanDesc": "Automatically scan for new and modified files",
|
||||
"libraryActions.fileWatch": "Real-time file watch",
|
||||
"libraryActions.fileWatchDesc": "Detect file changes instantly via filesystem events",
|
||||
"libraryActions.schedule": "Frequency",
|
||||
"libraryActions.provider": "Provider",
|
||||
"libraryActions.fallback": "Fallback",
|
||||
"libraryActions.providerDesc": "Source used to fetch series and volume metadata",
|
||||
"libraryActions.fallback": "Fallback provider",
|
||||
"libraryActions.fallbackDesc": "Used when the primary provider returns no results",
|
||||
"libraryActions.default": "Default",
|
||||
"libraryActions.none": "None",
|
||||
"libraryActions.metadataRefreshSchedule": "Auto-refresh",
|
||||
"libraryActions.metadataRefreshDesc": "Periodically re-fetch metadata for existing series",
|
||||
"libraryActions.saving": "Saving...",
|
||||
|
||||
// Library sub-page header
|
||||
@@ -162,12 +199,27 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobs.startJobDescription": "Select a library (or all) and choose the action to perform.",
|
||||
"jobs.allLibraries": "All libraries",
|
||||
"jobs.rebuild": "Rebuild",
|
||||
"jobs.rescan": "Deep rescan",
|
||||
"jobs.fullRebuild": "Full rebuild",
|
||||
"jobs.generateThumbnails": "Generate thumbnails",
|
||||
"jobs.regenerateThumbnails": "Regenerate thumbnails",
|
||||
"jobs.batchMetadata": "Batch metadata",
|
||||
"jobs.refreshMetadata": "Refresh metadata",
|
||||
"jobs.refreshMetadataDescription": "Refreshes metadata for all series already linked to an external provider. Re-downloads information from the provider and updates series and books in the database (respecting locked fields). Series without an approved link are ignored. <strong>Requires a specific library</strong> (does not work on \"All libraries\").",
|
||||
"jobs.referenceTitle": "Job types reference",
|
||||
"jobs.groupIndexation": "Indexation",
|
||||
"jobs.groupThumbnails": "Thumbnails",
|
||||
"jobs.groupMetadata": "Metadata",
|
||||
"jobs.requiresLibrary": "Requires a specific library",
|
||||
"jobs.rebuildShort": "Scan new & modified files",
|
||||
"jobs.rescanShort": "Re-walk all directories to discover new formats",
|
||||
"jobs.fullRebuildShort": "Delete all & re-scan from scratch",
|
||||
"jobs.generateThumbnailsShort": "Missing thumbnails only",
|
||||
"jobs.regenerateThumbnailsShort": "Recreate all thumbnails",
|
||||
"jobs.batchMetadataShort": "Auto-match unlinked series",
|
||||
"jobs.refreshMetadataShort": "Update existing linked series",
|
||||
"jobs.rebuildDescription": "Incremental scan: detects files added, modified, or deleted since the last scan, indexes them, and generates missing thumbnails. Existing unmodified data is preserved. This is the most common and fastest action.",
|
||||
"jobs.rescanDescription": "Re-walks all directories regardless of whether they changed, discovering files in newly supported formats (e.g. EPUB). Existing books and metadata are fully preserved — only genuinely new files are added. Slower than a rebuild but safe for your data.",
|
||||
"jobs.fullRebuildDescription": "Deletes all indexed data (books, series, thumbnails) then performs a full scan from scratch. Useful if the database is out of sync or corrupted. Long and destructive operation: reading statuses and manual metadata will be lost.",
|
||||
"jobs.generateThumbnailsDescription": "Generates thumbnails only for books that don't have one yet. Existing thumbnails are not affected. Useful after an import or if some thumbnails are missing.",
|
||||
"jobs.regenerateThumbnailsDescription": "Regenerates all thumbnails from scratch, replacing existing ones. Useful if thumbnail quality or size has changed in the configuration, or if thumbnails are corrupted.",
|
||||
@@ -178,8 +230,7 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobsList.library": "Library",
|
||||
"jobsList.type": "Type",
|
||||
"jobsList.status": "Status",
|
||||
"jobsList.files": "Files",
|
||||
"jobsList.thumbnails": "Thumbnails",
|
||||
"jobsList.stats": "Stats",
|
||||
"jobsList.duration": "Duration",
|
||||
"jobsList.created": "Created",
|
||||
"jobsList.actions": "Actions",
|
||||
@@ -188,6 +239,12 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobRow.showProgress": "Show progress",
|
||||
"jobRow.hideProgress": "Hide progress",
|
||||
"jobRow.scanned": "{{count}} scanned",
|
||||
"jobRow.filesIndexed": "{{count}} files indexed",
|
||||
"jobRow.filesRemoved": "{{count}} files removed",
|
||||
"jobRow.thumbnailsGenerated": "{{count}} thumbnails generated",
|
||||
"jobRow.metadataProcessed": "{{count}} series processed",
|
||||
"jobRow.metadataRefreshed": "{{count}} series refreshed",
|
||||
"jobRow.errors": "{{count}} errors",
|
||||
"jobRow.view": "View",
|
||||
|
||||
// Job progress
|
||||
@@ -227,6 +284,14 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobDetail.phase2b": "Phase 2b — Thumbnail generation",
|
||||
"jobDetail.metadataSearch": "Metadata search",
|
||||
"jobDetail.metadataSearchDesc": "Searching external providers for each series",
|
||||
"jobDetail.metadataRefresh": "Metadata refresh",
|
||||
"jobDetail.metadataRefreshDesc": "Re-downloading metadata from providers for already linked series",
|
||||
"jobDetail.refreshReport": "Refresh report",
|
||||
"jobDetail.refreshReportDesc": "{{count}} linked series processed",
|
||||
"jobDetail.refreshed": "Refreshed",
|
||||
"jobDetail.unchanged": "Unchanged",
|
||||
"jobDetail.refreshChanges": "Changes detail",
|
||||
"jobDetail.refreshChangesDesc": "{{count}} series with changes",
|
||||
"jobDetail.phase1Desc": "Scanning and indexing library files",
|
||||
"jobDetail.phase2aDesc": "Extracting the first page of each archive (page count + raw image)",
|
||||
"jobDetail.phase2bDesc": "Generating thumbnails for scanned books",
|
||||
@@ -261,13 +326,17 @@ const en: Record<TranslationKey, string> = {
|
||||
|
||||
// Job types
|
||||
"jobType.rebuild": "Indexing",
|
||||
"jobType.rescan": "Deep rescan",
|
||||
"jobType.full_rebuild": "Full indexing",
|
||||
"jobType.thumbnail_rebuild": "Thumbnails",
|
||||
"jobType.thumbnail_regenerate": "Regen. thumbnails",
|
||||
"jobType.cbr_to_cbz": "CBR → CBZ",
|
||||
"jobType.metadata_batch": "Batch metadata",
|
||||
"jobType.metadata_refresh": "Refresh meta.",
|
||||
"jobType.rebuildLabel": "Incremental indexing",
|
||||
"jobType.rebuildDesc": "Scans new/modified files, analyzes them, and generates missing thumbnails.",
|
||||
"jobType.rescanLabel": "Deep rescan",
|
||||
"jobType.rescanDesc": "Re-walks all directories to discover files in newly supported formats (e.g. EPUB). Existing data is preserved — only new files are added.",
|
||||
"jobType.full_rebuildLabel": "Full reindexing",
|
||||
"jobType.full_rebuildDesc": "Deletes all existing data then performs a full scan, re-analysis, and thumbnail generation.",
|
||||
"jobType.thumbnail_rebuildLabel": "Thumbnail rebuild",
|
||||
@@ -278,6 +347,8 @@ const en: Record<TranslationKey, string> = {
|
||||
"jobType.cbr_to_cbzDesc": "Converts a CBR archive to the open CBZ format.",
|
||||
"jobType.metadata_batchLabel": "Batch metadata",
|
||||
"jobType.metadata_batchDesc": "Searches external metadata providers for all series in the library and automatically applies 100% confidence matches.",
|
||||
"jobType.metadata_refreshLabel": "Metadata refresh",
|
||||
"jobType.metadata_refreshDesc": "Re-downloads and updates metadata for all series already linked to an external provider.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extracting pages",
|
||||
@@ -410,6 +481,95 @@ const en: Record<TranslationKey, string> = {
|
||||
"settings.comicvineHelp": "Get your key at",
|
||||
"settings.freeProviders": "are free and do not require an API key.",
|
||||
|
||||
// Settings - Status Mappings
|
||||
"settings.statusMappings": "Status mappings",
|
||||
"settings.statusMappingsDesc": "Configure the mapping between provider statuses and database statuses. Multiple provider statuses can map to a single target status.",
|
||||
"settings.targetStatus": "Target status",
|
||||
"settings.providerStatuses": "Provider statuses",
|
||||
"settings.addProviderStatus": "Add a provider status…",
|
||||
"settings.noMappings": "No mappings configured",
|
||||
"settings.unmappedSection": "Unmapped",
|
||||
"settings.addMapping": "Add a mapping",
|
||||
"settings.selectTargetStatus": "Select a target status",
|
||||
"settings.newTargetPlaceholder": "New target status (e.g. hiatus)",
|
||||
"settings.createTargetStatus": "Create status",
|
||||
|
||||
// Settings - Prowlarr
|
||||
"settings.prowlarr": "Prowlarr",
|
||||
"settings.prowlarrDesc": "Configure Prowlarr to search for releases on indexers (torrents/usenet). Only manual search is available for now.",
|
||||
"settings.prowlarrUrl": "Prowlarr URL",
|
||||
"settings.prowlarrUrlPlaceholder": "http://localhost:9696",
|
||||
"settings.prowlarrApiKey": "API Key",
|
||||
"settings.prowlarrApiKeyPlaceholder": "Prowlarr API key",
|
||||
"settings.prowlarrCategories": "Categories",
|
||||
"settings.prowlarrCategoriesHelp": "Comma-separated Newznab category IDs (7030 = Comics, 7020 = Ebooks)",
|
||||
"settings.testConnection": "Test connection",
|
||||
"settings.testing": "Testing...",
|
||||
"settings.testSuccess": "Connection successful",
|
||||
"settings.testFailed": "Connection failed",
|
||||
|
||||
// Prowlarr search modal
|
||||
"prowlarr.searchButton": "Prowlarr",
|
||||
"prowlarr.modalTitle": "Prowlarr Search",
|
||||
"prowlarr.searchSeries": "Search series",
|
||||
"prowlarr.searchVolume": "Search",
|
||||
"prowlarr.searching": "Searching...",
|
||||
"prowlarr.noResults": "No results found",
|
||||
"prowlarr.resultCount": "{{count}} result{{plural}}",
|
||||
"prowlarr.missingVolumes": "Missing volumes",
|
||||
"prowlarr.columnTitle": "Title",
|
||||
"prowlarr.columnIndexer": "Indexer",
|
||||
"prowlarr.columnSize": "Size",
|
||||
"prowlarr.columnSeeders": "Seeds",
|
||||
"prowlarr.columnLeechers": "Peers",
|
||||
"prowlarr.columnProtocol": "Protocol",
|
||||
"prowlarr.searchPlaceholder": "Edit search query...",
|
||||
"prowlarr.searchAction": "Search",
|
||||
"prowlarr.searchError": "Search failed",
|
||||
"prowlarr.notConfigured": "Prowlarr is not configured",
|
||||
"prowlarr.download": "Download",
|
||||
"prowlarr.info": "Info",
|
||||
"prowlarr.sendToQbittorrent": "Send to qBittorrent",
|
||||
"prowlarr.sending": "Sending...",
|
||||
"prowlarr.sentSuccess": "Sent to qBittorrent",
|
||||
"prowlarr.sentError": "Failed to send to qBittorrent",
|
||||
"prowlarr.missingVol": "Vol. {{vol}} missing",
|
||||
|
||||
// Settings - qBittorrent
|
||||
"settings.qbittorrent": "qBittorrent",
|
||||
"settings.qbittorrentDesc": "Configure qBittorrent as a download client. Torrents found via Prowlarr can be sent directly to qBittorrent.",
|
||||
"settings.qbittorrentUrl": "qBittorrent URL",
|
||||
"settings.qbittorrentUrlPlaceholder": "http://localhost:8080",
|
||||
"settings.qbittorrentUsername": "Username",
|
||||
"settings.qbittorrentPassword": "Password",
|
||||
|
||||
// Settings - Telegram Notifications
|
||||
"settings.notifications": "Notifications",
|
||||
"settings.telegram": "Telegram",
|
||||
"settings.telegramDesc": "Receive Telegram notifications for scans, errors, and metadata linking.",
|
||||
"settings.botToken": "Bot Token",
|
||||
"settings.botTokenPlaceholder": "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
|
||||
"settings.chatId": "Chat ID",
|
||||
"settings.chatIdPlaceholder": "123456789",
|
||||
"settings.telegramEnabled": "Enable Telegram notifications",
|
||||
"settings.telegramEvents": "Events",
|
||||
"settings.eventCategoryScan": "Scans",
|
||||
"settings.eventCategoryThumbnail": "Thumbnails",
|
||||
"settings.eventCategoryConversion": "CBR → CBZ Conversion",
|
||||
"settings.eventCategoryMetadata": "Metadata",
|
||||
"settings.eventCompleted": "Completed",
|
||||
"settings.eventFailed": "Failed",
|
||||
"settings.eventCancelled": "Cancelled",
|
||||
"settings.eventLinked": "Linked",
|
||||
"settings.eventBatchCompleted": "Batch completed",
|
||||
"settings.eventBatchFailed": "Batch failed",
|
||||
"settings.eventRefreshCompleted": "Refresh completed",
|
||||
"settings.eventRefreshFailed": "Refresh failed",
|
||||
"settings.telegramHelp": "How to get the required information?",
|
||||
"settings.telegramHelpBot": "Open Telegram, search for <b>@BotFather</b>, send <code>/newbot</code> and follow the instructions. Copy the token it gives you.",
|
||||
"settings.telegramHelpChat": "Send a message to your bot, then open <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> in your browser. The <b>chat id</b> is in <code>message.chat.id</code>.",
|
||||
"settings.telegramHelpGroup": "For a group: add the bot to the group, send a message, then check the same URL. Group IDs are negative (e.g. <code>-123456789</code>).",
|
||||
|
||||
// Settings - Language
|
||||
"settings.language": "Language",
|
||||
"settings.languageDesc": "Choose the interface language",
|
||||
|
||||
@@ -73,6 +73,13 @@ const fr = {
|
||||
"dashboard.noSeries": "Aucune série pour le moment",
|
||||
"dashboard.unknown": "Inconnu",
|
||||
"dashboard.readCount": "{{read}}/{{total}} lu",
|
||||
"dashboard.metadataCoverage": "Couverture métadonnées",
|
||||
"dashboard.seriesLinked": "Séries liées",
|
||||
"dashboard.seriesUnlinked": "Séries non liées",
|
||||
"dashboard.byProvider": "Par fournisseur",
|
||||
"dashboard.bookMetadata": "Métadonnées livres",
|
||||
"dashboard.withSummary": "Avec résumé",
|
||||
"dashboard.withIsbn": "Avec ISBN",
|
||||
|
||||
// Books page
|
||||
"books.title": "Livres",
|
||||
@@ -91,6 +98,8 @@ const fr = {
|
||||
"books.noResults": "Aucun livre trouvé pour \"{{query}}\"",
|
||||
"books.noBooks": "Aucun livre disponible",
|
||||
"books.coverOf": "Couverture de {{name}}",
|
||||
"books.format": "Format",
|
||||
"books.allFormats": "Tous les formats",
|
||||
|
||||
// Series page
|
||||
"series.title": "Séries",
|
||||
@@ -104,6 +113,20 @@ const fr = {
|
||||
"series.missingCount": "{{count}} manquant{{plural}}",
|
||||
"series.readCount": "{{read}}/{{total}} lu{{plural}}",
|
||||
|
||||
// Authors page
|
||||
"nav.authors": "Auteurs",
|
||||
"authors.title": "Auteurs",
|
||||
"authors.searchPlaceholder": "Rechercher par nom d'auteur...",
|
||||
"authors.bookCount": "{{count}} livre{{plural}}",
|
||||
"authors.seriesCount": "{{count}} série{{plural}}",
|
||||
"authors.noResults": "Aucun auteur trouvé correspondant à vos filtres",
|
||||
"authors.noAuthors": "Aucun auteur disponible",
|
||||
"authors.matchingQuery": "correspondant à",
|
||||
"authors.sortName": "Nom",
|
||||
"authors.sortBooks": "Nombre de livres",
|
||||
"authors.booksBy": "Livres de {{name}}",
|
||||
"authors.seriesBy": "Séries de {{name}}",
|
||||
|
||||
// Libraries page
|
||||
"libraries.title": "Bibliothèques",
|
||||
"libraries.addLibrary": "Ajouter une bibliothèque",
|
||||
@@ -115,6 +138,11 @@ const fr = {
|
||||
"libraries.manual": "Manuel",
|
||||
"libraries.nextScan": "Prochain : {{time}}",
|
||||
"libraries.imminent": "Imminent",
|
||||
"libraries.nextMetadataRefresh": "Prochain rafraîchissement méta. : {{time}}",
|
||||
"libraries.nextMetadataRefreshShort": "Méta. : {{time}}",
|
||||
"libraries.scanLabel": "Scan : {{mode}}",
|
||||
"libraries.watcherLabel": "Surveillance fichiers",
|
||||
"libraries.metaRefreshLabel": "Rafraîch. méta. : {{mode}}",
|
||||
"libraries.index": "Indexer",
|
||||
"libraries.fullIndex": "Complet",
|
||||
"libraries.batchMetadata": "Métadonnées en lot",
|
||||
@@ -132,13 +160,22 @@ const fr = {
|
||||
"librarySeries.noBooksInSeries": "Aucun livre dans cette série",
|
||||
|
||||
// Library actions
|
||||
"libraryActions.autoScan": "Scan auto",
|
||||
"libraryActions.fileWatch": "Surveillance fichiers ⚡",
|
||||
"libraryActions.schedule": "📅 Planification",
|
||||
"libraryActions.settingsTitle": "Paramètres de la bibliothèque",
|
||||
"libraryActions.sectionIndexation": "Indexation",
|
||||
"libraryActions.sectionMetadata": "Métadonnées",
|
||||
"libraryActions.autoScan": "Scan planifié",
|
||||
"libraryActions.autoScanDesc": "Scanner automatiquement les fichiers nouveaux et modifiés",
|
||||
"libraryActions.fileWatch": "Surveillance en temps réel",
|
||||
"libraryActions.fileWatchDesc": "Détecter les changements de fichiers instantanément",
|
||||
"libraryActions.schedule": "Fréquence",
|
||||
"libraryActions.provider": "Fournisseur",
|
||||
"libraryActions.fallback": "Secours",
|
||||
"libraryActions.providerDesc": "Source utilisée pour récupérer les métadonnées des séries",
|
||||
"libraryActions.fallback": "Fournisseur de secours",
|
||||
"libraryActions.fallbackDesc": "Utilisé quand le fournisseur principal ne retourne aucun résultat",
|
||||
"libraryActions.default": "Par défaut",
|
||||
"libraryActions.none": "Aucun",
|
||||
"libraryActions.metadataRefreshSchedule": "Rafraîchissement auto",
|
||||
"libraryActions.metadataRefreshDesc": "Re-télécharger périodiquement les métadonnées existantes",
|
||||
"libraryActions.saving": "Enregistrement...",
|
||||
|
||||
// Library sub-page header
|
||||
@@ -159,13 +196,28 @@ const fr = {
|
||||
"jobs.startJob": "Lancer une tâche",
|
||||
"jobs.startJobDescription": "Sélectionnez une bibliothèque (ou toutes) et choisissez l'action à effectuer.",
|
||||
"jobs.allLibraries": "Toutes les bibliothèques",
|
||||
"jobs.rebuild": "Reconstruction",
|
||||
"jobs.fullRebuild": "Reconstruction complète",
|
||||
"jobs.rebuild": "Mise à jour",
|
||||
"jobs.rescan": "Rescan complet",
|
||||
"jobs.fullRebuild": "Reconstruction complète (destructif)",
|
||||
"jobs.generateThumbnails": "Générer les miniatures",
|
||||
"jobs.regenerateThumbnails": "Regénérer les miniatures",
|
||||
"jobs.batchMetadata": "Métadonnées en lot",
|
||||
"jobs.refreshMetadata": "Rafraîchir métadonnées",
|
||||
"jobs.refreshMetadataDescription": "Rafraîchit les métadonnées de toutes les séries déjà liées à un fournisseur externe. Re-télécharge les informations depuis le fournisseur et met à jour les séries et livres en base (en respectant les champs verrouillés). Les séries sans lien approuvé sont ignorées. <strong>Requiert une bibliothèque spécifique</strong> (ne fonctionne pas sur \u00ab Toutes les bibliothèques \u00bb).",
|
||||
"jobs.referenceTitle": "Référence des types de tâches",
|
||||
"jobs.groupIndexation": "Indexation",
|
||||
"jobs.groupThumbnails": "Miniatures",
|
||||
"jobs.groupMetadata": "Métadonnées",
|
||||
"jobs.requiresLibrary": "Requiert une bibliothèque spécifique",
|
||||
"jobs.rebuildShort": "Scanner les fichiers nouveaux et modifiés",
|
||||
"jobs.rescanShort": "Re-parcourir tous les dossiers pour découvrir de nouveaux formats",
|
||||
"jobs.fullRebuildShort": "Tout supprimer et re-scanner depuis zéro. Les métadonnées, statuts de lecture et liens seront perdus.",
|
||||
"jobs.generateThumbnailsShort": "Miniatures manquantes uniquement",
|
||||
"jobs.regenerateThumbnailsShort": "Recréer toutes les miniatures",
|
||||
"jobs.batchMetadataShort": "Lier automatiquement les séries non liées",
|
||||
"jobs.refreshMetadataShort": "Mettre à jour les séries déjà liées",
|
||||
"jobs.rebuildDescription": "Scan incrémental : détecte les fichiers ajoutés, modifiés ou supprimés depuis le dernier scan, les indexe et génère les miniatures manquantes. Les données existantes non modifiées sont conservées. C'est l'action la plus courante et la plus rapide.",
|
||||
"jobs.rescanDescription": "Re-parcourt tous les dossiers même s'ils n'ont pas changé, pour découvrir les fichiers dans les formats nouvellement supportés (ex. EPUB). Les livres et métadonnées existants sont entièrement préservés — seuls les fichiers réellement nouveaux sont ajoutés. Plus lent qu'un rebuild mais sans risque pour vos données.",
|
||||
"jobs.fullRebuildDescription": "Supprime toutes les données indexées (livres, séries, miniatures) puis effectue un scan complet depuis zéro. Utile si la base de données est désynchronisée ou corrompue. Opération longue et destructive : les statuts de lecture et les métadonnées manuelles seront perdus.",
|
||||
"jobs.generateThumbnailsDescription": "Génère les miniatures uniquement pour les livres qui n'en ont pas encore. Les miniatures existantes ne sont pas touchées. Utile après un import ou si certaines miniatures sont manquantes.",
|
||||
"jobs.regenerateThumbnailsDescription": "Regénère toutes les miniatures depuis zéro, en remplaçant les existantes. Utile si la qualité ou la taille des miniatures a changé dans la configuration, ou si des miniatures sont corrompues.",
|
||||
@@ -176,8 +228,7 @@ const fr = {
|
||||
"jobsList.library": "Bibliothèque",
|
||||
"jobsList.type": "Type",
|
||||
"jobsList.status": "Statut",
|
||||
"jobsList.files": "Fichiers",
|
||||
"jobsList.thumbnails": "Miniatures",
|
||||
"jobsList.stats": "Stats",
|
||||
"jobsList.duration": "Durée",
|
||||
"jobsList.created": "Créé",
|
||||
"jobsList.actions": "Actions",
|
||||
@@ -186,6 +237,12 @@ const fr = {
|
||||
"jobRow.showProgress": "Afficher la progression",
|
||||
"jobRow.hideProgress": "Masquer la progression",
|
||||
"jobRow.scanned": "{{count}} analysés",
|
||||
"jobRow.filesIndexed": "{{count}} fichiers indexés",
|
||||
"jobRow.filesRemoved": "{{count}} fichiers supprimés",
|
||||
"jobRow.thumbnailsGenerated": "{{count}} miniatures générées",
|
||||
"jobRow.metadataProcessed": "{{count}} séries traitées",
|
||||
"jobRow.metadataRefreshed": "{{count}} séries rafraîchies",
|
||||
"jobRow.errors": "{{count}} erreurs",
|
||||
"jobRow.view": "Voir",
|
||||
|
||||
// Job progress
|
||||
@@ -225,6 +282,14 @@ const fr = {
|
||||
"jobDetail.phase2b": "Phase 2b — Génération des miniatures",
|
||||
"jobDetail.metadataSearch": "Recherche de métadonnées",
|
||||
"jobDetail.metadataSearchDesc": "Recherche auprès des fournisseurs externes pour chaque série",
|
||||
"jobDetail.metadataRefresh": "Rafraîchissement des métadonnées",
|
||||
"jobDetail.metadataRefreshDesc": "Re-téléchargement des métadonnées depuis les fournisseurs pour les séries déjà liées",
|
||||
"jobDetail.refreshReport": "Rapport de rafraîchissement",
|
||||
"jobDetail.refreshReportDesc": "{{count}} séries liées traitées",
|
||||
"jobDetail.refreshed": "Rafraîchies",
|
||||
"jobDetail.unchanged": "Inchangées",
|
||||
"jobDetail.refreshChanges": "Détail des changements",
|
||||
"jobDetail.refreshChangesDesc": "{{count}} séries avec des modifications",
|
||||
"jobDetail.phase1Desc": "Scan et indexation des fichiers de la bibliothèque",
|
||||
"jobDetail.phase2aDesc": "Extraction de la première page de chaque archive (nombre de pages + image brute)",
|
||||
"jobDetail.phase2bDesc": "Génération des miniatures pour les livres analysés",
|
||||
@@ -259,13 +324,17 @@ const fr = {
|
||||
|
||||
// Job types
|
||||
"jobType.rebuild": "Indexation",
|
||||
"jobType.rescan": "Rescan complet",
|
||||
"jobType.full_rebuild": "Indexation complète",
|
||||
"jobType.thumbnail_rebuild": "Miniatures",
|
||||
"jobType.thumbnail_regenerate": "Régén. miniatures",
|
||||
"jobType.cbr_to_cbz": "CBR → CBZ",
|
||||
"jobType.metadata_batch": "Métadonnées en lot",
|
||||
"jobType.metadata_refresh": "Rafraîchir méta.",
|
||||
"jobType.rebuildLabel": "Indexation incrémentale",
|
||||
"jobType.rebuildDesc": "Scanne les fichiers nouveaux/modifiés, les analyse et génère les miniatures manquantes.",
|
||||
"jobType.rescanLabel": "Rescan complet",
|
||||
"jobType.rescanDesc": "Re-parcourt tous les dossiers pour découvrir les fichiers dans les formats nouvellement supportés (ex. EPUB). Les données existantes sont préservées — seuls les nouveaux fichiers sont ajoutés.",
|
||||
"jobType.full_rebuildLabel": "Réindexation complète",
|
||||
"jobType.full_rebuildDesc": "Supprime toutes les données existantes puis effectue un scan complet, une ré-analyse et la génération des miniatures.",
|
||||
"jobType.thumbnail_rebuildLabel": "Reconstruction des miniatures",
|
||||
@@ -276,6 +345,8 @@ const fr = {
|
||||
"jobType.cbr_to_cbzDesc": "Convertit une archive CBR au format ouvert CBZ.",
|
||||
"jobType.metadata_batchLabel": "Métadonnées en lot",
|
||||
"jobType.metadata_batchDesc": "Recherche les métadonnées auprès des fournisseurs externes pour toutes les séries de la bibliothèque et applique automatiquement les correspondances à 100% de confiance.",
|
||||
"jobType.metadata_refreshLabel": "Rafraîchissement métadonnées",
|
||||
"jobType.metadata_refreshDesc": "Re-télécharge et met à jour les métadonnées pour toutes les séries déjà liées à un fournisseur externe.",
|
||||
|
||||
// Status badges
|
||||
"statusBadge.extracting_pages": "Extraction des pages",
|
||||
@@ -408,6 +479,95 @@ const fr = {
|
||||
"settings.comicvineHelp": "Obtenez votre clé sur",
|
||||
"settings.freeProviders": "sont gratuits et ne nécessitent pas de clé API.",
|
||||
|
||||
// Settings - Status Mappings
|
||||
"settings.statusMappings": "Correspondance de statuts",
|
||||
"settings.statusMappingsDesc": "Configurer la correspondance entre les statuts des fournisseurs et les statuts en base de données. Plusieurs statuts fournisseurs peuvent pointer vers un même statut cible.",
|
||||
"settings.targetStatus": "Statut cible",
|
||||
"settings.providerStatuses": "Statuts fournisseurs",
|
||||
"settings.addProviderStatus": "Ajouter un statut fournisseur…",
|
||||
"settings.noMappings": "Aucune correspondance configurée",
|
||||
"settings.unmappedSection": "Non mappés",
|
||||
"settings.addMapping": "Ajouter une correspondance",
|
||||
"settings.selectTargetStatus": "Sélectionner un statut cible",
|
||||
"settings.newTargetPlaceholder": "Nouveau statut cible (ex: hiatus)",
|
||||
"settings.createTargetStatus": "Créer un statut",
|
||||
|
||||
// Settings - Prowlarr
|
||||
"settings.prowlarr": "Prowlarr",
|
||||
"settings.prowlarrDesc": "Configurer Prowlarr pour rechercher des releases sur les indexeurs (torrents/usenet). Seule la recherche manuelle est disponible pour le moment.",
|
||||
"settings.prowlarrUrl": "URL Prowlarr",
|
||||
"settings.prowlarrUrlPlaceholder": "http://localhost:9696",
|
||||
"settings.prowlarrApiKey": "Clé API",
|
||||
"settings.prowlarrApiKeyPlaceholder": "Clé API Prowlarr",
|
||||
"settings.prowlarrCategories": "Catégories",
|
||||
"settings.prowlarrCategoriesHelp": "ID de catégories Newznab séparés par des virgules (7030 = Comics, 7020 = Ebooks)",
|
||||
"settings.testConnection": "Tester la connexion",
|
||||
"settings.testing": "Test en cours...",
|
||||
"settings.testSuccess": "Connexion réussie",
|
||||
"settings.testFailed": "Échec de la connexion",
|
||||
|
||||
// Prowlarr search modal
|
||||
"prowlarr.searchButton": "Prowlarr",
|
||||
"prowlarr.modalTitle": "Recherche Prowlarr",
|
||||
"prowlarr.searchSeries": "Rechercher la série",
|
||||
"prowlarr.searchVolume": "Rechercher",
|
||||
"prowlarr.searching": "Recherche en cours...",
|
||||
"prowlarr.noResults": "Aucun résultat trouvé",
|
||||
"prowlarr.resultCount": "{{count}} résultat{{plural}}",
|
||||
"prowlarr.missingVolumes": "Volumes manquants",
|
||||
"prowlarr.columnTitle": "Titre",
|
||||
"prowlarr.columnIndexer": "Indexeur",
|
||||
"prowlarr.columnSize": "Taille",
|
||||
"prowlarr.columnSeeders": "Seeds",
|
||||
"prowlarr.columnLeechers": "Peers",
|
||||
"prowlarr.columnProtocol": "Protocole",
|
||||
"prowlarr.searchPlaceholder": "Modifier la recherche...",
|
||||
"prowlarr.searchAction": "Rechercher",
|
||||
"prowlarr.searchError": "Erreur lors de la recherche",
|
||||
"prowlarr.notConfigured": "Prowlarr n'est pas configuré",
|
||||
"prowlarr.download": "Télécharger",
|
||||
"prowlarr.info": "Info",
|
||||
"prowlarr.sendToQbittorrent": "Envoyer à qBittorrent",
|
||||
"prowlarr.sending": "Envoi...",
|
||||
"prowlarr.sentSuccess": "Envoyé à qBittorrent",
|
||||
"prowlarr.sentError": "Échec de l'envoi à qBittorrent",
|
||||
"prowlarr.missingVol": "T{{vol}} manquant",
|
||||
|
||||
// Settings - qBittorrent
|
||||
"settings.qbittorrent": "qBittorrent",
|
||||
"settings.qbittorrentDesc": "Configurer qBittorrent comme client de téléchargement. Les torrents trouvés via Prowlarr peuvent être envoyés directement à qBittorrent.",
|
||||
"settings.qbittorrentUrl": "URL qBittorrent",
|
||||
"settings.qbittorrentUrlPlaceholder": "http://localhost:8080",
|
||||
"settings.qbittorrentUsername": "Nom d'utilisateur",
|
||||
"settings.qbittorrentPassword": "Mot de passe",
|
||||
|
||||
// Settings - Telegram Notifications
|
||||
"settings.notifications": "Notifications",
|
||||
"settings.telegram": "Telegram",
|
||||
"settings.telegramDesc": "Recevoir des notifications Telegram lors des scans, erreurs et liaisons de métadonnées.",
|
||||
"settings.botToken": "Bot Token",
|
||||
"settings.botTokenPlaceholder": "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
|
||||
"settings.chatId": "Chat ID",
|
||||
"settings.chatIdPlaceholder": "123456789",
|
||||
"settings.telegramEnabled": "Activer les notifications Telegram",
|
||||
"settings.telegramEvents": "Événements",
|
||||
"settings.eventCategoryScan": "Scans",
|
||||
"settings.eventCategoryThumbnail": "Miniatures",
|
||||
"settings.eventCategoryConversion": "Conversion CBR → CBZ",
|
||||
"settings.eventCategoryMetadata": "Métadonnées",
|
||||
"settings.eventCompleted": "Terminé",
|
||||
"settings.eventFailed": "Échoué",
|
||||
"settings.eventCancelled": "Annulé",
|
||||
"settings.eventLinked": "Liée",
|
||||
"settings.eventBatchCompleted": "Batch terminé",
|
||||
"settings.eventBatchFailed": "Batch échoué",
|
||||
"settings.eventRefreshCompleted": "Rafraîchissement terminé",
|
||||
"settings.eventRefreshFailed": "Rafraîchissement échoué",
|
||||
"settings.telegramHelp": "Comment obtenir les informations ?",
|
||||
"settings.telegramHelpBot": "Ouvrez Telegram, recherchez <b>@BotFather</b>, envoyez <code>/newbot</code> et suivez les instructions. Copiez le token fourni.",
|
||||
"settings.telegramHelpChat": "Envoyez un message à votre bot, puis ouvrez <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> dans votre navigateur. Le <b>chat id</b> apparaît dans <code>message.chat.id</code>.",
|
||||
"settings.telegramHelpGroup": "Pour un groupe : ajoutez le bot au groupe, envoyez un message, puis consultez la même URL. Les IDs de groupe sont négatifs (ex: <code>-123456789</code>).",
|
||||
|
||||
// Settings - Language
|
||||
"settings.language": "Langue",
|
||||
"settings.languageDesc": "Choisir la langue de l'interface",
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
output: "standalone",
|
||||
typedRoutes: true
|
||||
typedRoutes: true,
|
||||
images: {
|
||||
minimumCacheTTL: 86400,
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "stripstream-backoffice",
|
||||
"version": "1.6.1",
|
||||
"version": "1.23.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev -p 7082",
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -14,6 +14,7 @@ futures = "0.3"
|
||||
image.workspace = true
|
||||
jpeg-decoder.workspace = true
|
||||
num_cpus.workspace = true
|
||||
notifications = { path = "../../crates/notifications" }
|
||||
parsers = { path = "../../crates/parsers" }
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
@@ -6,13 +6,15 @@ COPY Cargo.toml ./
|
||||
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
||||
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
||||
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
||||
COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
|
||||
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
||||
|
||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/parsers/src && \
|
||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
|
||||
echo "fn main() {}" > apps/api/src/main.rs && \
|
||||
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
||||
echo "" > apps/indexer/src/lib.rs && \
|
||||
echo "" > crates/core/src/lib.rs && \
|
||||
echo "" > crates/notifications/src/lib.rs && \
|
||||
echo "" > crates/parsers/src/lib.rs
|
||||
|
||||
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
||||
@@ -25,12 +27,13 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||
COPY apps/api/src apps/api/src
|
||||
COPY apps/indexer/src apps/indexer/src
|
||||
COPY crates/core/src crates/core/src
|
||||
COPY crates/notifications/src crates/notifications/src
|
||||
COPY crates/parsers/src crates/parsers/src
|
||||
|
||||
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||
--mount=type=cache,target=/usr/local/cargo/git \
|
||||
--mount=type=cache,target=/app/target \
|
||||
touch apps/indexer/src/main.rs crates/core/src/lib.rs crates/parsers/src/lib.rs && \
|
||||
touch apps/indexer/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
|
||||
cargo build --release -p indexer && \
|
||||
cp /app/target/release/indexer /usr/local/bin/indexer
|
||||
|
||||
|
||||
@@ -290,6 +290,7 @@ fn book_format_from_str(s: &str) -> Option<BookFormat> {
|
||||
"cbz" => Some(BookFormat::Cbz),
|
||||
"cbr" => Some(BookFormat::Cbr),
|
||||
"pdf" => Some(BookFormat::Pdf),
|
||||
"epub" => Some(BookFormat::Epub),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,12 +37,13 @@ pub async fn cleanup_stale_jobs(pool: &PgPool) -> Result<()> {
|
||||
}
|
||||
|
||||
/// Job types processed by the API, not the indexer.
|
||||
const API_ONLY_JOB_TYPES: &[&str] = &["metadata_batch"];
|
||||
const API_ONLY_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh"];
|
||||
|
||||
/// Job types that modify book/thumbnail data and must not run concurrently.
|
||||
const EXCLUSIVE_JOB_TYPES: &[&str] = &[
|
||||
"rebuild",
|
||||
"full_rebuild",
|
||||
"rescan",
|
||||
"scan",
|
||||
"thumbnail_rebuild",
|
||||
"thumbnail_regenerate",
|
||||
@@ -211,11 +212,29 @@ pub async fn process_job(
|
||||
}
|
||||
|
||||
let is_full_rebuild = job_type == "full_rebuild";
|
||||
let is_rescan = job_type == "rescan";
|
||||
info!(
|
||||
"[JOB] {} type={} full_rebuild={}",
|
||||
job_id, job_type, is_full_rebuild
|
||||
"[JOB] {} type={} full_rebuild={} rescan={}",
|
||||
job_id, job_type, is_full_rebuild, is_rescan
|
||||
);
|
||||
|
||||
// Rescan: clear directory mtimes to force re-walking all directories,
|
||||
// but keep existing data intact (unlike full_rebuild)
|
||||
if is_rescan {
|
||||
if let Some(library_id) = target_library_id {
|
||||
let _ = sqlx::query("DELETE FROM directory_mtimes WHERE library_id = $1")
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await;
|
||||
info!("[JOB] Rescan: cleared directory mtimes for library {}", library_id);
|
||||
} else {
|
||||
let _ = sqlx::query("DELETE FROM directory_mtimes")
|
||||
.execute(&state.pool)
|
||||
.await;
|
||||
info!("[JOB] Rescan: cleared all directory mtimes");
|
||||
}
|
||||
}
|
||||
|
||||
// Full rebuild: delete existing data first
|
||||
if is_full_rebuild {
|
||||
info!("[JOB] Full rebuild: deleting existing data");
|
||||
@@ -258,7 +277,7 @@ pub async fn process_job(
|
||||
// For full rebuilds, the DB is already cleared, so we must walk the filesystem.
|
||||
let library_ids: Vec<uuid::Uuid> = libraries.iter().map(|r| r.get("id")).collect();
|
||||
|
||||
let total_files: usize = if !is_full_rebuild {
|
||||
let total_files: usize = if !is_full_rebuild && !is_rescan {
|
||||
let count: i64 = sqlx::query_scalar(
|
||||
"SELECT COUNT(*) FROM book_files bf JOIN books b ON b.id = bf.book_id WHERE b.library_id = ANY($1)"
|
||||
)
|
||||
@@ -309,6 +328,7 @@ pub async fn process_job(
|
||||
removed_files: 0,
|
||||
errors: 0,
|
||||
warnings: 0,
|
||||
new_series: 0,
|
||||
};
|
||||
|
||||
let mut total_processed_count = 0i32;
|
||||
|
||||
@@ -14,6 +14,7 @@ use crate::{
|
||||
utils,
|
||||
AppState,
|
||||
};
|
||||
use std::collections::HashSet;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct JobStats {
|
||||
@@ -22,6 +23,7 @@ pub struct JobStats {
|
||||
pub removed_files: usize,
|
||||
pub errors: usize,
|
||||
pub warnings: usize,
|
||||
pub new_series: usize,
|
||||
}
|
||||
|
||||
const BATCH_SIZE: usize = 100;
|
||||
@@ -106,6 +108,18 @@ pub async fn scan_library_discovery(
|
||||
HashMap::new()
|
||||
};
|
||||
|
||||
// Track existing series names for new_series counting
|
||||
let existing_series: HashSet<String> = sqlx::query_scalar(
|
||||
"SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') FROM books WHERE library_id = $1",
|
||||
)
|
||||
.bind(library_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.collect();
|
||||
let mut seen_new_series: HashSet<String> = HashSet::new();
|
||||
|
||||
let mut seen: HashMap<String, bool> = HashMap::new();
|
||||
let mut library_processed_count = 0i32;
|
||||
let mut last_progress_update = std::time::Instant::now();
|
||||
@@ -382,6 +396,12 @@ pub async fn scan_library_discovery(
|
||||
let book_id = Uuid::new_v4();
|
||||
let file_id = Uuid::new_v4();
|
||||
|
||||
// Track new series
|
||||
let series_key = parsed.series.as_deref().unwrap_or("unclassified").to_string();
|
||||
if !existing_series.contains(&series_key) && seen_new_series.insert(series_key) {
|
||||
stats.new_series += 1;
|
||||
}
|
||||
|
||||
books_to_insert.push(BookInsert {
|
||||
book_id,
|
||||
library_id,
|
||||
|
||||
@@ -26,15 +26,15 @@ pub async fn check_and_schedule_auto_scans(pool: &PgPool) -> Result<()> {
|
||||
for row in libraries {
|
||||
let library_id: Uuid = row.get("id");
|
||||
let scan_mode: String = row.get("scan_mode");
|
||||
|
||||
|
||||
info!("[SCHEDULER] Auto-scanning library {} (mode: {})", library_id, scan_mode);
|
||||
|
||||
|
||||
let job_id = Uuid::new_v4();
|
||||
let job_type = match scan_mode.as_str() {
|
||||
"full" => "full_rebuild",
|
||||
_ => "rebuild",
|
||||
};
|
||||
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, $3, 'pending')"
|
||||
)
|
||||
@@ -43,7 +43,7 @@ pub async fn check_and_schedule_auto_scans(pool: &PgPool) -> Result<()> {
|
||||
.bind(job_type)
|
||||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
|
||||
// Update next_scan_at
|
||||
let interval_minutes = match scan_mode.as_str() {
|
||||
"hourly" => 60,
|
||||
@@ -51,7 +51,7 @@ pub async fn check_and_schedule_auto_scans(pool: &PgPool) -> Result<()> {
|
||||
"weekly" => 10080,
|
||||
_ => 1440, // default daily
|
||||
};
|
||||
|
||||
|
||||
sqlx::query(
|
||||
"UPDATE libraries SET last_scan_at = NOW(), next_scan_at = NOW() + INTERVAL '1 minute' * $2 WHERE id = $1"
|
||||
)
|
||||
@@ -59,9 +59,71 @@ pub async fn check_and_schedule_auto_scans(pool: &PgPool) -> Result<()> {
|
||||
.bind(interval_minutes)
|
||||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
|
||||
info!("[SCHEDULER] Created job {} for library {}", job_id, library_id);
|
||||
}
|
||||
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn check_and_schedule_metadata_refreshes(pool: &PgPool) -> Result<()> {
|
||||
let libraries = sqlx::query(
|
||||
r#"
|
||||
SELECT id, metadata_refresh_mode
|
||||
FROM libraries
|
||||
WHERE metadata_refresh_mode != 'manual'
|
||||
AND (
|
||||
next_metadata_refresh_at IS NULL
|
||||
OR next_metadata_refresh_at <= NOW()
|
||||
)
|
||||
AND NOT EXISTS (
|
||||
SELECT 1 FROM index_jobs
|
||||
WHERE library_id = libraries.id
|
||||
AND type = 'metadata_refresh'
|
||||
AND status IN ('pending', 'running')
|
||||
)
|
||||
AND EXISTS (
|
||||
SELECT 1 FROM external_metadata_links
|
||||
WHERE library_id = libraries.id
|
||||
AND status = 'approved'
|
||||
)
|
||||
"#
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?;
|
||||
|
||||
for row in libraries {
|
||||
let library_id: Uuid = row.get("id");
|
||||
let refresh_mode: String = row.get("metadata_refresh_mode");
|
||||
|
||||
info!("[SCHEDULER] Auto-refreshing metadata for library {} (mode: {})", library_id, refresh_mode);
|
||||
|
||||
let job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_refresh', 'pending')"
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
let interval_minutes = match refresh_mode.as_str() {
|
||||
"hourly" => 60,
|
||||
"daily" => 1440,
|
||||
"weekly" => 10080,
|
||||
_ => 1440,
|
||||
};
|
||||
|
||||
sqlx::query(
|
||||
"UPDATE libraries SET last_metadata_refresh_at = NOW(), next_metadata_refresh_at = NOW() + INTERVAL '1 minute' * $2 WHERE id = $1"
|
||||
)
|
||||
.bind(library_id)
|
||||
.bind(interval_minutes)
|
||||
.execute(pool)
|
||||
.await?;
|
||||
|
||||
info!("[SCHEDULER] Created metadata_refresh job {} for library {}", job_id, library_id);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -40,7 +40,7 @@ pub fn compute_fingerprint(path: &Path, size: u64, mtime: &DateTime<Utc>) -> Res
|
||||
|
||||
pub fn kind_from_format(format: BookFormat) -> &'static str {
|
||||
match format {
|
||||
BookFormat::Pdf => "ebook",
|
||||
BookFormat::Pdf | BookFormat::Epub => "ebook",
|
||||
BookFormat::Cbz | BookFormat::Cbr => "comic",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
use std::time::Duration;
|
||||
use sqlx::Row;
|
||||
use tracing::{error, info, trace};
|
||||
use uuid::Uuid;
|
||||
use crate::{job, scheduler, watcher, AppState};
|
||||
|
||||
pub async fn run_worker(state: AppState, interval_seconds: u64) {
|
||||
let wait = Duration::from_secs(interval_seconds.max(1));
|
||||
|
||||
|
||||
// Cleanup stale jobs from previous runs
|
||||
if let Err(err) = job::cleanup_stale_jobs(&state.pool).await {
|
||||
error!("[CLEANUP] Failed to cleanup stale jobs: {}", err);
|
||||
@@ -27,25 +29,190 @@ pub async fn run_worker(state: AppState, interval_seconds: u64) {
|
||||
if let Err(err) = scheduler::check_and_schedule_auto_scans(&scheduler_state.pool).await {
|
||||
error!("[SCHEDULER] Error: {}", err);
|
||||
}
|
||||
if let Err(err) = scheduler::check_and_schedule_metadata_refreshes(&scheduler_state.pool).await {
|
||||
error!("[SCHEDULER] Metadata refresh error: {}", err);
|
||||
}
|
||||
tokio::time::sleep(scheduler_wait).await;
|
||||
}
|
||||
});
|
||||
|
||||
struct JobInfo {
|
||||
job_type: String,
|
||||
library_name: Option<String>,
|
||||
book_title: Option<String>,
|
||||
thumbnail_path: Option<String>,
|
||||
}
|
||||
|
||||
async fn load_job_info(
|
||||
pool: &sqlx::PgPool,
|
||||
job_id: Uuid,
|
||||
library_id: Option<Uuid>,
|
||||
) -> JobInfo {
|
||||
let row = sqlx::query("SELECT type, book_id FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
let (job_type, book_id): (String, Option<Uuid>) = match row {
|
||||
Some(r) => (r.get("type"), r.get("book_id")),
|
||||
None => ("unknown".to_string(), None),
|
||||
};
|
||||
|
||||
let library_name: Option<String> = if let Some(lib_id) = library_id {
|
||||
sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||
.bind(lib_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let (book_title, thumbnail_path): (Option<String>, Option<String>) = if let Some(bid) = book_id {
|
||||
let row = sqlx::query("SELECT title, thumbnail_path FROM books WHERE id = $1")
|
||||
.bind(bid)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
match row {
|
||||
Some(r) => (r.get("title"), r.get("thumbnail_path")),
|
||||
None => (None, None),
|
||||
}
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
JobInfo { job_type, library_name, book_title, thumbnail_path }
|
||||
}
|
||||
|
||||
async fn load_scan_stats(pool: &sqlx::PgPool, job_id: Uuid) -> notifications::ScanStats {
|
||||
let row = sqlx::query("SELECT stats_json FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()
|
||||
.flatten();
|
||||
|
||||
if let Some(row) = row {
|
||||
if let Ok(val) = row.try_get::<serde_json::Value, _>("stats_json") {
|
||||
return notifications::ScanStats {
|
||||
scanned_files: val.get("scanned_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||
indexed_files: val.get("indexed_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||
removed_files: val.get("removed_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||
new_series: val.get("new_series").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||
errors: val.get("errors").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
notifications::ScanStats {
|
||||
scanned_files: 0,
|
||||
indexed_files: 0,
|
||||
removed_files: 0,
|
||||
new_series: 0,
|
||||
errors: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_completed_event(
|
||||
job_type: &str,
|
||||
library_name: Option<String>,
|
||||
book_title: Option<String>,
|
||||
thumbnail_path: Option<String>,
|
||||
stats: notifications::ScanStats,
|
||||
duration_seconds: u64,
|
||||
) -> notifications::NotificationEvent {
|
||||
match notifications::job_type_category(job_type) {
|
||||
"thumbnail" => notifications::NotificationEvent::ThumbnailCompleted {
|
||||
job_type: job_type.to_string(),
|
||||
library_name,
|
||||
duration_seconds,
|
||||
},
|
||||
"conversion" => notifications::NotificationEvent::ConversionCompleted {
|
||||
library_name,
|
||||
book_title,
|
||||
thumbnail_path,
|
||||
},
|
||||
_ => notifications::NotificationEvent::ScanCompleted {
|
||||
job_type: job_type.to_string(),
|
||||
library_name,
|
||||
stats,
|
||||
duration_seconds,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn build_failed_event(
|
||||
job_type: &str,
|
||||
library_name: Option<String>,
|
||||
book_title: Option<String>,
|
||||
thumbnail_path: Option<String>,
|
||||
error: String,
|
||||
) -> notifications::NotificationEvent {
|
||||
match notifications::job_type_category(job_type) {
|
||||
"thumbnail" => notifications::NotificationEvent::ThumbnailFailed {
|
||||
job_type: job_type.to_string(),
|
||||
library_name,
|
||||
error,
|
||||
},
|
||||
"conversion" => notifications::NotificationEvent::ConversionFailed {
|
||||
library_name,
|
||||
book_title,
|
||||
thumbnail_path,
|
||||
error,
|
||||
},
|
||||
_ => notifications::NotificationEvent::ScanFailed {
|
||||
job_type: job_type.to_string(),
|
||||
library_name,
|
||||
error,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
match job::claim_next_job(&state.pool).await {
|
||||
Ok(Some((job_id, library_id))) => {
|
||||
info!("[INDEXER] Starting job {} library={:?}", job_id, library_id);
|
||||
let started_at = std::time::Instant::now();
|
||||
let info = load_job_info(&state.pool, job_id, library_id).await;
|
||||
|
||||
if let Err(err) = job::process_job(&state, job_id, library_id).await {
|
||||
let err_str = err.to_string();
|
||||
if err_str.contains("cancelled") || err_str.contains("Cancelled") {
|
||||
info!("[INDEXER] Job {} was cancelled by user", job_id);
|
||||
// Status is already 'cancelled' in DB, don't change it
|
||||
notifications::notify(
|
||||
state.pool.clone(),
|
||||
notifications::NotificationEvent::ScanCancelled {
|
||||
job_type: info.job_type.clone(),
|
||||
library_name: info.library_name.clone(),
|
||||
},
|
||||
);
|
||||
} else {
|
||||
error!("[INDEXER] Job {} failed: {}", job_id, err);
|
||||
let _ = job::fail_job(&state.pool, job_id, &err_str).await;
|
||||
notifications::notify(
|
||||
state.pool.clone(),
|
||||
build_failed_event(&info.job_type, info.library_name.clone(), info.book_title.clone(), info.thumbnail_path.clone(), err_str),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
info!("[INDEXER] Job {} completed", job_id);
|
||||
let stats = load_scan_stats(&state.pool, job_id).await;
|
||||
notifications::notify(
|
||||
state.pool.clone(),
|
||||
build_completed_event(
|
||||
&info.job_type,
|
||||
info.library_name.clone(),
|
||||
info.book_title.clone(),
|
||||
info.thumbnail_path.clone(),
|
||||
stats,
|
||||
started_at.elapsed().as_secs(),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
|
||||
13
crates/notifications/Cargo.toml
Normal file
13
crates/notifications/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "notifications"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
sqlx.workspace = true
|
||||
tokio.workspace = true
|
||||
tracing.workspace = true
|
||||
513
crates/notifications/src/lib.rs
Normal file
513
crates/notifications/src/lib.rs
Normal file
@@ -0,0 +1,513 @@
|
||||
use anyhow::Result;
|
||||
use serde::Deserialize;
|
||||
use sqlx::PgPool;
|
||||
use tracing::{info, warn};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Config
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Telegram bot configuration, deserialized from the JSON `value` of the
/// `app_settings` row with key `telegram` (loaded by `load_telegram_config`).
#[derive(Debug, Deserialize)]
pub struct TelegramConfig {
    // Bot API token (used to build the api.telegram.org/bot<token> URL).
    pub bot_token: String,
    // Target chat/channel identifier the bot posts to.
    pub chat_id: String,
    // Master switch; serde default makes this false when absent from the JSON.
    #[serde(default)]
    pub enabled: bool,
    // Per-event on/off switches; defaults to "everything enabled" when absent.
    #[serde(default = "default_events")]
    pub events: EventToggles,
}
|
||||
|
||||
/// Per-event enable flags for Telegram notifications.
///
/// Every flag defaults to `true` via `default_true`, so event types are
/// opt-out: a missing key in the stored JSON means "send this notification".
#[derive(Debug, Deserialize)]
pub struct EventToggles {
    #[serde(default = "default_true")]
    pub scan_completed: bool,
    #[serde(default = "default_true")]
    pub scan_failed: bool,
    #[serde(default = "default_true")]
    pub scan_cancelled: bool,
    #[serde(default = "default_true")]
    pub thumbnail_completed: bool,
    #[serde(default = "default_true")]
    pub thumbnail_failed: bool,
    #[serde(default = "default_true")]
    pub conversion_completed: bool,
    #[serde(default = "default_true")]
    pub conversion_failed: bool,
    #[serde(default = "default_true")]
    pub metadata_approved: bool,
    #[serde(default = "default_true")]
    pub metadata_batch_completed: bool,
    #[serde(default = "default_true")]
    pub metadata_batch_failed: bool,
    #[serde(default = "default_true")]
    pub metadata_refresh_completed: bool,
    #[serde(default = "default_true")]
    pub metadata_refresh_failed: bool,
}
|
||||
|
||||
// Serde default helper: individual event toggles are enabled unless the
// stored JSON explicitly sets them to false.
fn default_true() -> bool {
    true
}
|
||||
|
||||
// Serde default helper for `TelegramConfig::events`: when the `events`
// object is missing entirely, every notification type is enabled.
fn default_events() -> EventToggles {
    EventToggles {
        scan_completed: true,
        scan_failed: true,
        scan_cancelled: true,
        thumbnail_completed: true,
        thumbnail_failed: true,
        conversion_completed: true,
        conversion_failed: true,
        metadata_approved: true,
        metadata_batch_completed: true,
        metadata_batch_failed: true,
        metadata_refresh_completed: true,
        metadata_refresh_failed: true,
    }
}
|
||||
|
||||
/// Load the Telegram config from `app_settings` (key = "telegram").
|
||||
/// Returns `None` when the row is missing, disabled, or has empty credentials.
|
||||
pub async fn load_telegram_config(pool: &PgPool) -> Option<TelegramConfig> {
|
||||
let row = sqlx::query_scalar::<_, serde_json::Value>(
|
||||
"SELECT value FROM app_settings WHERE key = 'telegram'",
|
||||
)
|
||||
.fetch_optional(pool)
|
||||
.await
|
||||
.ok()??;
|
||||
|
||||
let config: TelegramConfig = serde_json::from_value(row).ok()?;
|
||||
|
||||
if !config.enabled || config.bot_token.is_empty() || config.chat_id.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(config)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Telegram HTTP
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Build an HTTP client with a 10-second request timeout so a slow or
/// unreachable Telegram API can never stall a worker indefinitely.
fn build_client() -> Result<reqwest::Client> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()?;
    Ok(client)
}
|
||||
|
||||
async fn send_telegram(config: &TelegramConfig, text: &str) -> Result<()> {
|
||||
let url = format!(
|
||||
"https://api.telegram.org/bot{}/sendMessage",
|
||||
config.bot_token
|
||||
);
|
||||
|
||||
let body = serde_json::json!({
|
||||
"chat_id": config.chat_id,
|
||||
"text": text,
|
||||
"parse_mode": "HTML",
|
||||
});
|
||||
|
||||
let resp = build_client()?.post(&url).json(&body).send().await?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
anyhow::bail!("Telegram API returned {status}: {text}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_telegram_photo(config: &TelegramConfig, caption: &str, photo_path: &str) -> Result<()> {
|
||||
let url = format!(
|
||||
"https://api.telegram.org/bot{}/sendPhoto",
|
||||
config.bot_token
|
||||
);
|
||||
|
||||
let photo_bytes = tokio::fs::read(photo_path).await?;
|
||||
let filename = std::path::Path::new(photo_path)
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
let mime = if filename.ends_with(".webp") {
|
||||
"image/webp"
|
||||
} else if filename.ends_with(".png") {
|
||||
"image/png"
|
||||
} else {
|
||||
"image/jpeg"
|
||||
};
|
||||
|
||||
let part = reqwest::multipart::Part::bytes(photo_bytes)
|
||||
.file_name(filename)
|
||||
.mime_str(mime)?;
|
||||
|
||||
let form = reqwest::multipart::Form::new()
|
||||
.text("chat_id", config.chat_id.clone())
|
||||
.text("caption", caption.to_string())
|
||||
.text("parse_mode", "HTML")
|
||||
.part("photo", part);
|
||||
|
||||
let resp = build_client()?.post(&url).multipart(form).send().await?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let text = resp.text().await.unwrap_or_default();
|
||||
anyhow::bail!("Telegram API returned {status}: {text}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Send a test message. Returns the result directly (not fire-and-forget),
/// so the caller can surface success or failure to whoever triggered the test.
pub async fn send_test_message(config: &TelegramConfig) -> Result<()> {
    send_telegram(config, "🔔 <b>Stripstream Librarian</b>\nTest notification — connection OK!").await
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Notification events
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Counters from a finished scan job, rendered by `format_event` into the
/// `ScanCompleted` message.
pub struct ScanStats {
    // Rendered as "Files scanned".
    pub scanned_files: usize,
    // Rendered as "New books".
    pub indexed_files: usize,
    // Rendered as "Removed".
    pub removed_files: usize,
    // Rendered as "New series".
    pub new_series: usize,
    // Rendered as "Errors".
    pub errors: usize,
}
|
||||
|
||||
/// Every notification the system can emit.
///
/// `format_event` renders each variant to a Telegram HTML message, and
/// `is_event_enabled` maps each variant to its toggle in `EventToggles`.
/// Variants carrying `thumbnail_path` may be sent as photo messages (see
/// `event_thumbnail`).
pub enum NotificationEvent {
    // Scan jobs (rebuild, full_rebuild, rescan, scan)
    ScanCompleted {
        job_type: String,
        library_name: Option<String>,
        stats: ScanStats,
        duration_seconds: u64,
    },
    ScanFailed {
        job_type: String,
        library_name: Option<String>,
        error: String,
    },
    ScanCancelled {
        job_type: String,
        library_name: Option<String>,
    },
    // Thumbnail jobs (thumbnail_rebuild, thumbnail_regenerate)
    ThumbnailCompleted {
        job_type: String,
        library_name: Option<String>,
        duration_seconds: u64,
    },
    ThumbnailFailed {
        job_type: String,
        library_name: Option<String>,
        error: String,
    },
    // CBR→CBZ conversion
    ConversionCompleted {
        library_name: Option<String>,
        book_title: Option<String>,
        thumbnail_path: Option<String>,
    },
    ConversionFailed {
        library_name: Option<String>,
        book_title: Option<String>,
        thumbnail_path: Option<String>,
        error: String,
    },
    // Metadata manual approve
    MetadataApproved {
        series_name: String,
        provider: String,
        thumbnail_path: Option<String>,
    },
    // Metadata batch (auto-match)
    MetadataBatchCompleted {
        library_name: Option<String>,
        total_series: i32,
        processed: i32,
    },
    MetadataBatchFailed {
        library_name: Option<String>,
        error: String,
    },
    // Metadata refresh
    MetadataRefreshCompleted {
        library_name: Option<String>,
        refreshed: i32,
        unchanged: i32,
        errors: i32,
    },
    MetadataRefreshFailed {
        library_name: Option<String>,
        error: String,
    },
}
|
||||
|
||||
/// Classify an indexer job_type string into the right event constructor category.
/// Returns "scan", "thumbnail", or "conversion".
pub fn job_type_category(job_type: &str) -> &'static str {
    if matches!(job_type, "thumbnail_rebuild" | "thumbnail_regenerate") {
        "thumbnail"
    } else if job_type == "cbr_to_cbz" {
        "conversion"
    } else {
        // Everything else (rebuild, full_rebuild, rescan, scan, ...) is a scan.
        "scan"
    }
}
|
||||
|
||||
/// Render an event to the Telegram message text (HTML parse mode; only `<b>`
/// tags are used). Missing library names fall back to "All libraries" for
/// job-wide events and "Unknown" for per-book conversion events; error
/// strings are capped at 200 bytes via `truncate`.
fn format_event(event: &NotificationEvent) -> String {
    match event {
        NotificationEvent::ScanCompleted {
            job_type,
            library_name,
            stats,
            duration_seconds,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let duration = format_duration(*duration_seconds);
            format!(
                "📚 <b>Scan completed</b>\n\
                 Library: {lib}\n\
                 Type: {job_type}\n\
                 New books: {}\n\
                 New series: {}\n\
                 Files scanned: {}\n\
                 Removed: {}\n\
                 Errors: {}\n\
                 Duration: {duration}",
                stats.indexed_files,
                stats.new_series,
                stats.scanned_files,
                stats.removed_files,
                stats.errors,
            )
        }
        NotificationEvent::ScanFailed {
            job_type,
            library_name,
            error,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let err = truncate(error, 200);
            format!(
                "❌ <b>Scan failed</b>\n\
                 Library: {lib}\n\
                 Type: {job_type}\n\
                 Error: {err}"
            )
        }
        NotificationEvent::ScanCancelled {
            job_type,
            library_name,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            format!(
                "⏹ <b>Scan cancelled</b>\n\
                 Library: {lib}\n\
                 Type: {job_type}"
            )
        }
        NotificationEvent::ThumbnailCompleted {
            job_type,
            library_name,
            duration_seconds,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let duration = format_duration(*duration_seconds);
            format!(
                "🖼 <b>Thumbnails completed</b>\n\
                 Library: {lib}\n\
                 Type: {job_type}\n\
                 Duration: {duration}"
            )
        }
        NotificationEvent::ThumbnailFailed {
            job_type,
            library_name,
            error,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let err = truncate(error, 200);
            format!(
                "❌ <b>Thumbnails failed</b>\n\
                 Library: {lib}\n\
                 Type: {job_type}\n\
                 Error: {err}"
            )
        }
        // `thumbnail_path` is intentionally ignored here (`..`): it is only
        // used by `event_thumbnail` to attach a photo, not in the caption text.
        NotificationEvent::ConversionCompleted {
            library_name,
            book_title,
            ..
        } => {
            let lib = library_name.as_deref().unwrap_or("Unknown");
            let title = book_title.as_deref().unwrap_or("Unknown");
            format!(
                "🔄 <b>CBR→CBZ conversion completed</b>\n\
                 Library: {lib}\n\
                 Book: {title}"
            )
        }
        NotificationEvent::ConversionFailed {
            library_name,
            book_title,
            error,
            ..
        } => {
            let lib = library_name.as_deref().unwrap_or("Unknown");
            let title = book_title.as_deref().unwrap_or("Unknown");
            let err = truncate(error, 200);
            format!(
                "❌ <b>CBR→CBZ conversion failed</b>\n\
                 Library: {lib}\n\
                 Book: {title}\n\
                 Error: {err}"
            )
        }
        NotificationEvent::MetadataApproved {
            series_name,
            provider,
            ..
        } => {
            format!(
                "🔗 <b>Metadata linked</b>\n\
                 Series: {series_name}\n\
                 Provider: {provider}"
            )
        }
        NotificationEvent::MetadataBatchCompleted {
            library_name,
            total_series,
            processed,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            format!(
                "🔍 <b>Metadata batch completed</b>\n\
                 Library: {lib}\n\
                 Series processed: {processed}/{total_series}"
            )
        }
        NotificationEvent::MetadataBatchFailed {
            library_name,
            error,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let err = truncate(error, 200);
            format!(
                "❌ <b>Metadata batch failed</b>\n\
                 Library: {lib}\n\
                 Error: {err}"
            )
        }
        NotificationEvent::MetadataRefreshCompleted {
            library_name,
            refreshed,
            unchanged,
            errors,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            format!(
                "🔄 <b>Metadata refresh completed</b>\n\
                 Library: {lib}\n\
                 Updated: {refreshed}\n\
                 Unchanged: {unchanged}\n\
                 Errors: {errors}"
            )
        }
        NotificationEvent::MetadataRefreshFailed {
            library_name,
            error,
        } => {
            let lib = library_name.as_deref().unwrap_or("All libraries");
            let err = truncate(error, 200);
            format!(
                "❌ <b>Metadata refresh failed</b>\n\
                 Library: {lib}\n\
                 Error: {err}"
            )
        }
    }
}
|
||||
|
||||
/// Truncate `s` to at most `max` bytes, appending "…" when shortened.
///
/// Fix: the original sliced with `&s[..max]`, which panics when byte `max`
/// falls inside a multi-byte UTF-8 character — easy to hit since this is fed
/// arbitrary error strings. We now back off to the nearest char boundary.
fn truncate(s: &str, max: usize) -> String {
    if s.len() <= max {
        return s.to_string();
    }
    // Largest char boundary <= max; byte 0 is always a boundary so this
    // terminates.
    let mut cut = max;
    while !s.is_char_boundary(cut) {
        cut -= 1;
    }
    format!("{}…", &s[..cut])
}
|
||||
|
||||
/// Format a duration in seconds as "Ns" under a minute, else "MmSs".
fn format_duration(secs: u64) -> String {
    match (secs / 60, secs % 60) {
        (0, s) => format!("{s}s"),
        (m, s) => format!("{m}m{s}s"),
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public entry point — fire & forget
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Returns whether this event type is enabled in the config.
///
/// One-to-one mapping from each `NotificationEvent` variant to its
/// `EventToggles` flag; exhaustive so a new variant forces a new toggle.
fn is_event_enabled(config: &TelegramConfig, event: &NotificationEvent) -> bool {
    match event {
        NotificationEvent::ScanCompleted { .. } => config.events.scan_completed,
        NotificationEvent::ScanFailed { .. } => config.events.scan_failed,
        NotificationEvent::ScanCancelled { .. } => config.events.scan_cancelled,
        NotificationEvent::ThumbnailCompleted { .. } => config.events.thumbnail_completed,
        NotificationEvent::ThumbnailFailed { .. } => config.events.thumbnail_failed,
        NotificationEvent::ConversionCompleted { .. } => config.events.conversion_completed,
        NotificationEvent::ConversionFailed { .. } => config.events.conversion_failed,
        NotificationEvent::MetadataApproved { .. } => config.events.metadata_approved,
        NotificationEvent::MetadataBatchCompleted { .. } => config.events.metadata_batch_completed,
        NotificationEvent::MetadataBatchFailed { .. } => config.events.metadata_batch_failed,
        NotificationEvent::MetadataRefreshCompleted { .. } => config.events.metadata_refresh_completed,
        NotificationEvent::MetadataRefreshFailed { .. } => config.events.metadata_refresh_failed,
    }
}
|
||||
|
||||
/// Extract thumbnail path from event if present and file exists on disk.
|
||||
fn event_thumbnail(event: &NotificationEvent) -> Option<&str> {
|
||||
let path = match event {
|
||||
NotificationEvent::ConversionCompleted { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||
NotificationEvent::ConversionFailed { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||
NotificationEvent::MetadataApproved { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||
_ => None,
|
||||
};
|
||||
path.filter(|p| std::path::Path::new(p).exists())
|
||||
}
|
||||
|
||||
/// Load config + format + send in a spawned task. Errors are only logged.
///
/// Fire-and-forget entry point: the pool is moved into a `tokio::spawn`ed
/// task so callers never block on network I/O. Returns silently when
/// Telegram is unconfigured/disabled or the event's toggle is off.
pub fn notify(pool: PgPool, event: NotificationEvent) {
    tokio::spawn(async move {
        let config = match load_telegram_config(&pool).await {
            Some(c) => c,
            None => return, // disabled or not configured
        };

        if !is_event_enabled(&config, &event) {
            return;
        }

        let text = format_event(&event);
        // Prefer a photo message when the event carries an existing thumbnail;
        // on photo upload failure, degrade gracefully to a plain text message.
        let sent = if let Some(photo) = event_thumbnail(&event) {
            match send_telegram_photo(&config, &text, photo).await {
                Ok(()) => Ok(()),
                Err(e) => {
                    warn!("[TELEGRAM] Photo send failed, falling back to text: {e}");
                    send_telegram(&config, &text).await
                }
            }
        } else {
            send_telegram(&config, &text).await
        };

        // Failures are logged only — notifications must never fail a job.
        match sent {
            Ok(()) => info!("[TELEGRAM] Notification sent"),
            Err(e) => warn!("[TELEGRAM] Failed to send notification: {e}"),
        }
    });
}
|
||||
@@ -9,6 +9,7 @@ pub enum BookFormat {
|
||||
Cbz,
|
||||
Cbr,
|
||||
Pdf,
|
||||
Epub,
|
||||
}
|
||||
|
||||
impl BookFormat {
|
||||
@@ -17,6 +18,7 @@ impl BookFormat {
|
||||
Self::Cbz => "cbz",
|
||||
Self::Cbr => "cbr",
|
||||
Self::Pdf => "pdf",
|
||||
Self::Epub => "epub",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -35,6 +37,7 @@ pub fn detect_format(path: &Path) -> Option<BookFormat> {
|
||||
"cbz" => Some(BookFormat::Cbz),
|
||||
"cbr" => Some(BookFormat::Cbr),
|
||||
"pdf" => Some(BookFormat::Pdf),
|
||||
"epub" => Some(BookFormat::Epub),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -144,6 +147,7 @@ pub fn parse_metadata(
|
||||
BookFormat::Cbz => parse_cbz_page_count(path).ok(),
|
||||
BookFormat::Cbr => parse_cbr_page_count(path).ok(),
|
||||
BookFormat::Pdf => parse_pdf_page_count(path).ok(),
|
||||
BookFormat::Epub => parse_epub_page_count(path).ok(),
|
||||
};
|
||||
|
||||
Ok(meta)
|
||||
@@ -156,6 +160,7 @@ pub fn analyze_book(path: &Path, format: BookFormat, pdf_render_scale: u32) -> R
|
||||
BookFormat::Cbz => analyze_cbz(path, true),
|
||||
BookFormat::Cbr => analyze_cbr(path, true),
|
||||
BookFormat::Pdf => analyze_pdf(path, pdf_render_scale),
|
||||
BookFormat::Epub => analyze_epub(path),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -530,6 +535,7 @@ pub fn list_archive_images(path: &Path, format: BookFormat) -> Result<Vec<String
|
||||
BookFormat::Cbz => list_cbz_images(path),
|
||||
BookFormat::Cbr => list_cbr_images(path),
|
||||
BookFormat::Pdf => Err(anyhow::anyhow!("list_archive_images not applicable for PDF")),
|
||||
BookFormat::Epub => get_epub_image_index(path),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -629,6 +635,7 @@ pub fn extract_image_by_name(path: &Path, format: BookFormat, image_name: &str)
|
||||
BookFormat::Cbz => extract_cbz_by_name(path, image_name),
|
||||
BookFormat::Cbr => extract_cbr_by_name(path, image_name),
|
||||
BookFormat::Pdf => Err(anyhow::anyhow!("use extract_page for PDF")),
|
||||
BookFormat::Epub => extract_cbz_by_name(path, image_name),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -721,6 +728,7 @@ pub fn extract_page(path: &Path, format: BookFormat, page_number: u32, pdf_rende
|
||||
let width = if pdf_render_width == 0 { 1200 } else { pdf_render_width };
|
||||
render_pdf_page_n(path, page_number, width)
|
||||
}
|
||||
BookFormat::Epub => extract_epub_page(path, page_number),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -894,6 +902,340 @@ fn render_pdf_page_n(path: &Path, page_number: u32, width: u32) -> Result<Vec<u8
|
||||
}
|
||||
|
||||
|
||||
// ============================================================
|
||||
// EPUB support — spine-aware image index with cache
|
||||
// ============================================================
|
||||
|
||||
/// Cache of ordered image paths per EPUB file. Avoids re-parsing OPF/XHTML on every page request.
// NOTE(review): entries are never evicted, so the cache grows with the number
// of distinct EPUBs opened and can go stale if a file is replaced in place.
static EPUB_INDEX_CACHE: OnceLock<Mutex<HashMap<PathBuf, Vec<String>>>> = OnceLock::new();

// Lazily initialize the process-wide cache on first access.
fn epub_index_cache() -> &'static Mutex<HashMap<PathBuf, Vec<String>>> {
    EPUB_INDEX_CACHE.get_or_init(|| Mutex::new(HashMap::new()))
}
|
||||
|
||||
// Pre-compiled regex patterns for EPUB XML parsing (compiled once on first use)
// container.xml: <rootfile full-path="..."> pointing at the OPF document.
static RE_EPUB_ROOTFILE: OnceLock<regex::Regex> = OnceLock::new();
// OPF manifest <item .../> elements (whole attribute blob captured).
static RE_EPUB_ITEM: OnceLock<regex::Regex> = OnceLock::new();
// OPF spine <itemref idref="..."> entries (reading order).
static RE_EPUB_ITEMREF: OnceLock<regex::Regex> = OnceLock::new();
// XHTML <img src="..."> references inside spine pages.
static RE_EPUB_IMG_SRC: OnceLock<regex::Regex> = OnceLock::new();
// SVG <image [xlink:]href="..."> references inside spine pages.
static RE_EPUB_SVG_HREF: OnceLock<regex::Regex> = OnceLock::new();
// Individual attributes pulled out of a manifest <item> attribute blob.
static RE_EPUB_ATTR_ID: OnceLock<regex::Regex> = OnceLock::new();
static RE_EPUB_ATTR_HREF: OnceLock<regex::Regex> = OnceLock::new();
static RE_EPUB_ATTR_MEDIA: OnceLock<regex::Regex> = OnceLock::new();
|
||||
|
||||
/// One `<item>` entry from the OPF manifest: resolved archive path plus its
/// declared media type.
struct EpubManifestItem {
    // Archive path, already resolved against the OPF directory (see parse_epub_opf).
    href: String,
    // Value of the item's media-type attribute, e.g. "image/jpeg".
    media_type: String,
}
|
||||
|
||||
/// Build the ordered list of image paths for an EPUB file.
/// Walks the OPF spine to determine reading order, parses XHTML/SVG pages
/// for image references, and falls back to CBZ-style listing if no
/// images are found through the spine.
///
/// Errors when the file is not a readable ZIP, when the EPUB skeleton
/// (container.xml / rootfile / OPF) is missing, or when no images are found
/// at all.
fn build_epub_image_index(path: &Path) -> Result<Vec<String>> {
    let file = std::fs::File::open(path)
        .with_context(|| format!("cannot open epub: {}", path.display()))?;
    let mut archive = zip::ZipArchive::new(file)
        .with_context(|| format!("invalid epub zip: {}", path.display()))?;

    // 1. Find OPF path from META-INF/container.xml
    let opf_path = {
        let mut entry = archive
            .by_name("META-INF/container.xml")
            .context("missing META-INF/container.xml — not a valid EPUB")?;
        let mut buf = Vec::new();
        entry.read_to_end(&mut buf)?;
        let xml = String::from_utf8_lossy(&buf);
        let re = RE_EPUB_ROOTFILE.get_or_init(|| {
            regex::Regex::new(r#"<(?:\w+:)?rootfile[^>]+full-path="([^"]+)""#).unwrap()
        });
        re.captures(&xml)
            .and_then(|c| c.get(1))
            .map(|m| decode_xml_entities(m.as_str()))
            .context("no rootfile found in container.xml")?
    };

    // Manifest hrefs are relative to the OPF's directory.
    let opf_dir = std::path::Path::new(&opf_path)
        .parent()
        .map(|p| p.to_string_lossy().to_string())
        .unwrap_or_default();

    // 2. Parse OPF manifest + spine
    let (manifest, spine_idrefs) = {
        let mut entry = archive
            .by_name(&opf_path)
            .with_context(|| format!("missing OPF file: {}", opf_path))?;
        let mut buf = Vec::new();
        entry.read_to_end(&mut buf)?;
        let xml = String::from_utf8_lossy(&buf);
        parse_epub_opf(&xml, &opf_dir)?
    };

    // 3. Walk spine entries to build ordered image list
    let re_img = RE_EPUB_IMG_SRC.get_or_init(|| {
        regex::Regex::new(r#"(?i)<img\s[^>]*src=["']([^"']+)["']"#).unwrap()
    });
    let re_svg = RE_EPUB_SVG_HREF.get_or_init(|| {
        regex::Regex::new(r#"(?i)<image\s[^>]*(?:xlink:)?href=["']([^"']+)["']"#).unwrap()
    });

    let mut images: Vec<String> = Vec::new();
    // Dedupe: the same image may be referenced from several pages.
    let mut seen = std::collections::HashSet::new();

    for idref in &spine_idrefs {
        let item = match manifest.get(idref.as_str()) {
            Some(item) => item,
            None => continue, // dangling idref — skip
        };

        // Direct raster image in spine (rare but possible)
        if item.media_type.starts_with("image/") && !item.media_type.contains("svg") {
            if seen.insert(item.href.clone()) {
                images.push(item.href.clone());
            }
            continue;
        }

        // Read XHTML/SVG content — entry is dropped at end of match arm, releasing archive borrow
        let content = match archive.by_name(&item.href) {
            Ok(mut entry) => {
                let mut buf = Vec::new();
                match entry.read_to_end(&mut buf) {
                    Ok(_) => String::from_utf8_lossy(&buf).to_string(),
                    Err(_) => continue, // unreadable page — skip
                }
            }
            Err(_) => continue, // page missing from archive — skip
        };

        // Image references inside a page are relative to that page's directory.
        let content_dir = std::path::Path::new(&item.href)
            .parent()
            .map(|p| p.to_string_lossy().to_string())
            .unwrap_or_default();

        // Extract <img src="..."> and <image [xlink:]href="...">
        for re in [re_img, re_svg] {
            for cap in re.captures_iter(&content) {
                if let Some(src) = cap.get(1) {
                    let src_str = src.as_str();
                    // Skip inline base64 images — they have no archive path.
                    if src_str.starts_with("data:") {
                        continue;
                    }
                    let decoded = decode_xml_entities(&percent_decode_epub(src_str));
                    let resolved = resolve_epub_path(&content_dir, &decoded);
                    if seen.insert(resolved.clone()) {
                        images.push(resolved);
                    }
                }
            }
        }
    }

    // 4. Fallback: no images from spine → list all images in ZIP (CBZ-style)
    if images.is_empty() {
        for i in 0..archive.len() {
            if let Ok(entry) = archive.by_index(i) {
                let name = entry.name().to_string();
                if is_image_name(&name.to_ascii_lowercase()) && seen.insert(name.clone()) {
                    images.push(name);
                }
            }
        }
        // Natural sort so "page2" < "page10".
        images.sort_by(|a, b| natord::compare(a, b));
    }

    if images.is_empty() {
        return Err(anyhow::anyhow!("no images found in epub: {}", path.display()));
    }

    Ok(images)
}
|
||||
|
||||
/// Parse an OPF document with regexes: returns the manifest (item id → item)
/// and the spine's ordered list of idrefs. `opf_dir` is prepended to each
/// manifest href so all returned paths are relative to the archive root.
/// Manifest items missing any of id/href/media-type are silently dropped.
fn parse_epub_opf(
    xml: &str,
    opf_dir: &str,
) -> Result<(HashMap<String, EpubManifestItem>, Vec<String>)> {
    let re_item = RE_EPUB_ITEM.get_or_init(|| {
        regex::Regex::new(r#"(?s)<(?:\w+:)?item\s([^>]+?)/?>"#).unwrap()
    });
    let re_itemref = RE_EPUB_ITEMREF.get_or_init(|| {
        regex::Regex::new(r#"<(?:\w+:)?itemref\s[^>]*idref="([^"]+)""#).unwrap()
    });
    let re_id = RE_EPUB_ATTR_ID.get_or_init(|| {
        regex::Regex::new(r#"(?:^|\s)id="([^"]+)""#).unwrap()
    });
    let re_href = RE_EPUB_ATTR_HREF.get_or_init(|| {
        regex::Regex::new(r#"(?:^|\s)href="([^"]+)""#).unwrap()
    });
    let re_media = RE_EPUB_ATTR_MEDIA.get_or_init(|| {
        regex::Regex::new(r#"media-type="([^"]+)""#).unwrap()
    });

    // Manifest: every <item> with id, href and media-type attributes.
    let mut manifest: HashMap<String, EpubManifestItem> = HashMap::new();
    for cap in re_item.captures_iter(xml) {
        if let Some(attrs) = cap.get(1) {
            let a = attrs.as_str();
            let id = re_id.captures(a).and_then(|c| c.get(1));
            let href = re_href.captures(a).and_then(|c| c.get(1));
            let media = re_media.captures(a).and_then(|c| c.get(1));

            if let (Some(id), Some(href), Some(media)) = (id, href, media) {
                // hrefs may be percent-encoded and/or XML-escaped in the OPF.
                let decoded_href = decode_xml_entities(&percent_decode_epub(href.as_str()));
                let resolved = resolve_epub_path(opf_dir, &decoded_href);
                manifest.insert(
                    id.as_str().to_string(),
                    EpubManifestItem {
                        href: resolved,
                        media_type: media.as_str().to_string(),
                    },
                );
            }
        }
    }

    // Spine: document order of <itemref idref="..."> defines reading order.
    let spine_idrefs: Vec<String> = re_itemref
        .captures_iter(xml)
        .filter_map(|c| c.get(1).map(|m| m.as_str().to_string()))
        .collect();

    Ok((manifest, spine_idrefs))
}
|
||||
|
||||
/// Get the cached image index for an EPUB, building it on first access.
|
||||
fn get_epub_image_index(path: &Path) -> Result<Vec<String>> {
|
||||
{
|
||||
let cache = epub_index_cache().lock().unwrap();
|
||||
if let Some(names) = cache.get(path) {
|
||||
return Ok(names.clone());
|
||||
}
|
||||
}
|
||||
let images = build_epub_image_index(path)?;
|
||||
{
|
||||
let mut cache = epub_index_cache().lock().unwrap();
|
||||
cache.insert(path.to_path_buf(), images.clone());
|
||||
}
|
||||
Ok(images)
|
||||
}
|
||||
|
||||
/// Page count = number of images reachable through the spine (or the
/// CBZ-style fallback). Note: calls `build_epub_image_index` directly, so it
/// bypasses the index cache.
fn parse_epub_page_count(path: &Path) -> Result<i32> {
    let images = build_epub_image_index(path)?;
    Ok(images.len() as i32)
}
|
||||
|
||||
/// Analyze an EPUB: returns (page count, bytes of the first readable,
/// non-empty image in reading order — used as the cover image).
/// Errors when the archive cannot be opened or none of the indexed images
/// can be read.
fn analyze_epub(path: &Path) -> Result<(i32, Vec<u8>)> {
    let images = get_epub_image_index(path)?;
    let count = images.len() as i32;

    let file = std::fs::File::open(path)
        .with_context(|| format!("cannot open epub: {}", path.display()))?;
    let mut archive = zip::ZipArchive::new(file)?;

    // Walk in reading order and return the first image that actually reads.
    for img_path in &images {
        if let Ok(mut entry) = archive.by_name(img_path) {
            let mut buf = Vec::new();
            if entry.read_to_end(&mut buf).is_ok() && !buf.is_empty() {
                return Ok((count, buf));
            }
        }
    }

    Err(anyhow::anyhow!(
        "no readable images in epub: {}",
        path.display()
    ))
}
|
||||
|
||||
fn extract_epub_page(path: &Path, page_number: u32) -> Result<Vec<u8>> {
|
||||
let images = get_epub_image_index(path)?;
|
||||
let index = page_number as usize - 1;
|
||||
let img_path = images
|
||||
.get(index)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"page {} out of range (total: {})",
|
||||
page_number,
|
||||
images.len()
|
||||
)
|
||||
})?;
|
||||
|
||||
let file = std::fs::File::open(path)
|
||||
.with_context(|| format!("cannot open epub: {}", path.display()))?;
|
||||
let mut archive = zip::ZipArchive::new(file)?;
|
||||
let mut entry = archive
|
||||
.by_name(img_path)
|
||||
.with_context(|| format!("image '{}' not found in epub", img_path))?;
|
||||
let mut buf = Vec::new();
|
||||
entry.read_to_end(&mut buf)?;
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
// --- EPUB path/encoding helpers ---
|
||||
|
||||
fn resolve_epub_path(base_dir: &str, href: &str) -> String {
|
||||
if let Some(stripped) = href.strip_prefix('/') {
|
||||
return normalize_epub_path(stripped);
|
||||
}
|
||||
if base_dir.is_empty() {
|
||||
return normalize_epub_path(href);
|
||||
}
|
||||
normalize_epub_path(&format!("{}/{}", base_dir, href))
|
||||
}
|
||||
|
||||
/// Collapse "." segments, empty segments (doubled slashes) and ".." parents
/// in a '/'-separated archive path. ".." at the root is dropped silently.
fn normalize_epub_path(path: &str) -> String {
    let mut stack: Vec<&str> = Vec::new();
    for segment in path.split('/') {
        if segment == ".." {
            stack.pop();
        } else if !segment.is_empty() && segment != "." {
            stack.push(segment);
        }
    }
    stack.join("/")
}
|
||||
|
||||
/// Decode %XX percent-escapes in an EPUB href. Malformed escapes are kept
/// verbatim; the result is interpreted as (lossy) UTF-8.
fn percent_decode_epub(s: &str) -> String {
    // Fast path: nothing to decode.
    if !s.contains('%') {
        return s.to_string();
    }

    let raw = s.as_bytes();
    let mut out = Vec::with_capacity(raw.len());
    let mut pos = 0;
    while pos < raw.len() {
        if raw[pos] == b'%' && pos + 2 < raw.len() {
            // Hex digits inlined via char::to_digit (same accepted set as a
            // hand-rolled 0-9/a-f/A-F table).
            let hi = (raw[pos + 1] as char).to_digit(16);
            let lo = (raw[pos + 2] as char).to_digit(16);
            if let (Some(hi), Some(lo)) = (hi, lo) {
                out.push((hi * 16 + lo) as u8);
                pos += 3;
                continue;
            }
        }
        // Not a valid escape — copy the byte through unchanged.
        out.push(raw[pos]);
        pos += 1;
    }
    String::from_utf8_lossy(&out).to_string()
}
|
||||
|
||||
/// Value of one ASCII hex digit (0-9, a-f, A-F); `None` for anything else.
fn epub_hex_val(b: u8) -> Option<u8> {
    (b as char).to_digit(16).map(|d| d as u8)
}
|
||||
|
||||
/// Decode the five predefined XML entities in an href/attribute value.
///
/// Fix: the entity literals in this function had themselves been
/// entity-decoded at some point (e.g. `.replace("&", "&")`), turning every
/// replacement into a no-op. Reconstructed the intended table. `&amp;` is
/// decoded LAST so that e.g. "&amp;lt;" yields "&lt;" and not "<".
fn decode_xml_entities(s: &str) -> String {
    // Fast path: no entities present.
    if !s.contains('&') {
        return s.to_string();
    }
    s.replace("&lt;", "<")
        .replace("&gt;", ">")
        .replace("&quot;", "\"")
        .replace("&apos;", "'")
        .replace("&amp;", "&")
}
|
||||
|
||||
/// Convert a CBR file to CBZ in-place (same directory, same stem).
|
||||
///
|
||||
/// The conversion is safe: a `.cbz.tmp` file is written first, verified, then
|
||||
|
||||
310
docs/FEATURES.md
Normal file
310
docs/FEATURES.md
Normal file
@@ -0,0 +1,310 @@
|
||||
# Stripstream Librarian — Features & Business Rules
|
||||
|
||||
## Libraries
|
||||
|
||||
### Multi-Library Management
|
||||
- Create and manage multiple independent libraries, each with its own root path
|
||||
- Enable/disable libraries individually
|
||||
- Delete a library cascades to all its books, jobs, and metadata
|
||||
|
||||
### Scanning & Indexing
|
||||
- **Incremental scan**: uses directory mtime tracking to skip unchanged directories
|
||||
- **Full rebuild**: force re-walk all directories, ignoring cached mtimes
|
||||
- **Rescan**: deep rescan to discover newly supported formats
|
||||
- **Two-phase pipeline**:
|
||||
- Phase 1 (Discovery): fast filename-based metadata extraction (no archive I/O)
|
||||
- Phase 2 (Analysis): extract page counts, first page image from archives
|
||||
|
||||
### Real-Time Monitoring
|
||||
- **Automatic periodic scanning**: configurable interval (default 5 seconds)
|
||||
- **Filesystem watcher**: real-time detection of file changes for instant indexing
|
||||
- Each can be toggled per library (`monitor_enabled`, `watcher_enabled`)
|
||||
|
||||
---
|
||||
|
||||
## Books
|
||||
|
||||
### Format Support
|
||||
- **CBZ** (ZIP-based comic archives)
|
||||
- **CBR** (RAR-based comic archives)
|
||||
- **PDF**
|
||||
- **EPUB**
|
||||
- Automatic format detection from file extension and magic bytes
|
||||
|
||||
### Metadata Extraction
|
||||
- **Title**: derived from filename or external metadata
|
||||
- **Series**: derived from directory structure (first directory level under library root)
|
||||
- **Volume**: extracted from filename with pattern detection:
|
||||
- `T##` (Tome) — most common for French comics
|
||||
- `Vol.##`, `Vol ##`, `Volume ##`
|
||||
- `###` (standalone number)
|
||||
- `-## ` (dash-separated)
|
||||
- **Author(s)**: single scalar and array support
|
||||
- **Page count**: extracted from archive analysis
|
||||
- **Language**, **kind** (ebook, comic, bd)
|
||||
|
||||
### Thumbnails
|
||||
- Generated from the first page of each archive
|
||||
- Output format configurable: WebP (default), JPEG, PNG
|
||||
- Configurable dimensions (default 300×400)
|
||||
- Lazy generation: created on first access if missing
|
||||
- Bulk operations: rebuild missing or regenerate all
|
||||
|
||||
### CBR to CBZ Conversion
|
||||
- Convert RAR archives to ZIP format
|
||||
- Tracked as background job with progress
|
||||
|
||||
---
|
||||
|
||||
## Series
|
||||
|
||||
### Automatic Aggregation
|
||||
- Series derived from directory structure during scanning
|
||||
- Books without series grouped as "unclassified"
|
||||
|
||||
### Series Metadata
|
||||
- Description, publisher, start year, status (`ongoing`, `ended`, `completed`, `on_hold`, `hiatus`)
|
||||
- Total volume count (from external providers)
|
||||
- Authors (aggregated from books or metadata)
|
||||
|
||||
### Filtering & Discovery
|
||||
- Filter by: series name (partial match), reading status, series status, metadata provider linkage
|
||||
- Sort by: name, reading status, book count
|
||||
- **Missing books detection**: identifies gaps in volume numbering within a series
|
||||
|
||||
---
|
||||
|
||||
## Reading Progress
|
||||
|
||||
### Per-Book Tracking
|
||||
- Three states: `unread` (default), `reading`, `read`
|
||||
- Current page tracking when status is `reading`
|
||||
- `last_read_at` timestamp auto-updated
|
||||
|
||||
### Series-Level Status
|
||||
- Calculated from book statuses:
|
||||
- All read → series `read`
|
||||
- None read → series `unread`
|
||||
- Mixed → series `reading`
|
||||
|
||||
### Bulk Operations
|
||||
- Mark entire series as read (updates all books)
|
||||
|
||||
---
|
||||
|
||||
## Search & Discovery
|
||||
|
||||
### Full-Text Search
|
||||
- PostgreSQL-based (`ILIKE` + `pg_trgm`)
|
||||
- Searches across: book titles, series names, authors (scalar and array fields), series metadata authors
|
||||
- Case-insensitive partial matching
|
||||
- Library-scoped filtering
|
||||
|
||||
### Results
|
||||
- Book hits: title, authors, series, volume, language, kind
|
||||
- Series hits: name, book count, read count, first book (for linking)
|
||||
- Processing time included in response
|
||||
|
||||
---
|
||||
|
||||
## Authors
|
||||
|
||||
- Unique author aggregation from books and series metadata
|
||||
- Per-author book and series count
|
||||
- Searchable by name (partial match)
|
||||
- Sortable by name or book count
|
||||
|
||||
---
|
||||
|
||||
## External Metadata
|
||||
|
||||
### Supported Providers
|
||||
| Provider | Focus |
|
||||
|----------|-------|
|
||||
| Google Books | General books (default fallback) |
|
||||
| ComicVine | Comics |
|
||||
| Bédéthèque | Franco-Belgian comics |
|
||||
| AniList | Manga/anime |
|
||||
| Open Library | General books |
|
||||
|
||||
### Provider Configuration
|
||||
- Global default provider with library-level override
|
||||
- Fallback provider if primary is unavailable
|
||||
|
||||
### Matching Workflow
|
||||
1. **Search**: query a provider, get candidates with confidence scores
|
||||
2. **Match**: link a series to an external result (status `pending`)
|
||||
3. **Approve**: validate and sync metadata to series and books
|
||||
4. **Reject**: discard a match
|
||||
|
||||
### Batch Processing
|
||||
- Auto-match all series in a library via `metadata_batch` job
|
||||
- Configurable confidence threshold
|
||||
- Result statuses: `auto_matched`, `no_results`, `too_many_results`, `low_confidence`, `already_linked`
|
||||
|
||||
### Metadata Refresh
|
||||
- Update approved links with latest data from providers
|
||||
- Change tracking reports per series/book
|
||||
- Non-destructive: only updates when provider has new data
|
||||
|
||||
### Field Locking
|
||||
- Individual book fields can be locked to prevent external sync from overwriting manual edits
|
||||
|
||||
---
|
||||
|
||||
## External Integrations
|
||||
|
||||
### Komga Sync
|
||||
- Import reading progress from a Komga server
|
||||
- Matches local series/books by name
|
||||
- Detailed sync report: matched, already read, newly marked, unmatched
|
||||
|
||||
### Prowlarr (Indexer Search)
|
||||
- Search Prowlarr for missing volumes in a series
|
||||
- Volume pattern matching against release titles
|
||||
- Results: title, size, seeders/leechers, download URL, matched missing volumes
|
||||
|
||||
### qBittorrent
|
||||
- Add torrents directly from Prowlarr search results
|
||||
- Connection test endpoint
|
||||
|
||||
---
|
||||
|
||||
## Page Rendering & Caching
|
||||
|
||||
### Page Extraction
|
||||
- Render any page from supported archive formats
|
||||
- 1-indexed page numbers
|
||||
|
||||
### Image Processing
|
||||
- Output formats: original, JPEG, PNG, WebP
|
||||
- Quality parameter (1–100)
|
||||
- Max width parameter (1–2160 px)
|
||||
- Configurable resampling filter: lanczos3, nearest, triangle/bilinear
|
||||
- Concurrent render limit (default 8) with semaphore
|
||||
|
||||
### Caching
|
||||
- **LRU in-memory cache**: 512 entries
|
||||
- **Disk cache**: SHA256-keyed, two-level directory structure
|
||||
- Cache key = hash(path + page + format + quality + width)
|
||||
- Configurable cache directory and max size
|
||||
- Manual cache clear via settings
|
||||
|
||||
---
|
||||
|
||||
## Background Jobs
|
||||
|
||||
### Job Types
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `rebuild` | Incremental scan |
|
||||
| `full_rebuild` | Full filesystem rescan |
|
||||
| `rescan` | Deep rescan for new formats |
|
||||
| `thumbnail_rebuild` | Generate missing thumbnails |
|
||||
| `thumbnail_regenerate` | Clear and regenerate all thumbnails |
|
||||
| `cbr_to_cbz` | Convert RAR to ZIP |
|
||||
| `metadata_batch` | Auto-match series to metadata |
|
||||
| `metadata_refresh` | Update approved metadata links |
|
||||
|
||||
### Job Lifecycle
|
||||
- Status flow: `pending` → `running` → `success` | `failed` | `cancelled`
|
||||
- Intermediate statuses: `extracting_pages`, `generating_thumbnails`
|
||||
- Real-time progress via **Server-Sent Events** (SSE)
|
||||
- Per-file error tracking (non-fatal: job continues on errors)
|
||||
- Cancellation support for pending/running jobs
|
||||
|
||||
### Progress Tracking
|
||||
- Percentage (0–100), current file, processed/total counts
|
||||
- Timing: started_at, finished_at, phase2_started_at
|
||||
- Stats JSON blob with job-specific metrics
|
||||
|
||||
---
|
||||
|
||||
## Authentication & Security
|
||||
|
||||
### Token System
|
||||
- **Bootstrap token**: admin token via `API_BOOTSTRAP_TOKEN` env var
|
||||
- **API tokens**: create, list, revoke with scopes
|
||||
- Token format: `stl_{prefix}_{secret}` with Argon2 hashing
|
||||
- Expiration dates, last usage tracking, revocation
|
||||
|
||||
### Access Control
|
||||
- **Two scopes**: `admin` (full access) and `read` (read-only)
|
||||
- Route-level middleware enforcement
|
||||
- Rate limiting: configurable sliding window (default 120 req/s)
|
||||
|
||||
---
|
||||
|
||||
## Backoffice (Web UI)
|
||||
|
||||
### Dashboard
|
||||
- Statistics cards: books, series, authors, libraries
|
||||
- Donut charts: reading status breakdown, format distribution
|
||||
- Bar charts: books per language
|
||||
- Per-library reading progress bars
|
||||
- Top series by book/page count
|
||||
- Monthly addition timeline
|
||||
- Metadata coverage stats
|
||||
|
||||
### Pages
|
||||
- **Libraries**: list, create, delete, configure monitoring and metadata provider
|
||||
- **Books**: global list with filtering/sorting, detail view with metadata and page rendering
|
||||
- **Series**: global list, per-library view, detail with metadata management
|
||||
- **Authors**: list with book/series counts, detail with author's books
|
||||
- **Jobs**: history, live progress via SSE, error details
|
||||
- **Tokens**: create, list, revoke API tokens
|
||||
- **Settings**: image processing, cache, thumbnails, external services (Prowlarr, qBittorrent)
|
||||
|
||||
### Interactive Features
|
||||
- Real-time search with suggestions
|
||||
- Metadata search and matching modals
|
||||
- Prowlarr search modal for missing volumes
|
||||
- Folder browser/picker for library paths
|
||||
- Book/series editing forms
|
||||
- Quick reading status toggles
|
||||
- CBR to CBZ conversion trigger
|
||||
|
||||
---
|
||||
|
||||
## API
|
||||
|
||||
### Documentation
|
||||
- OpenAPI/Swagger UI available at `/swagger-ui`
|
||||
- Health check (`/health`), readiness (`/ready`), Prometheus metrics (`/metrics`)
|
||||
|
||||
### Public Endpoints (no auth)
|
||||
- `GET /health`, `GET /ready`, `GET /metrics`, `GET /swagger-ui`
|
||||
|
||||
### Read Endpoints (read scope)
|
||||
- Libraries, books, series, authors listing and detail
|
||||
- Book pages and thumbnails
|
||||
- Reading progress get/update
|
||||
- Full-text search, collection statistics
|
||||
|
||||
### Admin Endpoints (admin scope)
|
||||
- Library CRUD and configuration
|
||||
- Book metadata editing, CBR conversion
|
||||
- Series metadata editing
|
||||
- Indexing job management (trigger, cancel, stream)
|
||||
- API token management
|
||||
- Metadata operations (search, match, approve, reject, batch, refresh)
|
||||
- External integrations (Prowlarr, qBittorrent, Komga)
|
||||
- Application settings and cache management
|
||||
|
||||
---
|
||||
|
||||
## Database
|
||||
|
||||
### Key Design Decisions
|
||||
- PostgreSQL with `pg_trgm` for full-text search (no external search engine)
|
||||
- All deletions cascade from libraries
|
||||
- Unique constraints: file paths, token prefixes, metadata links (library + series + provider)
|
||||
- Directory mtime caching for incremental scan optimization
|
||||
- Connection pool: 10 (API), 20 (indexer)
|
||||
|
||||
### Archive Resilience
|
||||
- CBZ: fallback streaming reader if central directory corrupted
|
||||
- CBR: RAR extraction via system `unar`, fallback to CBZ parsing
|
||||
- PDF: `pdfinfo` for page count, `pdftoppm` for rendering
|
||||
- EPUB: ZIP-based extraction
|
||||
- FD exhaustion detection: aborts if too many consecutive IO errors
|
||||
5
infra/migrations/0037_add_metadata_refresh_job_type.sql
Normal file
5
infra/migrations/0037_add_metadata_refresh_job_type.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Allow metadata_refresh job type in index_jobs.
-- CHECK constraints cannot be extended in place, so the old constraint is
-- dropped and recreated with the new value added, atomically in one ALTER.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_type_check,
    ADD CONSTRAINT index_jobs_type_check
    CHECK (type IN ('scan', 'rebuild', 'full_rebuild', 'thumbnail_rebuild', 'thumbnail_regenerate', 'cbr_to_cbz', 'metadata_batch', 'metadata_refresh'));
|
||||
24
infra/migrations/0038_add_status_mappings.sql
Normal file
24
infra/migrations/0038_add_status_mappings.sql
Normal file
@@ -0,0 +1,24 @@
|
||||
-- Status mappings: many provider statuses → one target status (existing in series_metadata.status).
CREATE TABLE status_mappings (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Raw status string as reported by a metadata provider (seeded lowercase below).
    provider_status TEXT NOT NULL UNIQUE,
    -- Canonical status the provider value maps to.
    mapped_status TEXT NOT NULL,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- Pre-populate with the current hardcoded mappings from normalize_series_status
-- so existing behavior is preserved when the code switches to this table.
INSERT INTO status_mappings (provider_status, mapped_status) VALUES
    -- AniList
    ('finished', 'ended'),
    ('releasing', 'ongoing'),
    ('not_yet_released', 'upcoming'),
    ('cancelled', 'cancelled'),
    ('hiatus', 'hiatus'),
    -- Bédéthèque (French)
    ('finie', 'ended'),
    ('terminée', 'ended'),
    ('en cours', 'ongoing'),
    ('suspendue', 'hiatus'),
    ('annulée', 'cancelled'),
    ('arrêtée', 'cancelled');
|
||||
19
infra/migrations/0039_renormalize_series_status.sql
Normal file
19
infra/migrations/0039_renormalize_series_status.sql
Normal file
@@ -0,0 +1,19 @@
|
||||
-- Re-normalize series_metadata.status using the status_mappings table.
-- Batch sync was not calling normalize_series_status before, so raw provider
-- values like "Série en cours" ended up in the DB alongside "ongoing".

-- Pass 1: exact (case-insensitive) match against a known provider status.
-- The last predicate skips rows already holding the mapped value, keeping the
-- statement idempotent.
UPDATE series_metadata sm
SET status = m.mapped_status, updated_at = NOW()
FROM status_mappings m
WHERE LOWER(sm.status) = m.provider_status
  AND sm.status IS NOT NULL
  AND LOWER(sm.status) != m.mapped_status;

-- Pass 2: substring match (for values like "Série en cours" containing "en cours").
-- NOTE(review): if several provider_status values are substrings of the same
-- row's status, which mapping wins is unspecified (UPDATE ... FROM joins to
-- multiple rows and PostgreSQL applies one arbitrarily) — confirm acceptable.
UPDATE series_metadata sm
SET status = m.mapped_status, updated_at = NOW()
FROM status_mappings m
WHERE LOWER(sm.status) LIKE '%' || m.provider_status || '%'
  AND sm.status IS NOT NULL
  AND LOWER(sm.status) != m.mapped_status;
|
||||
5
infra/migrations/0040_lowercase_series_status.sql
Normal file
5
infra/migrations/0040_lowercase_series_status.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Normalize all series_metadata.status values to lowercase for consistency.
-- This fixes case mismatches like "One shot" vs "one shot".
-- The WHERE clause makes the statement idempotent: only rows whose value
-- actually differs from its lowercase form are touched (and re-timestamped).
UPDATE series_metadata
SET status = LOWER(status), updated_at = NOW()
WHERE status IS NOT NULL AND status != LOWER(status);
|
||||
18
infra/migrations/0041_renormalize_with_current_mappings.sql
Normal file
18
infra/migrations/0041_renormalize_with_current_mappings.sql
Normal file
@@ -0,0 +1,18 @@
|
||||
-- Re-normalize series_metadata.status using current status_mappings.
-- Catches mappings added via UI after migration 0039 ran (e.g. "one shot" → "ended").
-- Same two-pass shape as 0039; both passes are idempotent via the final predicate.

-- Pass 1: exact (case-insensitive) match.
UPDATE series_metadata sm
SET status = m.mapped_status, updated_at = NOW()
FROM status_mappings m
WHERE LOWER(sm.status) = m.provider_status
  AND sm.status IS NOT NULL
  AND LOWER(sm.status) != m.mapped_status;

-- Pass 2: substring match.
-- NOTE(review): assumes provider_status values are stored lowercase (true for
-- the 0038 seed data); only the left-hand side is lowercased here — confirm
-- the UI enforces lowercase on insert.
UPDATE series_metadata sm
SET status = m.mapped_status, updated_at = NOW()
FROM status_mappings m
WHERE LOWER(sm.status) LIKE '%' || m.provider_status || '%'
  AND sm.status IS NOT NULL
  AND LOWER(sm.status) != m.mapped_status;
|
||||
3
infra/migrations/0042_nullable_mapped_status.sql
Normal file
3
infra/migrations/0042_nullable_mapped_status.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
-- Allow mapped_status to be NULL to represent "known but unmapped" provider statuses.
-- Clicking X in the UI will set mapped_status to NULL instead of deleting the row,
-- so the provider status itself stays recorded.
ALTER TABLE status_mappings ALTER COLUMN mapped_status DROP NOT NULL;
|
||||
3
infra/migrations/0043_add_prowlarr_settings.sql
Normal file
3
infra/migrations/0043_add_prowlarr_settings.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
-- Seed default (unconfigured) Prowlarr connection settings.
-- NOTE(review): 7030/7020 look like Newznab book/comic category ids — confirm.
-- ON CONFLICT DO NOTHING keeps the migration idempotent if the key already exists.
INSERT INTO app_settings (key, value) VALUES
    ('prowlarr', '{"url": "", "api_key": "", "categories": [7030, 7020]}')
ON CONFLICT DO NOTHING;
|
||||
3
infra/migrations/0044_add_qbittorrent_settings.sql
Normal file
3
infra/migrations/0044_add_qbittorrent_settings.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
-- Seed default (empty) qBittorrent connection settings, to be filled in via the UI.
-- ON CONFLICT DO NOTHING keeps the migration idempotent if the key already exists.
INSERT INTO app_settings (key, value) VALUES
    ('qbittorrent', '{"url": "", "username": "", "password": ""}')
ON CONFLICT DO NOTHING;
|
||||
4
infra/migrations/0045_add_metadata_refresh_schedule.sql
Normal file
4
infra/migrations/0045_add_metadata_refresh_schedule.sql
Normal file
@@ -0,0 +1,4 @@
|
||||
-- Per-library metadata refresh scheduling.
-- metadata_refresh_mode defaults to 'manual'; other accepted values are not
-- constrained here — presumably enforced in application code, TODO confirm.
-- last/next_metadata_refresh_at are NULL until a refresh has run / is scheduled.
ALTER TABLE libraries
    ADD COLUMN metadata_refresh_mode TEXT NOT NULL DEFAULT 'manual',
    ADD COLUMN last_metadata_refresh_at TIMESTAMPTZ,
    ADD COLUMN next_metadata_refresh_at TIMESTAMPTZ;
|
||||
10
infra/migrations/0046_add_epub_format.sql
Normal file
10
infra/migrations/0046_add_epub_format.sql
Normal file
@@ -0,0 +1,10 @@
|
||||
-- Add EPUB to allowed format values in book_files and books tables.
-- PostgreSQL CHECK constraints are dropped+recreated (no ALTER CONSTRAINT).
-- Both columns must stay in sync since books.format is denormalized from book_files.

-- book_files.format
ALTER TABLE book_files DROP CONSTRAINT IF EXISTS book_files_format_check;
ALTER TABLE book_files ADD CONSTRAINT book_files_format_check CHECK (format IN ('pdf', 'cbz', 'cbr', 'epub'));

-- books.format (denormalized column added in 0020)
ALTER TABLE books DROP CONSTRAINT IF EXISTS books_format_check;
ALTER TABLE books ADD CONSTRAINT books_format_check CHECK (format IN ('pdf', 'cbz', 'cbr', 'epub'));
|
||||
7
infra/migrations/0047_add_rescan_job_type.sql
Normal file
7
infra/migrations/0047_add_rescan_job_type.sql
Normal file
@@ -0,0 +1,7 @@
|
||||
-- Add rescan job type: clears directory mtimes to force re-walking all directories
-- while preserving existing data (unlike full_rebuild which deletes everything).
-- Useful for discovering newly supported formats (e.g. EPUB) without losing metadata.
-- Same drop+recreate pattern as 0037, with 'rescan' added to the value list.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_type_check,
    ADD CONSTRAINT index_jobs_type_check
    CHECK (type IN ('scan', 'rebuild', 'full_rebuild', 'rescan', 'thumbnail_rebuild', 'thumbnail_regenerate', 'cbr_to_cbz', 'metadata_batch', 'metadata_refresh'));
|
||||
3
infra/migrations/0048_add_telegram_settings.sql
Normal file
3
infra/migrations/0048_add_telegram_settings.sql
Normal file
@@ -0,0 +1,3 @@
|
||||
-- Seed default Telegram notification settings: disabled, with per-event toggles
-- all on so enabling the integration notifies for everything by default.
-- ON CONFLICT DO NOTHING keeps the migration idempotent if the key already exists.
INSERT INTO app_settings (key, value) VALUES
    ('telegram', '{"bot_token": "", "chat_id": "", "enabled": false, "events": {"job_completed": true, "job_failed": true, "job_cancelled": true, "metadata_approved": true}}')
ON CONFLICT DO NOTHING;
|
||||
8
infra/migrations/0049_update_telegram_events.sql
Normal file
8
infra/migrations/0049_update_telegram_events.sql
Normal file
@@ -0,0 +1,8 @@
|
||||
-- Update telegram events from 4 generic toggles to 12 granular toggles.
-- jsonb_set replaces the entire {events} object, so any per-event choices made
-- under the old 4-toggle scheme are reset: every granular toggle starts as true.
UPDATE app_settings
SET value = jsonb_set(
    value,
    '{events}',
    '{"scan_completed": true, "scan_failed": true, "scan_cancelled": true, "thumbnail_completed": true, "thumbnail_failed": true, "conversion_completed": true, "conversion_failed": true, "metadata_approved": true, "metadata_batch_completed": true, "metadata_batch_failed": true, "metadata_refresh_completed": true, "metadata_refresh_failed": true}'::jsonb
)
WHERE key = 'telegram';
|
||||
Reference in New Issue
Block a user