Compare commits: 3bd2fb7c1f...main (167 commits)
.env.example | 35

@@ -9,13 +9,16 @@
 # REQUIRED - Change these values in production!
 # =============================================================================

-# Master key for Meilisearch authentication (required)
-MEILI_MASTER_KEY=change-me-in-production
-
 # Bootstrap token for initial API admin access (required)
 # Use this token for the first API calls before creating proper API tokens
 API_BOOTSTRAP_TOKEN=change-me-in-production

+# Backoffice admin credentials (required)
+ADMIN_USERNAME=admin
+ADMIN_PASSWORD=change-me-in-production
+# Secret for signing session JWTs (min 32 chars, required)
+SESSION_SECRET=change-me-in-production-use-32-chars-min
+
 # =============================================================================
 # Service Configuration
 # =============================================================================
@@ -28,12 +31,27 @@ API_BASE_URL=http://api:7080
 INDEXER_LISTEN_ADDR=0.0.0.0:7081
 INDEXER_SCAN_INTERVAL_SECONDS=5

-# Meilisearch Search Engine
-MEILI_URL=http://meilisearch:7700
-
 # PostgreSQL Database
 DATABASE_URL=postgres://stripstream:stripstream@postgres:5432/stripstream

+# =============================================================================
+# Logging
+# =============================================================================
+# Log levels per domain. Default: indexer=info,scan=info,extraction=info,thumbnail=warn,watcher=info
+# Domains:
+# scan — filesystem scan (discovery phase)
+# extraction — page extraction from archives (extracting_pages phase)
+# thumbnail — thumbnail generation (resize/encode)
+# watcher — file watcher polling
+# indexer — general indexer logs
+# Levels: error, warn, info, debug, trace
+# Examples:
+# RUST_LOG=indexer=info # default, quiet thumbnails
+# RUST_LOG=indexer=info,thumbnail=debug # enable thumbnail timing logs
+# RUST_LOG=indexer=info,extraction=debug # per-book extraction details
+# RUST_LOG=indexer=debug,scan=debug,extraction=debug,thumbnail=debug,watcher=debug # everything verbose
+# RUST_LOG=indexer=info,scan=info,extraction=info,thumbnail=warn,watcher=info
+
 # =============================================================================
 # Storage Configuration
 # =============================================================================
@@ -46,11 +64,11 @@ LIBRARIES_ROOT_PATH=/libraries
 # Path to libraries directory on host machine (for Docker volume mount)
 # Default: ../libraries (relative to infra/docker-compose.yml)
 # You can change this to an absolute path on your machine
-LIBRARIES_HOST_PATH=../libraries
+LIBRARIES_HOST_PATH=./libraries

 # Path to thumbnails directory on host machine (for Docker volume mount)
 # Default: ../data/thumbnails (relative to infra/docker-compose.yml)
-THUMBNAILS_HOST_PATH=../data/thumbnails
+THUMBNAILS_HOST_PATH=./data/thumbnails

 # =============================================================================
 # Port Configuration
@@ -59,5 +77,4 @@ THUMBNAILS_HOST_PATH=../data/thumbnails
 # - API: change "7080:7080" to "YOUR_PORT:7080"
 # - Indexer: change "7081:7081" to "YOUR_PORT:7081"
 # - Backoffice: change "7082:7082" to "YOUR_PORT:7082"
-# - Meilisearch: change "7700:7700" to "YOUR_PORT:7700"
 # - PostgreSQL: change "6432:5432" to "YOUR_PORT:5432"
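The per-domain levels added above are standard `tracing` filter directives, and the workspace already depends on `tracing-subscriber` with the `env-filter` feature. A minimal sketch of how the indexer likely consumes `RUST_LOG` (the initialization code itself is not part of this diff, so the function name and setup here are assumptions):

```rust
use tracing_subscriber::EnvFilter;

fn init_logging() {
    // Falls back to the documented default when RUST_LOG is unset.
    // The domain names (scan, extraction, thumbnail, watcher, indexer) are
    // tracing targets, e.g. tracing::info!(target: "thumbnail", "resized").
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| {
        EnvFilter::new("indexer=info,scan=info,extraction=info,thumbnail=warn,watcher=info")
    });
    tracing_subscriber::fmt().with_env_filter(filter).init();
}
```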
.gitea/workflows/deploy.yml | 17 (new file)

@@ -0,0 +1,17 @@
+name: Deploy with Docker Compose
+
+on:
+  push:
+    branches:
+      - main # adjust to the branch you want to deploy
+
+jobs:
+  deploy:
+    runs-on: mac-orbstack-runner # the name you gave the runner
+    steps:
+      - name: Deploy stack
+        env:
+          DOCKER_BUILDKIT: 1
+          COMPOSE_DOCKER_CLI_BUILD: 1
+        run: |
+          BUILDKIT_PROGRESS=plain cd /Users/julienfroidefond/Sites/docker-stack && docker pull julienfroidefond32/stripstream-backoffice && docker pull julienfroidefond32/stripstream-api && docker pull julienfroidefond32/stripstream-indexer && ./scripts/stack.sh up stripstream
@@ -77,7 +77,7 @@ sqlx migrate add -r migration_name

 ```bash
 # Start infrastructure only
-docker compose up -d postgres meilisearch
+docker compose up -d postgres

 # Start full stack
 docker compose up -d
@@ -10,7 +10,6 @@ Comic book/ebook library manager. Multi-crate Cargo workspace.
 | Indexer (background) | `apps/indexer/` | 7081 |
 | Backoffice (Next.js) | `apps/backoffice/` | 7082 |
 | PostgreSQL | infra | 6432 |
-| Meilisearch | infra | 7700 |

 Shared crates: `crates/core` (env config), `crates/parsers` (CBZ/CBR/PDF).

@@ -31,7 +30,7 @@ cargo test
 cargo test -p parsers

 # Infra (dependencies only); docker-compose.yml is at the repo root
-docker compose up -d postgres meilisearch
+docker compose up -d postgres

 # Backoffice dev
 cd apps/backoffice && npm install && npm run dev # http://localhost:7082
@@ -46,7 +45,7 @@ sqlx migrate run # DATABASE_URL must be set
 cp .env.example .env # then edit the REQUIRED values
 ```

-**Required** variables at startup: `DATABASE_URL`, `MEILI_URL`, `MEILI_MASTER_KEY`, `API_BOOTSTRAP_TOKEN`.
+**Required** variables at startup: `DATABASE_URL`, `API_BOOTSTRAP_TOKEN`.

 ## Gotchas

@@ -56,6 +55,7 @@ Required variables at startup: DATABASE_URL, MEILI_URL, MEILI_MASTE
 - **Thumbnails**: stored in `THUMBNAIL_DIRECTORY` (default `/data/thumbnails`), generated by **the API** (not the indexer); the indexer triggers a checkup via `POST /index/jobs/:id/thumbnails/checkup`.
 - **Cargo workspace**: external dependencies are declared in the root `Cargo.toml`, not in the individual crates.
 - **Migrations**: in `infra/migrations/`, managed by sqlx. Always run migrations before starting the services.
+- **Search**: full-text via PostgreSQL (`ILIKE` + `pg_trgm`), no external search engine.

 ## Key files

@@ -64,6 +64,7 @@ Required variables at startup: DATABASE_URL, MEILI_URL, MEILI_MASTE
 | `crates/core/src/config.rs` | Config from env (API, Indexer, AdminUI) |
 | `crates/parsers/src/lib.rs` | Format detection, metadata extraction |
 | `apps/api/src/books.rs` | Book CRUD endpoints |
+| `apps/api/src/search.rs` | PostgreSQL full-text search |
 | `apps/api/src/pages.rs` | Page rendering + LRU cache |
 | `apps/indexer/src/scanner.rs` | Filesystem scan |
 | `infra/migrations/*.sql` | DB schema |
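The search gotcha above is the core of this comparison: the Meilisearch projection is replaced by an `ILIKE` + `pg_trgm` query in PostgreSQL. The actual query in `apps/api/src/search.rs` is not shown in this diff, so the following sqlx sketch is only an assumption of the pattern; the `books`/`title` names come from the schema used elsewhere in the diff:

```rust
use sqlx::{PgPool, Row};

// Hypothetical helper: substring match via ILIKE, fuzzy ranking via the
// pg_trgm similarity() function (requires CREATE EXTENSION pg_trgm).
async fn search_books(pool: &PgPool, q: &str) -> sqlx::Result<Vec<(uuid::Uuid, String)>> {
    let pattern = format!("%{q}%");
    let rows = sqlx::query(
        r#"
        SELECT id, title
        FROM books
        WHERE title ILIKE $1 OR similarity(title, $2) > 0.3
        ORDER BY similarity(title, $2) DESC
        LIMIT 20
        "#,
    )
    .bind(&pattern)
    .bind(q)
    .fetch_all(pool)
    .await?;
    Ok(rows.iter().map(|r| (r.get("id"), r.get("title"))).collect())
}
```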
Cargo.lock | 662 (generated; diff suppressed because it is too large)
Cargo.toml (workspace root)

@@ -3,13 +3,14 @@ members = [
     "apps/api",
     "apps/indexer",
     "crates/core",
+    "crates/notifications",
     "crates/parsers",
 ]
 resolver = "2"

 [workspace.package]
 edition = "2021"
-version = "0.1.0"
+version = "2.0.0"
 license = "MIT"

 [workspace.dependencies]
@@ -19,9 +20,10 @@ axum = "0.7"
 base64 = "0.22"
 chrono = { version = "0.4", features = ["serde"] }
 image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
+jpeg-decoder = "0.3"
 lru = "0.12"
 rayon = "1.10"
-reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
+reqwest = { version = "0.12", default-features = false, features = ["json", "multipart", "rustls-tls"] }
 rand = "0.8"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -33,9 +35,11 @@ tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
 uuid = { version = "1.12", features = ["serde", "v4"] }
 natord = "1.0"
+num_cpus = "1.16"
 pdfium-render = { version = "0.8", default-features = false, features = ["pdfium_latest", "image_latest", "thread_safe"] }
 unrar = "0.5"
 walkdir = "2.5"
 webp = "0.3"
 utoipa = "4.0"
 utoipa-swagger-ui = "6.0"
+scraper = "0.21"
LICENSE | 21 (new file)

@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 Julien Froidefond
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
PLAN.md | 10

@@ -12,7 +12,7 @@ Build a high-performance server to index and serve libraries
 - Backend/API: Rust (`axum`)
 - Indexing: dedicated Rust service (`indexer`)
 - DB: PostgreSQL
-- Search: Meilisearch
+- Search: PostgreSQL full-text (ILIKE + pg_trgm)
 - Deployment: Docker Compose
 - Auth: env bootstrap token + admin tokens in DB (creatable/revocable)
 - Admin token expiration: none by default (manual revocation)
@@ -33,7 +33,7 @@ Build a high-performance server to index and serve libraries
 **DoD:** Crates build OK.

 ### T2 - Docker Compose infra
-- [x] Define services `postgres`, `meilisearch`, `api`, `indexer`
+- [x] Define services `postgres`, `api`, `indexer`
 - [x] Persistent volumes
 - [x] Healthchecks

@@ -114,7 +114,7 @@ Build a high-performance server to index and serve libraries
 **DoD:** Pagination/filters working.

 ### T13 - Search
-- [x] Projection to Meilisearch
+- [x] PostgreSQL full-text search
 - [x] `GET /search?q=...&library_id=...&type=...`
 - [x] Fuzzy + filters

@@ -264,10 +264,10 @@ Build a high-performance server to index and serve libraries
 - Bootstrap token = break-glass (can be disabled later)

 ## Journal
-- 2026-03-05: `docker compose up -d --build` validated, full stack healthy (`postgres`, `meilisearch`, `api`, `indexer`, `admin-ui`).
+- 2026-03-05: `docker compose up -d --build` validated, full stack healthy (`postgres`, `api`, `indexer`, `admin-ui`).
 - 2026-03-05: infra adjustments applied for stable startup (`unrar` -> `unrar-free`, `rust:1-bookworm` image, `127.0.0.1` healthchecks).
 - 2026-03-05: added a `migrate` service in Compose to automatically run `infra/migrations/0001_init.sql` at startup.
-- 2026-03-05: Batch 2 done (jobs, incremental scan, `cbz/cbr/pdf` parsers, books API, Meilisearch sync + search).
+- 2026-03-05: Batch 2 done (jobs, incremental scan, `cbz/cbr/pdf` parsers, books API, PostgreSQL search).
 - 2026-03-05: end-to-end verification OK on a test library (`/libraries/demo`) with indexing, `/books` listing and `/search` (1 CBZ detected).
 - 2026-03-05: Batch 3 progress: pages endpoint (`/books/:id/pages/:n`) live with LRU cache, ETag/Cache-Control, render concurrency limit and timeouts.
 - 2026-03-05: API hardening: readiness exposed without auth via `route_layer`, simple `/metrics`, read rate limiting (120 req/s).
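The Batch 3 journal entry mentions ETag/Cache-Control on the page endpoint. As a reference for how conditional requests usually interact with such a cache, a hedged sketch (this is not the project's actual handler; the header wiring and cache policy are assumptions):

```rust
use axum::http::{header, HeaderMap, StatusCode};

// Hypothetical sketch: serve a rendered page with a strong ETag so clients
// can revalidate instead of re-downloading unchanged pages.
fn page_response(
    headers: &HeaderMap,
    etag: &str,
    body: Vec<u8>,
) -> (StatusCode, [(header::HeaderName, String); 2], Vec<u8>) {
    let cache_headers = [
        (header::ETAG, etag.to_string()),
        (header::CACHE_CONTROL, "private, max-age=3600".to_string()),
    ];
    // If the client already holds this exact rendering, skip the body.
    if headers.get(header::IF_NONE_MATCH).and_then(|v| v.to_str().ok()) == Some(etag) {
        return (StatusCode::NOT_MODIFIED, cache_headers, Vec::new());
    }
    (StatusCode::OK, cache_headers, body)
}
```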
README.md | 180

@@ -9,7 +9,7 @@ The project consists of the following components:
 - **API** (`apps/api/`) - Rust-based REST API service
 - **Indexer** (`apps/indexer/`) - Rust-based background indexing service
 - **Backoffice** (`apps/backoffice/`) - Next.js web administration interface
-- **Infrastructure** (`infra/`) - Docker Compose setup with PostgreSQL and Meilisearch
+- **Infrastructure** (`infra/`) - Docker Compose setup with PostgreSQL

 ## Quick Start

@@ -27,19 +27,16 @@ The project consists of the following components:
 ```

 2. Edit `.env` and set secure values for:
-   - `MEILI_MASTER_KEY` - Master key for Meilisearch
    - `API_BOOTSTRAP_TOKEN` - Bootstrap token for initial API authentication

 ### Running with Docker

 ```bash
-cd infra
 docker compose up -d
 ```

 This will start:
 - PostgreSQL (port 6432)
-- Meilisearch (port 7700)
 - API service (port 7080)
 - Indexer service (port 7081)
 - Backoffice web UI (port 7082)
@@ -48,7 +45,6 @@ This will start:

 - **Backoffice**: http://localhost:7082
 - **API**: http://localhost:7080
-- **Meilisearch**: http://localhost:7700

 ### Default Credentials

@@ -62,8 +58,7 @@ The default bootstrap token is configured in your `.env` file. Use this for init

 ```bash
 # Start dependencies
-cd infra
-docker compose up -d postgres meilisearch
+docker compose up -d postgres

 # Run API
 cd apps/api
@@ -82,46 +77,107 @@ npm install
 npm run dev
 ```

-The backoffice will be available at http://localhost:3000
+The backoffice will be available at http://localhost:7082

 ## Features

-### Libraries Management
-- Create and manage multiple libraries
-- Configure automatic scanning schedules (hourly, daily, weekly)
-- Real-time file watcher for instant indexing
-- Full and incremental rebuild options
+> For the full feature list, business rules, and API details, see [docs/FEATURES.md](docs/FEATURES.md).

-### Books Management
-- Support for CBZ, CBR, and PDF formats
-- Automatic metadata extraction
-- Series and volume detection
-- Full-text search with Meilisearch
+### Libraries
+- Multi-library management with per-library configuration
+- Incremental and full scanning, real-time filesystem watcher
+- Per-library metadata provider selection (Google Books, ComicVine, BedéThèque, AniList, Open Library)

-### Jobs Monitoring
-- Real-time job progress tracking
-- Detailed statistics (scanned, indexed, removed, errors)
-- Job history and logs
-- Cancel pending jobs
+### Books & Series
+- **Formats**: CBZ, CBR, PDF, EPUB
+- Automatic metadata extraction (title, series, volume, authors, page count) from filenames and directory structure
+- Series aggregation with missing volume detection
+- Thumbnail generation (WebP/JPEG/PNG) with lazy generation and bulk rebuild
+- CBR → CBZ conversion

-### Search
-- Full-text search across titles, authors, and series
-- Library filtering
-- Real-time suggestions
+### Reading Progress
+- Per-book tracking: unread / reading / read with current page
+- Series-level aggregated reading status
+- Bulk mark-as-read for series
+
+### Search & Discovery
+- Full-text search across titles, authors, and series (PostgreSQL `pg_trgm`)
+- Author listing with book/series counts
+- Filtering by reading status, series status, format, metadata provider
+
+### External Metadata
+- Search, match, approve/reject workflow with confidence scoring
+- Batch auto-matching and scheduled metadata refresh
+- Field locking to protect manual edits from sync
+
+### Notifications
+- **Telegram**: real-time notifications via Telegram Bot API
+- 12 granular event toggles (scans, thumbnails, conversions, metadata)
+- Book thumbnail images included in notifications where applicable
+- Test connection from settings
+
+### External Integrations
+- **Komga**: import reading progress
+- **Prowlarr**: search for missing volumes
+- **qBittorrent**: add torrents directly from search results
+
+### Background Jobs
+- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion
+- Real-time progress via Server-Sent Events (SSE)
+- Job history, error tracking, cancellation
+
+### Page Rendering
+- On-demand page extraction from all formats
+- Image processing (format, quality, max width, resampling filter)
+- LRU in-memory + disk cache
+
+### Security
+- Token-based auth (`admin` / `read` scopes) with Argon2 hashing
+- Rate limiting, token expiration and revocation
+
+### Web UI (Backoffice)
+- Dashboard with statistics, interactive charts (recharts), and reading progress
+- Currently reading & recently read sections
+- Library, book, series, author management
+- Live job monitoring, metadata search modals, settings panel
+- Notification settings with per-event toggle configuration

 ## Environment Variables

-| Variable | Description | Default |
-|----------|-------------|---------|
-| `API_LISTEN_ADDR` | API service bind address | `0.0.0.0:7080` |
-| `INDEXER_LISTEN_ADDR` | Indexer service bind address | `0.0.0.0:7081` |
-| `BACKOFFICE_PORT` | Backoffice web UI port | `7082` |
-| `DATABASE_URL` | PostgreSQL connection string | `postgres://stripstream:stripstream@postgres:5432/stripstream` |
-| `MEILI_URL` | Meilisearch connection URL | `http://meilisearch:7700` |
-| `MEILI_MASTER_KEY` | Meilisearch master key (required) | - |
-| `API_BOOTSTRAP_TOKEN` | Initial API admin token (required) | - |
-| `INDEXER_SCAN_INTERVAL_SECONDS` | Watcher scan interval | `5` |
-| `LIBRARIES_ROOT_PATH` | Path to libraries directory | `/libraries` |
+Variables marked **required** must be set. The others have a default value.
+
+### Shared (API + Indexer)
+
+| Variable | Description | Default |
+|----------|-------------|--------|
+| `DATABASE_URL` | **required** — PostgreSQL connection | — |
+
+### API
+
+| Variable | Description | Default |
+|----------|-------------|--------|
+| `API_BOOTSTRAP_TOKEN` | **required** — Initial admin token | — |
+| `API_LISTEN_ADDR` | Listen address | `0.0.0.0:7080` |
+
+### Indexer
+
+| Variable | Description | Default |
+|----------|-------------|--------|
+| `INDEXER_LISTEN_ADDR` | Listen address | `0.0.0.0:7081` |
+| `INDEXER_SCAN_INTERVAL_SECONDS` | Watcher scan interval | `5` |
+| `THUMBNAIL_ENABLED` | Enable thumbnail generation | `true` |
+| `THUMBNAIL_DIRECTORY` | Thumbnail storage directory | `/data/thumbnails` |
+| `THUMBNAIL_WIDTH` | Max thumbnail width (px) | `300` |
+| `THUMBNAIL_HEIGHT` | Max thumbnail height (px) | `400` |
+| `THUMBNAIL_QUALITY` | WebP quality (0–100) | `80` |
+| `THUMBNAIL_FORMAT` | Output format | `webp` |
+
+### Backoffice
+
+| Variable | Description | Default |
+|----------|-------------|--------|
+| `API_BOOTSTRAP_TOKEN` | **required** — API access token | — |
+| `API_BASE_URL` | Internal API URL (inside the Docker network) | `http://api:7080` |

 ## API Documentation

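The environment tables in the hunk above mirror what `crates/core/src/config.rs` reads at startup. A minimal sketch of the pattern (the actual struct and field names in `crates/core` are not shown in this diff, so treat them as assumptions):

```rust
use std::env;

// Hypothetical config loader following the README tables: required variables
// fail fast with a clear message, optional ones fall back to their defaults.
#[derive(Debug)]
pub struct ApiConfig {
    pub database_url: String,    // required
    pub bootstrap_token: String, // required
    pub listen_addr: String,     // default 0.0.0.0:7080
}

impl ApiConfig {
    pub fn from_env() -> Result<Self, String> {
        let required = |key: &str| env::var(key).map_err(|_| format!("{key} is required"));
        Ok(Self {
            database_url: required("DATABASE_URL")?,
            bootstrap_token: required("API_BOOTSTRAP_TOKEN")?,
            listen_addr: env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:7080".into()),
        })
    }
}
```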
@@ -140,7 +196,6 @@ stripstream-librarian/
 │   ├── indexer/      # Rust background indexer
 │   └── backoffice/   # Next.js web UI
 ├── infra/
-│   ├── docker-compose.yml
 │   └── migrations/   # SQL database migrations
 ├── libraries/        # Book storage (mounted volume)
 └── .env              # Environment configuration
@@ -182,40 +237,49 @@ services:
     volumes:
       - postgres_data:/var/lib/postgresql/data

-  meilisearch:
-    image: getmeili/meilisearch:v1.12
-    environment:
-      MEILI_MASTER_KEY: ${MEILI_MASTER_KEY}
-
   api:
     image: julienfroidefond32/stripstream-api:latest
-    env_file:
-      - .env
     ports:
       - "7080:7080"
     volumes:
-      - ${LIBRARIES_HOST_PATH:-./libraries}:/libraries
-      - ${THUMBNAILS_HOST_PATH:-./data/thumbnails}:/data/thumbnails
+      - ./libraries:/libraries
+      - ./data/thumbnails:/data/thumbnails
+    environment:
+      # --- Required ---
+      DATABASE_URL: postgres://stripstream:stripstream@postgres:5432/stripstream
+      API_BOOTSTRAP_TOKEN: your_bootstrap_token # required — change this
+      # --- Optional (defaults shown) ---
+      # API_LISTEN_ADDR: 0.0.0.0:7080

   indexer:
     image: julienfroidefond32/stripstream-indexer:latest
-    env_file:
-      - .env
     ports:
       - "7081:7081"
     volumes:
-      - ${LIBRARIES_HOST_PATH:-./libraries}:/libraries
-      - ${THUMBNAILS_HOST_PATH:-./data/thumbnails}:/data/thumbnails
+      - ./libraries:/libraries
+      - ./data/thumbnails:/data/thumbnails
+    environment:
+      # --- Required ---
+      DATABASE_URL: postgres://stripstream:stripstream@postgres:5432/stripstream
+      # --- Optional (defaults shown) ---
+      # INDEXER_LISTEN_ADDR: 0.0.0.0:7081
+      # INDEXER_SCAN_INTERVAL_SECONDS: 5
+      # THUMBNAIL_ENABLED: true
+      # THUMBNAIL_DIRECTORY: /data/thumbnails
+      # THUMBNAIL_WIDTH: 300
+      # THUMBNAIL_HEIGHT: 400
+      # THUMBNAIL_QUALITY: 80
+      # THUMBNAIL_FORMAT: webp

   backoffice:
     image: julienfroidefond32/stripstream-backoffice:latest
-    env_file:
-      - .env
-    environment:
-      - PORT=7082
-      - HOST=0.0.0.0
     ports:
       - "7082:7082"
+    environment:
+      # --- Required ---
+      API_BOOTSTRAP_TOKEN: your_bootstrap_token # must match api above
+      # --- Optional (defaults shown) ---
+      # API_BASE_URL: http://api:7080

 volumes:
   postgres_data:
@@ -223,4 +287,4 @@ volumes:

 ## License

-[Your License Here]
+This project is licensed under the [MIT License](LICENSE).
apps/api/Cargo.toml

@@ -13,10 +13,14 @@ async-stream = "0.3"
 chrono.workspace = true
 futures = "0.3"
 image.workspace = true
+jpeg-decoder.workspace = true
 lru.workspace = true
+notifications = { path = "../../crates/notifications" }
 stripstream-core = { path = "../../crates/core" }
+parsers = { path = "../../crates/parsers" }
 rand.workspace = true
 tokio-stream = "0.1"
+regex = "1"
 reqwest.workspace = true
 serde.workspace = true
 serde_json.workspace = true
@@ -28,10 +32,7 @@ tower-http = { version = "0.6", features = ["cors"] }
 tracing.workspace = true
 tracing-subscriber.workspace = true
 uuid.workspace = true
-natord.workspace = true
-pdfium-render.workspace = true
-unrar.workspace = true
-zip = { version = "2.2", default-features = false, features = ["deflate"] }
 utoipa.workspace = true
 utoipa-swagger-ui = { workspace = true, features = ["axum"] }
 webp.workspace = true
+scraper.workspace = true
apps/api/Dockerfile

@@ -1,25 +1,42 @@
 FROM rust:1-bookworm AS builder
 WORKDIR /app

-# Install sccache for faster builds
-RUN cargo install sccache --locked
-ENV RUSTC_WRAPPER=sccache
-ENV SCCACHE_DIR=/sccache
+# Copy workspace manifests and create dummy source files to cache dependency builds

 COPY Cargo.toml ./
 COPY apps/api/Cargo.toml apps/api/Cargo.toml
 COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
 COPY crates/core/Cargo.toml crates/core/Cargo.toml
+COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
 COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml

+RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
+    echo "fn main() {}" > apps/api/src/main.rs && \
+    echo "fn main() {}" > apps/indexer/src/main.rs && \
+    echo "" > apps/indexer/src/lib.rs && \
+    echo "" > crates/core/src/lib.rs && \
+    echo "" > crates/notifications/src/lib.rs && \
+    echo "" > crates/parsers/src/lib.rs
+
+# Build dependencies only (cached as long as Cargo.toml files don't change)
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=/usr/local/cargo/git \
+    --mount=type=cache,target=/app/target \
+    cargo build --release -p api && \
+    cargo install sqlx-cli --no-default-features --features postgres --locked
+
+# Copy real source code and build
 COPY apps/api/src apps/api/src
 COPY apps/indexer/src apps/indexer/src
 COPY crates/core/src crates/core/src
+COPY crates/notifications/src crates/notifications/src
 COPY crates/parsers/src crates/parsers/src

-# Build with sccache (cache persisted between builds via Docker cache mount)
-RUN --mount=type=cache,target=/sccache \
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=/usr/local/cargo/git \
+    --mount=type=cache,target=/app/target \
+    touch apps/api/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
     cargo build --release -p api && \
-    cargo install sqlx-cli --no-default-features --features postgres --locked
+    cp /app/target/release/api /usr/local/bin/api

 FROM debian:bookworm-slim

@@ -42,7 +59,7 @@ RUN ARCH=$(dpkg --print-architecture) && \
     cp /tmp/lib/libpdfium.so /usr/local/lib/ && \
     rm -rf /tmp/pdfium.tgz /tmp/lib /tmp/include && \
     ldconfig
-COPY --from=builder /app/target/release/api /usr/local/bin/api
+COPY --from=builder /usr/local/bin/api /usr/local/bin/api
 COPY --from=builder /usr/local/cargo/bin/sqlx /usr/local/bin/sqlx
 COPY infra/migrations /app/migrations
 COPY apps/api/entrypoint.sh /usr/local/bin/entrypoint.sh
@@ -5,6 +5,7 @@ use axum::{
 };
 use std::time::Duration;
 use std::sync::atomic::Ordering;
+use tracing::info;

 use crate::state::AppState;

@@ -14,7 +15,14 @@ pub async fn request_counter(
     next: Next,
 ) -> Response {
     state.metrics.requests_total.fetch_add(1, Ordering::Relaxed);
-    next.run(req).await
+    let method = req.method().clone();
+    let uri = req.uri().clone();
+    let start = std::time::Instant::now();
+    let response = next.run(req).await;
+    let status = response.status().as_u16();
+    let elapsed = start.elapsed();
+    info!("{} {} {} {}ms", method, uri.path(), status, elapsed.as_millis());
+    response
 }

 pub async fn read_rate_limit(
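For context, a middleware like `request_counter` is typically attached to the router with `axum::middleware::from_fn_with_state`. A minimal sketch under that assumption; it reuses `AppState`, `request_counter`, and a handler from this diff, and the route choice is illustrative, not the project's actual router setup:

```rust
use axum::{middleware, routing::get, Router};

// Hypothetical wiring: every request passes through request_counter,
// which now also logs method, path, status code, and latency.
fn router(state: AppState) -> Router {
    Router::new()
        .route("/books", get(list_books))
        .layer(middleware::from_fn_with_state(state.clone(), request_counter))
        .with_state(state)
}
```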
apps/api/src/auth.rs

@@ -10,10 +10,15 @@ use sqlx::Row;

 use crate::{error::ApiError, state::AppState};

+#[derive(Clone, Debug)]
+pub struct AuthUser {
+    pub user_id: uuid::Uuid,
+}
+
 #[derive(Clone, Debug)]
 pub enum Scope {
     Admin,
-    Read,
+    Read { user_id: uuid::Uuid },
 }

 pub async fn require_admin(
@@ -40,6 +45,20 @@ pub async fn require_read(
     let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
     let scope = authenticate(&state, token).await?;

+    if let Scope::Read { user_id } = &scope {
+        req.extensions_mut().insert(AuthUser { user_id: *user_id });
+    } else if matches!(scope, Scope::Admin) {
+        // Admin can impersonate a user via the X-As-User header
+        if let Some(as_user_id) = req
+            .headers()
+            .get("X-As-User")
+            .and_then(|v| v.to_str().ok())
+            .and_then(|v| uuid::Uuid::parse_str(v).ok())
+        {
+            req.extensions_mut().insert(AuthUser { user_id: as_user_id });
+        }
+    }
+
     req.extensions_mut().insert(scope);
     Ok(next.run(req).await)
 }
@@ -60,8 +79,7 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>

     let maybe_row = sqlx::query(
         r#"
-        SELECT id, token_hash, scope
-        FROM api_tokens
+        SELECT id, token_hash, scope, user_id FROM api_tokens
         WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW())
         "#,
     )
@@ -88,7 +106,12 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
     let scope: String = row.try_get("scope").map_err(|_| ApiError::unauthorized("invalid token"))?;
     match scope.as_str() {
         "admin" => Ok(Scope::Admin),
-        "read" => Ok(Scope::Read),
+        "read" => {
+            let user_id: uuid::Uuid = row
+                .try_get("user_id")
+                .map_err(|_| ApiError::unauthorized("read token missing user_id"))?;
+            Ok(Scope::Read { user_id })
+        }
         _ => Err(ApiError::unauthorized("invalid token scope")),
     }
 }
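A hedged sketch of how a client exercises the new impersonation path: an admin token plus `X-As-User` makes read endpoints attribute progress to that user, while read tokens carry their own `user_id`. The base URL and token below are placeholders:

```rust
use reqwest::Client;

// Hypothetical call: admin token impersonating a user on a read endpoint.
// If X-As-User is absent or unparsable, the request simply runs without an
// AuthUser extension, matching the middleware above.
async fn list_books_as(user_id: uuid::Uuid) -> reqwest::Result<String> {
    Client::new()
        .get("http://localhost:7080/books")
        .bearer_auth("your_admin_token") // placeholder
        .header("X-As-User", user_id.to_string())
        .send()
        .await?
        .text()
        .await
}
```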
apps/api/src/authors.rs | 178 (new file)

@@ -0,0 +1,178 @@
+use axum::{extract::{Query, State}, Json};
+use serde::{Deserialize, Serialize};
+use sqlx::Row;
+use utoipa::ToSchema;
+
+use crate::{error::ApiError, state::AppState};
+
+#[derive(Deserialize, ToSchema)]
+pub struct ListAuthorsQuery {
+    #[schema(value_type = Option<String>, example = "batman")]
+    pub q: Option<String>,
+    #[schema(value_type = Option<i64>, example = 1)]
+    pub page: Option<i64>,
+    #[schema(value_type = Option<i64>, example = 20)]
+    pub limit: Option<i64>,
+    /// Sort order: "name" (default), "books" (most books first)
+    #[schema(value_type = Option<String>, example = "books")]
+    pub sort: Option<String>,
+}
+
+#[derive(Serialize, ToSchema)]
+pub struct AuthorItem {
+    pub name: String,
+    pub book_count: i64,
+    pub series_count: i64,
+}
+
+#[derive(Serialize, ToSchema)]
+pub struct AuthorsPageResponse {
+    pub items: Vec<AuthorItem>,
+    pub total: i64,
+    pub page: i64,
+    pub limit: i64,
+}
+
+/// List all unique authors with book/series counts
+#[utoipa::path(
+    get,
+    path = "/authors",
+    tag = "authors",
+    params(
+        ("q" = Option<String>, Query, description = "Search by author name"),
+        ("page" = Option<i64>, Query, description = "Page number (1-based)"),
+        ("limit" = Option<i64>, Query, description = "Items per page (max 100)"),
+        ("sort" = Option<String>, Query, description = "Sort: name (default) or books"),
+    ),
+    responses(
+        (status = 200, body = AuthorsPageResponse),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn list_authors(
+    State(state): State<AppState>,
+    Query(query): Query<ListAuthorsQuery>,
+) -> Result<Json<AuthorsPageResponse>, ApiError> {
+    let page = query.page.unwrap_or(1).max(1);
+    let limit = query.limit.unwrap_or(20).clamp(1, 100);
+    let offset = (page - 1) * limit;
+    let sort = query.sort.as_deref().unwrap_or("name");
+
+    let order_clause = match sort {
+        "books" => "book_count DESC, name ASC",
+        _ => "name ASC",
+    };
+
+    let q_pattern = query.q.as_deref()
+        .filter(|s| !s.trim().is_empty())
+        .map(|s| format!("%{s}%"));
+
+    // Aggregate unique authors from books.authors + books.author + series_metadata.authors
+    let sql = format!(
+        r#"
+        WITH all_authors AS (
+            SELECT DISTINCT UNNEST(
+                COALESCE(
+                    NULLIF(authors, '{{}}'),
+                    CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
+                )
+            ) AS name
+            FROM books
+            UNION
+            SELECT DISTINCT UNNEST(authors) AS name
+            FROM series_metadata
+            WHERE authors != '{{}}'
+        ),
+        filtered AS (
+            SELECT name FROM all_authors
+            WHERE ($1::text IS NULL OR name ILIKE $1)
+        ),
+        book_counts AS (
+            SELECT
+                f.name AS author_name,
+                COUNT(DISTINCT b.id) AS book_count
+            FROM filtered f
+            LEFT JOIN books b ON (
+                f.name = ANY(
+                    COALESCE(
+                        NULLIF(b.authors, '{{}}'),
+                        CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END
+                    )
+                )
+            )
+            GROUP BY f.name
+        ),
+        series_counts AS (
+            SELECT
+                f.name AS author_name,
+                COUNT(DISTINCT (sm.library_id, sm.name)) AS series_count
+            FROM filtered f
+            LEFT JOIN series_metadata sm ON (
+                f.name = ANY(sm.authors) AND sm.authors != '{{}}'
+            )
+            GROUP BY f.name
+        )
+        SELECT
+            f.name,
+            COALESCE(bc.book_count, 0) AS book_count,
+            COALESCE(sc.series_count, 0) AS series_count
+        FROM filtered f
+        LEFT JOIN book_counts bc ON bc.author_name = f.name
+        LEFT JOIN series_counts sc ON sc.author_name = f.name
+        ORDER BY {order_clause}
+        LIMIT $2 OFFSET $3
+        "#
+    );
+
+    let count_sql = r#"
+        WITH all_authors AS (
+            SELECT DISTINCT UNNEST(
+                COALESCE(
+                    NULLIF(authors, '{}'),
+                    CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
+                )
+            ) AS name
+            FROM books
+            UNION
+            SELECT DISTINCT UNNEST(authors) AS name
+            FROM series_metadata
+            WHERE authors != '{}'
+        )
+        SELECT COUNT(*) AS total
+        FROM all_authors
+        WHERE ($1::text IS NULL OR name ILIKE $1)
+    "#;
+
+    let (rows, count_row) = tokio::join!(
+        sqlx::query(&sql)
+            .bind(q_pattern.as_deref())
+            .bind(limit)
+            .bind(offset)
+            .fetch_all(&state.pool),
+        sqlx::query(count_sql)
+            .bind(q_pattern.as_deref())
+            .fetch_one(&state.pool)
+    );
+
+    let rows = rows.map_err(|e| ApiError::internal(format!("authors query failed: {e}")))?;
+    let total: i64 = count_row
+        .map_err(|e| ApiError::internal(format!("authors count failed: {e}")))?
+        .get("total");
+
+    let items: Vec<AuthorItem> = rows
+        .iter()
+        .map(|r| AuthorItem {
+            name: r.get("name"),
+            book_count: r.get("book_count"),
+            series_count: r.get("series_count"),
+        })
+        .collect();
+
+    Ok(Json(AuthorsPageResponse {
+        items,
+        total,
+        page,
+        limit,
+    }))
+}
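A hedged client-side sketch of the new endpoint; the URL and token are placeholders, and the response shape matches `AuthorsPageResponse` above:

```rust
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct AuthorsPage {
    items: Vec<Author>,
    total: i64,
    page: i64,
    limit: i64,
}

#[derive(Deserialize, Debug)]
struct Author {
    name: String,
    book_count: i64,
    series_count: i64,
}

// Hypothetical call: authors matching "her", most books first.
async fn top_authors() -> reqwest::Result<AuthorsPage> {
    reqwest::Client::new()
        .get("http://localhost:7080/authors?q=her&sort=books&limit=10")
        .bearer_auth("your_admin_token") // placeholder
        .send()
        .await?
        .json()
        .await
}
```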
@@ -1,11 +1,11 @@
|
|||||||
use axum::{extract::{Path, Query, State}, Json};
|
use axum::{extract::{Extension, Path, Query, State}, Json};
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::Row;
|
use sqlx::Row;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
use crate::{error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
use crate::{auth::AuthUser, error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
||||||
|
|
||||||
#[derive(Deserialize, ToSchema)]
|
#[derive(Deserialize, ToSchema)]
|
||||||
pub struct ListBooksQuery {
|
pub struct ListBooksQuery {
|
||||||
@@ -13,14 +13,25 @@ pub struct ListBooksQuery {
|
|||||||
pub library_id: Option<Uuid>,
|
pub library_id: Option<Uuid>,
|
||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub kind: Option<String>,
|
pub kind: Option<String>,
|
||||||
|
#[schema(value_type = Option<String>, example = "cbz")]
|
||||||
|
pub format: Option<String>,
|
||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub series: Option<String>,
|
pub series: Option<String>,
|
||||||
#[schema(value_type = Option<String>, example = "unread,reading")]
|
#[schema(value_type = Option<String>, example = "unread,reading")]
|
||||||
pub reading_status: Option<String>,
|
pub reading_status: Option<String>,
|
||||||
|
/// Filter by exact author name (matches in authors array or scalar author field)
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub author: Option<String>,
|
||||||
#[schema(value_type = Option<i64>, example = 1)]
|
#[schema(value_type = Option<i64>, example = 1)]
|
||||||
pub page: Option<i64>,
|
pub page: Option<i64>,
|
||||||
#[schema(value_type = Option<i64>, example = 50)]
|
#[schema(value_type = Option<i64>, example = 50)]
|
||||||
pub limit: Option<i64>,
|
pub limit: Option<i64>,
|
||||||
|
/// Sort order: "title" (default) or "latest" (most recently added first)
|
||||||
|
#[schema(value_type = Option<String>, example = "latest")]
|
||||||
|
pub sort: Option<String>,
|
||||||
|
/// Filter by metadata provider: "linked" (any provider), "unlinked" (no provider), or a specific provider name
|
||||||
|
#[schema(value_type = Option<String>, example = "linked")]
|
||||||
|
pub metadata_provider: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
@@ -30,8 +41,10 @@ pub struct BookItem {
|
|||||||
#[schema(value_type = String)]
|
#[schema(value_type = String)]
|
||||||
pub library_id: Uuid,
|
pub library_id: Uuid,
|
||||||
pub kind: String,
|
pub kind: String,
|
||||||
|
pub format: Option<String>,
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub author: Option<String>,
|
     pub author: Option<String>,
+    pub authors: Vec<String>,
     pub series: Option<String>,
     pub volume: Option<i32>,
     pub language: Option<String>,
@@ -63,6 +76,7 @@ pub struct BookDetails {
     pub kind: String,
     pub title: String,
     pub author: Option<String>,
+    pub authors: Vec<String>,
     pub series: Option<String>,
     pub volume: Option<i32>,
     pub language: Option<String>,
@@ -76,6 +90,12 @@ pub struct BookDetails {
     pub reading_current_page: Option<i32>,
     #[schema(value_type = Option<String>)]
     pub reading_last_read_at: Option<DateTime<Utc>>,
+    pub summary: Option<String>,
+    pub isbn: Option<String>,
+    pub publish_date: Option<String>,
+    /// Fields locked from external metadata sync
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub locked_fields: Option<serde_json::Value>,
 }
 
 /// List books with optional filtering and pagination
@@ -85,11 +105,13 @@ pub struct BookDetails {
     tag = "books",
     params(
         ("library_id" = Option<String>, Query, description = "Filter by library ID"),
-        ("kind" = Option<String>, Query, description = "Filter by book kind (cbz, cbr, pdf)"),
+        ("kind" = Option<String>, Query, description = "Filter by book kind (cbz, cbr, pdf, epub)"),
         ("series" = Option<String>, Query, description = "Filter by series name (use 'unclassified' for books without series)"),
         ("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"),
         ("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"),
         ("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"),
+        ("sort" = Option<String>, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"),
+        ("metadata_provider" = Option<String>, Query, description = "Filter by metadata provider: 'linked' (any provider), 'unlinked' (no provider), or a specific provider name"),
     ),
     responses(
         (status = 200, body = BooksPage),
@@ -100,7 +122,9 @@ pub struct BookDetails {
 pub async fn list_books(
     State(state): State<AppState>,
     Query(query): Query<ListBooksQuery>,
+    user: Option<Extension<AuthUser>>,
 ) -> Result<Json<BooksPage>, ApiError> {
+    let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
     let limit = query.limit.unwrap_or(50).clamp(1, 200);
     let page = query.page.unwrap_or(1).max(1);
     let offset = (page - 1) * limit;
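The pagination guards above are worth spelling out: `limit` is clamped to 1..=200 and `page` floored at 1 before the offset is derived, so a malformed query string can never produce a negative offset or an unbounded result set. A minimal standalone sketch of the same arithmetic (the function name is illustrative, not part of the diff):

```rust
/// Clamp raw pagination inputs the way list_books does.
/// Returns (limit, page, offset); assumes the 50/200 defaults above.
fn page_window(limit: Option<i64>, page: Option<i64>) -> (i64, i64, i64) {
    let limit = limit.unwrap_or(50).clamp(1, 200); // default 50, hard cap 200
    let page = page.unwrap_or(1).max(1);           // pages are 1-indexed
    (limit, page, (page - 1) * limit)              // offset for SQL LIMIT/OFFSET
}

fn main() {
    assert_eq!(page_window(Some(1000), Some(0)), (200, 1, 0));
    assert_eq!(page_window(None, Some(3)), (50, 3, 100));
}
```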
@@ -110,8 +134,8 @@ pub async fn list_books(
         s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect()
     });
 
-    // Shared COUNT and DATA conditions — $1=library_id $2=kind, then optional params
-    let mut p: usize = 2;
+    // Shared COUNT and DATA conditions — $1=library_id $2=kind $3=format, then optional params
+    let mut p: usize = 3;
     let series_cond = match query.series.as_deref() {
         Some("unclassified") => "AND (b.series IS NULL OR b.series = '')".to_string(),
         Some(_) => { p += 1; format!("AND b.series = ${p}") }
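The counter `p` tracks the highest positional parameter allocated so far, so each optional filter reserves the next `$N` placeholder in exactly the order its value is later bound. A self-contained sketch of that pattern (filter names are illustrative):

```rust
// Build a WHERE fragment, allocating the next Postgres placeholder per filter.
fn build_conditions(series: Option<&str>, author: Option<&str>) -> (String, usize) {
    let mut p: usize = 2; // $1 and $2 are always bound (library_id, kind)
    let mut sql = String::new();
    if series.is_some() {
        p += 1;
        sql.push_str(&format!(" AND b.series = ${p}"));
    }
    if author.is_some() {
        p += 1;
        sql.push_str(&format!(" AND b.author = ${p}"));
    }
    (sql, p) // p is the last placeholder used; LIMIT/OFFSET take p+1, p+2
}

fn main() {
    let (sql, last) = build_conditions(Some("One Piece"), None);
    assert_eq!(sql, " AND b.series = $3");
    assert_eq!(last, 3);
}
```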
@@ -120,48 +144,80 @@ pub async fn list_books(
     let rs_cond = if reading_statuses.is_some() {
         p += 1; format!("AND COALESCE(brp.status, 'unread') = ANY(${p})")
     } else { String::new() };
+    let author_cond = if query.author.is_some() {
+        p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR EXISTS (SELECT 1 FROM series_metadata sm WHERE sm.library_id = b.library_id AND sm.name = b.series AND ${p} = ANY(sm.authors)))")
+    } else { String::new() };
+    let metadata_cond = match query.metadata_provider.as_deref() {
+        Some("unlinked") => "AND eml.id IS NULL".to_string(),
+        Some("linked") => "AND eml.id IS NOT NULL".to_string(),
+        Some(_) => { p += 1; format!("AND eml.provider = ${p}") },
+        None => String::new(),
+    };
+    p += 1;
+    let uid_p = p;
+
+    let metadata_links_cte = r#"
+    metadata_links AS (
+        SELECT DISTINCT ON (eml.series_name, eml.library_id)
+            eml.series_name, eml.library_id, eml.provider, eml.id
+        FROM external_metadata_links eml
+        WHERE eml.status = 'approved'
+        ORDER BY eml.series_name, eml.library_id, eml.created_at DESC
+    )"#;
+
     let count_sql = format!(
-        r#"SELECT COUNT(*) FROM books b
-        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
+        r#"WITH {metadata_links_cte}
+        SELECT COUNT(*) FROM books b
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
+        LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
         WHERE ($1::uuid IS NULL OR b.library_id = $1)
         AND ($2::text IS NULL OR b.kind = $2)
+        AND ($3::text IS NULL OR b.format = $3)
         {series_cond}
-        {rs_cond}"#
+        {rs_cond}
+        {author_cond}
+        {metadata_cond}"#
     );
 
+    let order_clause = if query.sort.as_deref() == Some("latest") {
+        "b.updated_at DESC".to_string()
+    } else {
+        "b.volume NULLS LAST, REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(b.title), '\\d+'))[1]::int, 0), b.title ASC".to_string()
+    };
+
     // DATA: same filter params, then $N+1=limit $N+2=offset
     let limit_p = p + 1;
     let offset_p = p + 2;
     let data_sql = format!(
         r#"
-        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
+        WITH {metadata_links_cte}
+        SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
            COALESCE(brp.status, 'unread') AS reading_status,
            brp.current_page AS reading_current_page,
            brp.last_read_at AS reading_last_read_at
         FROM books b
-        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
+        LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
         WHERE ($1::uuid IS NULL OR b.library_id = $1)
         AND ($2::text IS NULL OR b.kind = $2)
+        AND ($3::text IS NULL OR b.format = $3)
         {series_cond}
         {rs_cond}
-        ORDER BY
-            REGEXP_REPLACE(LOWER(b.title), '[0-9]+', '', 'g'),
-            COALESCE(
-                (REGEXP_MATCH(LOWER(b.title), '\d+'))[1]::int,
-                0
-            ),
-            b.title ASC
+        {author_cond}
+        {metadata_cond}
+        ORDER BY {order_clause}
         LIMIT ${limit_p} OFFSET ${offset_p}
         "#
     );
 
     let mut count_builder = sqlx::query(&count_sql)
         .bind(query.library_id)
-        .bind(query.kind.as_deref());
+        .bind(query.kind.as_deref())
+        .bind(query.format.as_deref());
     let mut data_builder = sqlx::query(&data_sql)
         .bind(query.library_id)
-        .bind(query.kind.as_deref());
+        .bind(query.kind.as_deref())
+        .bind(query.format.as_deref());
 
     if let Some(s) = query.series.as_deref() {
         if s != "unclassified" {
@@ -173,8 +229,18 @@ pub async fn list_books(
         count_builder = count_builder.bind(statuses.clone());
         data_builder = data_builder.bind(statuses.clone());
     }
-    data_builder = data_builder.bind(limit).bind(offset);
+    if let Some(ref author) = query.author {
+        count_builder = count_builder.bind(author.clone());
+        data_builder = data_builder.bind(author.clone());
+    }
+    if let Some(ref mp) = query.metadata_provider {
+        if mp != "linked" && mp != "unlinked" {
+            count_builder = count_builder.bind(mp.clone());
+            data_builder = data_builder.bind(mp.clone());
+        }
+    }
+    count_builder = count_builder.bind(user_id);
+    data_builder = data_builder.bind(user_id).bind(limit).bind(offset);
 
     let (count_row, rows) = tokio::try_join!(
         count_builder.fetch_one(&state.pool),
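`tokio::try_join!` runs the COUNT and DATA queries concurrently on the same pool and fails fast if either errors. A minimal sketch of the same pattern outside sqlx (both futures are invented stand-ins):

```rust
// Two fallible async tasks joined concurrently; the first error wins.
async fn fetch_total() -> Result<i64, String> {
    Ok(42)
}

async fn fetch_rows() -> Result<Vec<String>, String> {
    Ok(vec!["a".into()])
}

#[tokio::main]
async fn main() -> Result<(), String> {
    let (total, rows) = tokio::try_join!(fetch_total(), fetch_rows())?;
    println!("{total} rows total, {} fetched", rows.len());
    Ok(())
}
```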
@@ -190,8 +256,10 @@ pub async fn list_books(
            id: row.get("id"),
            library_id: row.get("library_id"),
            kind: row.get("kind"),
+           format: row.get("format"),
            title: row.get("title"),
            author: row.get("author"),
+           authors: row.get::<Vec<String>, _>("authors"),
            series: row.get("series"),
            volume: row.get("volume"),
            language: row.get("language"),
@@ -231,10 +299,12 @@ pub async fn list_books(
 pub async fn get_book(
     State(state): State<AppState>,
     Path(id): Path<Uuid>,
+    user: Option<Extension<AuthUser>>,
 ) -> Result<Json<BookDetails>, ApiError> {
+    let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
     let row = sqlx::query(
         r#"
-        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path,
+        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
            bf.abs_path, bf.format, bf.parse_status,
            COALESCE(brp.status, 'unread') AS reading_status,
            brp.current_page AS reading_current_page,
@@ -247,11 +317,12 @@ pub async fn get_book(
            ORDER BY updated_at DESC
            LIMIT 1
        ) bf ON TRUE
-        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
        WHERE b.id = $1
        "#,
    )
    .bind(id)
+   .bind(user_id)
    .fetch_optional(&state.pool)
    .await?;
 
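Putting `$2::uuid IS NOT NULL AND brp.user_id = $2` in the JOIN condition rather than the WHERE clause keeps the LEFT JOIN outer: an anonymous request (NULL user) still gets the book row, just with no progress attached. A side-by-side of the two fragments, written out as plain strings for reading (the second variant is a hypothetical counter-example, not code from this change):

```rust
// Why the user filter lives in the JOIN, not the WHERE clause:
// a WHERE condition on brp.* would effectively turn the LEFT JOIN
// inner and drop books that have no progress row for this user.
const OUTER_JOIN_KEPT: &str = r#"
    LEFT JOIN book_reading_progress brp
        ON brp.book_id = b.id
        AND $2::uuid IS NOT NULL  -- anonymous: the join matches nothing,
        AND brp.user_id = $2      -- but the book row is still returned
"#;

const WOULD_FILTER_BOOKS_OUT: &str = r#"
    LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
    WHERE brp.user_id = $2  -- NULL user or missing progress loses the book
"#;

fn main() {
    // The two fragments above exist for side-by-side reading only.
    println!("{OUTER_JOIN_KEPT}{WOULD_FILTER_BOOKS_OUT}");
}
```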
@@ -263,6 +334,7 @@ pub async fn get_book(
        kind: row.get("kind"),
        title: row.get("title"),
        author: row.get("author"),
+       authors: row.get::<Vec<String>, _>("authors"),
        series: row.get("series"),
        volume: row.get("volume"),
        language: row.get("language"),
@@ -274,196 +346,16 @@ pub async fn get_book(
        reading_status: row.get("reading_status"),
        reading_current_page: row.get("reading_current_page"),
        reading_last_read_at: row.get("reading_last_read_at"),
+       summary: row.get("summary"),
+       isbn: row.get("isbn"),
+       publish_date: row.get("publish_date"),
+       locked_fields: Some(row.get::<serde_json::Value, _>("locked_fields")),
    }))
 }
 
-#[derive(Serialize, ToSchema)]
-pub struct SeriesItem {
-    pub name: String,
-    pub book_count: i64,
-    pub books_read_count: i64,
-    #[schema(value_type = String)]
-    pub first_book_id: Uuid,
-}
-
-#[derive(Serialize, ToSchema)]
-pub struct SeriesPage {
-    pub items: Vec<SeriesItem>,
-    pub total: i64,
-    pub page: i64,
-    pub limit: i64,
-}
-
-#[derive(Deserialize, ToSchema)]
-pub struct ListSeriesQuery {
-    #[schema(value_type = Option<String>, example = "dragon")]
-    pub q: Option<String>,
-    #[schema(value_type = Option<String>, example = "unread,reading")]
-    pub reading_status: Option<String>,
-    #[schema(value_type = Option<i64>, example = 1)]
-    pub page: Option<i64>,
-    #[schema(value_type = Option<i64>, example = 50)]
-    pub limit: Option<i64>,
-}
-
-/// List all series in a library with pagination
-#[utoipa::path(
-    get,
-    path = "/libraries/{library_id}/series",
-    tag = "books",
-    params(
-        ("library_id" = String, Path, description = "Library UUID"),
-        ("q" = Option<String>, Query, description = "Filter by series name (case-insensitive, partial match)"),
-        ("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"),
-        ("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"),
-        ("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"),
-    ),
-    responses(
-        (status = 200, body = SeriesPage),
-        (status = 401, description = "Unauthorized"),
-    ),
-    security(("Bearer" = []))
-)]
-pub async fn list_series(
-    State(state): State<AppState>,
-    Path(library_id): Path<Uuid>,
-    Query(query): Query<ListSeriesQuery>,
-) -> Result<Json<SeriesPage>, ApiError> {
-    let limit = query.limit.unwrap_or(50).clamp(1, 200);
-    let page = query.page.unwrap_or(1).max(1);
-    let offset = (page - 1) * limit;
-
-    let reading_statuses: Option<Vec<String>> = query.reading_status.as_deref().map(|s| {
-        s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect()
-    });
-
-    let series_status_expr = r#"CASE
-        WHEN sc.books_read_count = sc.book_count THEN 'read'
-        WHEN sc.books_read_count = 0 THEN 'unread'
-        ELSE 'reading'
-    END"#;
-
-    // Dynamic parameters — $1 = fixed library_id, then optionals in order
-    let mut p: usize = 1;
-
-    let q_cond = if query.q.is_some() {
-        p += 1; format!("AND sc.name ILIKE ${p}")
-    } else { String::new() };
-
-    let count_rs_cond = if reading_statuses.is_some() {
-        p += 1; format!("AND {series_status_expr} = ANY(${p})")
-    } else { String::new() };
-
-    // q_cond and count_rs_cond share the same p — count_sql reuses them directly
-    let count_sql = format!(
-        r#"
-        WITH sorted_books AS (
-            SELECT COALESCE(NULLIF(series, ''), 'unclassified') as name, id
-            FROM books WHERE library_id = $1
-        ),
-        series_counts AS (
-            SELECT sb.name,
-                COUNT(*) as book_count,
-                COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
-            FROM sorted_books sb
-            LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
-            GROUP BY sb.name
-        )
-        SELECT COUNT(*) FROM series_counts sc WHERE TRUE {q_cond} {count_rs_cond}
-        "#
-    );
-
-    // DATA: same params in the same order, then limit/offset at the end
-    let limit_p = p + 1;
-    let offset_p = p + 2;
-
-    let data_sql = format!(
-        r#"
-        WITH sorted_books AS (
-            SELECT
-                COALESCE(NULLIF(series, ''), 'unclassified') as name,
-                id,
-                ROW_NUMBER() OVER (
-                    PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
-                    ORDER BY
-                        REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
-                        COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
-                        title ASC
-                ) as rn
-            FROM books
-            WHERE library_id = $1
-        ),
-        series_counts AS (
-            SELECT
-                sb.name,
-                COUNT(*) as book_count,
-                COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
-            FROM sorted_books sb
-            LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
-            GROUP BY sb.name
-        )
-        SELECT
-            sc.name,
-            sc.book_count,
-            sc.books_read_count,
-            sb.id as first_book_id
-        FROM series_counts sc
-        JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1
-        WHERE TRUE
-        {q_cond}
-        {count_rs_cond}
-        ORDER BY
-            REGEXP_REPLACE(LOWER(sc.name), '[0-9]+', '', 'g'),
-            COALESCE(
-                (REGEXP_MATCH(LOWER(sc.name), '\d+'))[1]::int,
-                0
-            ),
-            sc.name ASC
-        LIMIT ${limit_p} OFFSET ${offset_p}
-        "#
-    );
-
-    let q_pattern = query.q.as_deref().map(|q| format!("%{}%", q));
-
-    let mut count_builder = sqlx::query(&count_sql).bind(library_id);
-    let mut data_builder = sqlx::query(&data_sql).bind(library_id);
-
-    if let Some(ref pat) = q_pattern {
-        count_builder = count_builder.bind(pat);
-        data_builder = data_builder.bind(pat);
-    }
-    if let Some(ref statuses) = reading_statuses {
-        count_builder = count_builder.bind(statuses.clone());
-        data_builder = data_builder.bind(statuses.clone());
-    }
-
-    data_builder = data_builder.bind(limit).bind(offset);
-
-    let (count_row, rows) = tokio::try_join!(
-        count_builder.fetch_one(&state.pool),
-        data_builder.fetch_all(&state.pool),
-    )?;
-    let total: i64 = count_row.get(0);
-
-    let mut items: Vec<SeriesItem> = rows
-        .iter()
-        .map(|row| SeriesItem {
-            name: row.get("name"),
-            book_count: row.get("book_count"),
-            books_read_count: row.get("books_read_count"),
-            first_book_id: row.get("first_book_id"),
-        })
-        .collect();
-
-    Ok(Json(SeriesPage {
-        items: std::mem::take(&mut items),
-        total,
-        page,
-        limit,
-    }))
-}
-
-fn remap_libraries_path(path: &str) -> String {
+// ─── Helpers ──────────────────────────────────────────────────────────────────
+
+pub(crate) fn remap_libraries_path(path: &str) -> String {
     if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
         if path.starts_with("/libraries/") {
             return path.replacen("/libraries", &root, 1);
@@ -481,6 +373,8 @@ fn unmap_libraries_path(path: &str) -> String {
     path.to_string()
 }
 
+// ─── Convert CBR → CBZ ───────────────────────────────────────────────────────
+
 /// Enqueue a CBR → CBZ conversion job for a single book
 #[utoipa::path(
     post,
@@ -570,12 +464,136 @@ pub async fn convert_book(
     Ok(Json(crate::index_jobs::map_row(job_row)))
 }
 
+// ─── Metadata editing ─────────────────────────────────────────────────────────
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateBookRequest {
+    pub title: String,
+    pub author: Option<String>,
+    #[serde(default)]
+    pub authors: Vec<String>,
+    pub series: Option<String>,
+    pub volume: Option<i32>,
+    pub language: Option<String>,
+    pub summary: Option<String>,
+    pub isbn: Option<String>,
+    pub publish_date: Option<String>,
+    /// Fields locked from external metadata sync
+    #[serde(default)]
+    pub locked_fields: Option<serde_json::Value>,
+}
+
+/// Update metadata for a specific book
+#[utoipa::path(
+    patch,
+    path = "/books/{id}",
+    tag = "books",
+    params(("id" = String, Path, description = "Book UUID")),
+    request_body = UpdateBookRequest,
+    responses(
+        (status = 200, body = BookDetails),
+        (status = 400, description = "Invalid request"),
+        (status = 404, description = "Book not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_book(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+    Json(body): Json<UpdateBookRequest>,
+) -> Result<Json<BookDetails>, ApiError> {
+    let title = body.title.trim().to_string();
+    if title.is_empty() {
+        return Err(ApiError::bad_request("title cannot be empty"));
+    }
+    let author = body.author.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let authors: Vec<String> = body.authors.iter()
+        .map(|a| a.trim().to_string())
+        .filter(|a| !a.is_empty())
+        .collect();
+    let series = body.series.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let language = body.language.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+
+    let summary = body.summary.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let isbn = body.isbn.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let publish_date = body.publish_date.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
+    let row = sqlx::query(
+        r#"
+        UPDATE books
+        SET title = $2, author = $3, authors = $4, series = $5, volume = $6, language = $7,
+            summary = $8, isbn = $9, publish_date = $10, locked_fields = $11, updated_at = NOW()
+        WHERE id = $1
+        RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path,
+            summary, isbn, publish_date,
+            'unread' AS reading_status,
+            NULL::integer AS reading_current_page,
+            NULL::timestamptz AS reading_last_read_at
+        "#,
+    )
+    .bind(id)
+    .bind(&title)
+    .bind(&author)
+    .bind(&authors)
+    .bind(&series)
+    .bind(body.volume)
+    .bind(&language)
+    .bind(&summary)
+    .bind(&isbn)
+    .bind(&publish_date)
+    .bind(&locked_fields)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
+    let thumbnail_path: Option<String> = row.get("thumbnail_path");
+
+    Ok(Json(BookDetails {
+        id: row.get("id"),
+        library_id: row.get("library_id"),
+        kind: row.get("kind"),
+        title: row.get("title"),
+        author: row.get("author"),
+        authors: row.get::<Vec<String>, _>("authors"),
+        series: row.get("series"),
+        volume: row.get("volume"),
+        language: row.get("language"),
+        page_count: row.get("page_count"),
+        thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
+        file_path: None,
+        file_format: None,
+        file_parse_status: None,
+        reading_status: row.get("reading_status"),
+        reading_current_page: row.get("reading_current_page"),
+        reading_last_read_at: row.get("reading_last_read_at"),
+        summary: row.get("summary"),
+        isbn: row.get("isbn"),
+        publish_date: row.get("publish_date"),
+        locked_fields: Some(locked_fields),
+    }))
+}
+
+// ─── Thumbnail ────────────────────────────────────────────────────────────────
+
 use axum::{
     body::Body,
     http::{header, HeaderMap, HeaderValue, StatusCode},
     response::IntoResponse,
 };
 
+/// Detect content type from thumbnail file extension.
+fn detect_thumbnail_content_type(path: &str) -> &'static str {
+    if path.ends_with(".jpg") || path.ends_with(".jpeg") {
+        "image/jpeg"
+    } else if path.ends_with(".png") {
+        "image/png"
+    } else {
+        "image/webp"
+    }
+}
+
 /// Get book thumbnail image
 #[utoipa::path(
     get,
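The helper falls back to `image/webp` for any unrecognized extension, which matches the default thumbnail format served below. A quick check of the mapping (same function body, copied so it runs standalone):

```rust
fn detect_thumbnail_content_type(path: &str) -> &'static str {
    if path.ends_with(".jpg") || path.ends_with(".jpeg") {
        "image/jpeg"
    } else if path.ends_with(".png") {
        "image/png"
    } else {
        "image/webp" // default: thumbnails are rendered as WebP
    }
}

fn main() {
    assert_eq!(detect_thumbnail_content_type("/thumbs/a.jpeg"), "image/jpeg");
    assert_eq!(detect_thumbnail_content_type("/thumbs/a.png"), "image/png");
    assert_eq!(detect_thumbnail_content_type("/thumbs/a.webp"), "image/webp");
}
```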
@@ -604,9 +622,12 @@ pub async fn get_thumbnail(
     let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
     let thumbnail_path: Option<String> = row.get("thumbnail_path");
 
-    let data = if let Some(ref path) = thumbnail_path {
+    let (data, content_type) = if let Some(ref path) = thumbnail_path {
         match std::fs::read(path) {
-            Ok(bytes) => bytes,
+            Ok(bytes) => {
+                let ct = detect_thumbnail_content_type(path);
+                (bytes, ct)
+            }
             Err(_) => {
                 // File missing on disk (e.g. different mount in dev) — fall back to live render
                 crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
@@ -617,12 +638,17 @@ pub async fn get_thumbnail(
         crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
     };
 
+    let etag_value = format!("\"{}_{:x}\"", book_id, data.len());
+
     let mut headers = HeaderMap::new();
-    headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("image/webp"));
+    headers.insert(header::CONTENT_TYPE, HeaderValue::from_static(content_type));
     headers.insert(
         header::CACHE_CONTROL,
         HeaderValue::from_static("public, max-age=31536000, immutable"),
     );
+    if let Ok(v) = HeaderValue::from_str(&etag_value) {
+        headers.insert(header::ETAG, v);
+    }
 
     Ok((StatusCode::OK, headers, Body::from(data)))
 }
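The ETag is derived from the book id plus the body length in hex, so it changes whenever the thumbnail is re-rendered at a different size; the surrounding quotes are the validator syntax HTTP requires. A small sketch of the same derivation (values invented):

```rust
use uuid::Uuid;

/// ETag of the form "<uuid>_<len-in-hex>", quotes included as HTTP requires.
fn thumbnail_etag(book_id: Uuid, body_len: usize) -> String {
    format!("\"{}_{:x}\"", book_id, body_len)
}

fn main() {
    let id = Uuid::nil();
    assert_eq!(
        thumbnail_etag(id, 255),
        "\"00000000-0000-0000-0000-000000000000_ff\""
    );
}
```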
@@ -83,3 +83,9 @@ impl From<std::io::Error> for ApiError {
         Self::internal(format!("IO error: {err}"))
     }
 }
+
+impl From<reqwest::Error> for ApiError {
+    fn from(err: reqwest::Error) -> Self {
+        Self::internal(format!("HTTP client error: {err}"))
+    }
+}
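With this `From` impl in place, handlers that call reqwest can use `?` directly and let the conversion produce an internal ApiError. A hedged sketch of that usage (this `ApiError` is a stand-in with the same impl, not the crate's type):

```rust
// Sketch: `?` on a reqwest call now converts the error automatically.
#[derive(Debug)]
struct ApiError(String);

impl From<reqwest::Error> for ApiError {
    fn from(err: reqwest::Error) -> Self {
        ApiError(format!("HTTP client error: {err}"))
    }
}

async fn ping(url: &str) -> Result<u16, ApiError> {
    let resp = reqwest::get(url).await?; // reqwest::Error -> ApiError via From
    Ok(resp.status().as_u16())
}
```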
@@ -16,6 +16,10 @@ pub struct RebuildRequest {
     pub library_id: Option<Uuid>,
     #[schema(value_type = Option<bool>, example = false)]
     pub full: Option<bool>,
+    /// Deep rescan: clears directory mtimes to force re-walking all directories,
+    /// discovering newly supported formats without deleting existing data.
+    #[schema(value_type = Option<bool>, example = false)]
+    pub rescan: Option<bool>,
 }
 
 #[derive(Serialize, ToSchema)]
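Since all three flags are optional, a deep rescan is requested by sending `rescan: true` and omitting the rest; `serde_json` shows the wire shape (field names come from `RebuildRequest` above, the endpoint path and UUID are assumptions for illustration):

```rust
use serde_json::json;

fn main() {
    // Hypothetical body for the rebuild endpoint, requesting a deep rescan
    // of one library. Field names match RebuildRequest; values are invented.
    let body = json!({
        "library_id": "7d3670e9-0000-0000-0000-000000000000",
        "rescan": true
    });
    println!("{body}");
}
```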
@@ -117,7 +121,8 @@ pub async fn enqueue_rebuild(
 ) -> Result<Json<IndexJobResponse>, ApiError> {
     let library_id = payload.as_ref().and_then(|p| p.0.library_id);
     let is_full = payload.as_ref().and_then(|p| p.0.full).unwrap_or(false);
-    let job_type = if is_full { "full_rebuild" } else { "rebuild" };
+    let is_rescan = payload.as_ref().and_then(|p| p.0.rescan).unwrap_or(false);
+    let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
     let id = Uuid::new_v4();
 
     sqlx::query(
@@ -182,7 +187,7 @@ pub async fn cancel_job(
     id: axum::extract::Path<Uuid>,
 ) -> Result<Json<IndexJobResponse>, ApiError> {
     let rows_affected = sqlx::query(
-        "UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'generating_thumbnails')",
+        "UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'extracting_pages', 'generating_thumbnails')",
     )
     .bind(id.0)
     .execute(&state.pool)
@@ -246,9 +251,9 @@ pub async fn list_folders(
         base_path.to_path_buf()
     };
 
-    // Ensure the path is within the libraries root
-    let canonical_target = target_path.canonicalize().unwrap_or(target_path.clone());
-    let canonical_base = base_path.canonicalize().unwrap_or(base_path.to_path_buf());
+    // Ensure the path is within the libraries root (avoid canonicalize — burns fd on Docker mounts)
+    let canonical_target = target_path.clone();
+    let canonical_base = base_path.to_path_buf();
 
     if !canonical_target.starts_with(&canonical_base) {
         return Err(ApiError::bad_request("Path is outside libraries root"));
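Dropping `canonicalize()` makes the containment check purely lexical: it no longer resolves symlinks or `..` segments. If this route can receive user-supplied paths, rejecting `..` components before the `starts_with` test restores most of that protection. A minimal sketch of such an extra guard (an assumption on my part, not something this change adds):

```rust
use std::path::{Component, Path};

/// Lexical containment check: inside `base` and free of `..` segments.
/// Sketch only; the change itself relies on starts_with alone.
fn is_within(base: &Path, target: &Path) -> bool {
    let no_parent_refs = target
        .components()
        .all(|c| !matches!(c, Component::ParentDir));
    no_parent_refs && target.starts_with(base)
}

fn main() {
    let base = Path::new("/libraries");
    assert!(is_within(base, Path::new("/libraries/manga")));
    assert!(!is_within(base, Path::new("/libraries/../etc")));
}
```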
@@ -263,19 +268,31 @@ pub async fn list_folders(
         0
     };
 
-    if let Ok(entries) = std::fs::read_dir(&canonical_target) {
-        for entry in entries.flatten() {
-            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
+    let entries = std::fs::read_dir(&canonical_target)
+        .map_err(|e| ApiError::internal(format!("cannot read directory {}: {}", canonical_target.display(), e)))?;
+
+    for entry in entries {
+        let entry = match entry {
+            Ok(e) => e,
+            Err(e) => {
+                tracing::warn!("[FOLDERS] entry error in {}: {}", canonical_target.display(), e);
+                continue;
+            }
+        };
+        let is_dir = match entry.file_type() {
+            Ok(ft) => ft.is_dir(),
+            Err(e) => {
+                tracing::warn!("[FOLDERS] cannot stat {}: {}", entry.path().display(), e);
+                continue;
+            }
+        };
+        if is_dir {
             let name = entry.file_name().to_string_lossy().to_string();
 
-            // Check if this folder has children
-            let has_children = if let Ok(sub_entries) = std::fs::read_dir(entry.path()) {
-                sub_entries.flatten().any(|e| {
-                    e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)
-                })
-            } else {
-                false
-            };
+            // Check if this folder has children (best-effort, default to true on error)
+            let has_children = std::fs::read_dir(entry.path())
+                .map(|sub| sub.flatten().any(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)))
+                .unwrap_or(true);
 
             // Calculate the full path relative to libraries root
             let full_path = if let Ok(relative) = entry.path().strip_prefix(&canonical_base) {
@@ -292,7 +309,6 @@ pub async fn list_folders(
             });
         }
     }
-    }
 
     folders.sort_by(|a, b| a.name.cmp(&b.name));
     Ok(Json(folders))
134 apps/api/src/job_poller.rs (new file)
@@ -0,0 +1,134 @@
+use std::time::Duration;
+
+use sqlx::{PgPool, Row};
+use tracing::{error, info, trace};
+use uuid::Uuid;
+
+use crate::{metadata_batch, metadata_refresh, notifications};
+
+/// Poll for pending API-only jobs (`metadata_batch`, `metadata_refresh`) and process them.
+/// This mirrors the indexer's worker loop but for job types handled by the API.
+pub async fn run_job_poller(pool: PgPool, interval_seconds: u64) {
+    let wait = Duration::from_secs(interval_seconds.max(1));
+
+    loop {
+        match claim_next_api_job(&pool).await {
+            Ok(Some((job_id, job_type, library_id))) => {
+                info!("[JOB_POLLER] Claimed {job_type} job {job_id} library={library_id}");
+
+                let pool_clone = pool.clone();
+                let library_name: Option<String> =
+                    sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
+                        .bind(library_id)
+                        .fetch_optional(&pool)
+                        .await
+                        .ok()
+                        .flatten();
+
+                tokio::spawn(async move {
+                    let result = match job_type.as_str() {
+                        "metadata_refresh" => {
+                            metadata_refresh::process_metadata_refresh(
+                                &pool_clone,
+                                job_id,
+                                library_id,
+                            )
+                            .await
+                        }
+                        "metadata_batch" => {
+                            metadata_batch::process_metadata_batch(
+                                &pool_clone,
+                                job_id,
+                                library_id,
+                            )
+                            .await
+                        }
+                        _ => Err(format!("Unknown API job type: {job_type}")),
+                    };
+
+                    if let Err(e) = result {
+                        error!("[JOB_POLLER] {job_type} job {job_id} failed: {e}");
+                        let _ = sqlx::query(
+                            "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
+                        )
+                        .bind(job_id)
+                        .bind(e.to_string())
+                        .execute(&pool_clone)
+                        .await;
+
+                        match job_type.as_str() {
+                            "metadata_refresh" => {
+                                notifications::notify(
+                                    pool_clone,
+                                    notifications::NotificationEvent::MetadataRefreshFailed {
+                                        library_name,
+                                        error: e.to_string(),
+                                    },
+                                );
+                            }
+                            "metadata_batch" => {
+                                notifications::notify(
+                                    pool_clone,
+                                    notifications::NotificationEvent::MetadataBatchFailed {
+                                        library_name,
+                                        error: e.to_string(),
+                                    },
+                                );
+                            }
+                            _ => {}
+                        }
+                    }
+                });
+            }
+            Ok(None) => {
+                trace!("[JOB_POLLER] No pending API jobs, waiting...");
+                tokio::time::sleep(wait).await;
+            }
+            Err(err) => {
+                error!("[JOB_POLLER] Error claiming job: {err}");
+                tokio::time::sleep(wait).await;
+            }
+        }
+    }
+}
+
+const API_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh"];
+
+async fn claim_next_api_job(pool: &PgPool) -> Result<Option<(Uuid, String, Uuid)>, sqlx::Error> {
+    let mut tx = pool.begin().await?;
+
+    let row = sqlx::query(
+        r#"
+        SELECT id, type, library_id
+        FROM index_jobs
+        WHERE status = 'pending'
+          AND type = ANY($1)
+          AND library_id IS NOT NULL
+        ORDER BY created_at ASC
+        FOR UPDATE SKIP LOCKED
+        LIMIT 1
+        "#,
+    )
+    .bind(API_JOB_TYPES)
+    .fetch_optional(&mut *tx)
+    .await?;
+
+    let Some(row) = row else {
+        tx.commit().await?;
+        return Ok(None);
+    };
+
+    let id: Uuid = row.get("id");
+    let job_type: String = row.get("type");
+    let library_id: Uuid = row.get("library_id");
+
+    sqlx::query(
+        "UPDATE index_jobs SET status = 'running', started_at = NOW(), error_opt = NULL WHERE id = $1",
+    )
+    .bind(id)
+    .execute(&mut *tx)
+    .await?;
+
+    tx.commit().await?;
+    Ok(Some((id, job_type, library_id)))
+}
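Because `claim_next_api_job` selects with `FOR UPDATE SKIP LOCKED` inside a transaction, several API replicas can poll the same table without double-claiming a job. `run_job_poller` never returns, so it is presumably spawned alongside the HTTP server at startup; a sketch under that assumption (the main function, pool setup, and the 5-second interval are invented, and `job_poller` refers to the module above):

```rust
// Sketch: spawning the poller next to the API server at startup.
// Pool construction and the interval value are assumptions.
use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect("postgres://localhost/app")
        .await?;

    // Background task: claims pending metadata jobs every 5 seconds.
    tokio::spawn(crate::job_poller::run_job_poller(pool.clone(), 5));

    // ... start the axum server with the same pool ...
    Ok(())
}
```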
410 apps/api/src/komga.rs (new file)
@@ -0,0 +1,410 @@
+use axum::{extract::State, Json};
+use chrono::{DateTime, Utc};
+use serde::{Deserialize, Serialize};
+use sqlx::Row;
+use std::collections::HashMap;
+use utoipa::ToSchema;
+use uuid::Uuid;
+
+use crate::{error::ApiError, state::AppState};
+
+// ─── Komga API types ─────────────────────────────────────────────────────────
+
+#[derive(Deserialize)]
+struct KomgaBooksResponse {
+    content: Vec<KomgaBook>,
+    #[serde(rename = "totalPages")]
+    total_pages: i32,
+    number: i32,
+}
+
+#[derive(Deserialize)]
+struct KomgaBook {
+    name: String,
+    #[serde(rename = "seriesTitle")]
+    series_title: String,
+    metadata: KomgaBookMetadata,
+}
+
+#[derive(Deserialize)]
+struct KomgaBookMetadata {
+    title: String,
+}
+
+// ─── Request / Response ──────────────────────────────────────────────────────
+
+#[derive(Deserialize, ToSchema)]
+pub struct KomgaSyncRequest {
+    pub url: String,
+    pub username: String,
+    pub password: String,
+    #[schema(value_type = String)]
+    pub user_id: Uuid,
+}
+
+#[derive(Serialize, ToSchema)]
+pub struct KomgaSyncResponse {
+    #[schema(value_type = String)]
+    pub id: Uuid,
+    pub komga_url: String,
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+    pub total_komga_read: i64,
+    pub matched: i64,
+    pub already_read: i64,
+    pub newly_marked: i64,
+    pub matched_books: Vec<String>,
+    pub newly_marked_books: Vec<String>,
+    pub unmatched: Vec<String>,
+    #[schema(value_type = String)]
+    pub created_at: DateTime<Utc>,
+}
+
+#[derive(Serialize, ToSchema)]
+pub struct KomgaSyncReportSummary {
+    #[schema(value_type = String)]
+    pub id: Uuid,
+    pub komga_url: String,
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+    pub total_komga_read: i64,
+    pub matched: i64,
+    pub already_read: i64,
+    pub newly_marked: i64,
+    pub unmatched_count: i32,
+    #[schema(value_type = String)]
+    pub created_at: DateTime<Utc>,
+}
+
+// ─── Handlers ────────────────────────────────────────────────────────────────
+
+/// Sync read books from a Komga server
+#[utoipa::path(
+    post,
+    path = "/komga/sync",
+    tag = "komga",
+    request_body = KomgaSyncRequest,
+    responses(
+        (status = 200, body = KomgaSyncResponse),
+        (status = 400, description = "Bad request"),
+        (status = 401, description = "Unauthorized"),
+        (status = 500, description = "Komga connection or sync error"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn sync_komga_read_books(
+    State(state): State<AppState>,
+    Json(body): Json<KomgaSyncRequest>,
+) -> Result<Json<KomgaSyncResponse>, ApiError> {
+    let url = body.url.trim_end_matches('/').to_string();
+    if url.is_empty() {
+        return Err(ApiError::bad_request("url is required"));
+    }
+
+    // Build HTTP client with basic auth
+    let client = reqwest::Client::builder()
+        .timeout(std::time::Duration::from_secs(30))
+        .build()
+        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
+
+    // Paginate through all READ books from Komga
+    let mut komga_books: Vec<(String, String)> = Vec::new(); // (series_title, title)
+    let mut page = 0;
+    let page_size = 100;
+    let max_pages = 500;
+
+    loop {
+        let resp = client
+            .post(format!("{url}/api/v1/books/list?page={page}&size={page_size}"))
+            .basic_auth(&body.username, Some(&body.password))
+            .header("Content-Type", "application/json")
+            .json(&serde_json::json!({ "condition": { "readStatus": { "operator": "is", "value": "READ" } } }))
+            .send()
+            .await
+            .map_err(|e| ApiError::internal(format!("Komga request failed: {e}")))?;
+
+        if !resp.status().is_success() {
+            let status = resp.status();
+            let text = resp.text().await.unwrap_or_default();
+            return Err(ApiError::internal(format!(
+                "Komga returned {status}: {text}"
+            )));
+        }
+
+        let data: KomgaBooksResponse = resp
+            .json()
+            .await
+            .map_err(|e| ApiError::internal(format!("Failed to parse Komga response: {e}")))?;
+
+        for book in &data.content {
+            let title = if !book.metadata.title.is_empty() {
+                &book.metadata.title
+            } else {
+                &book.name
+            };
+            komga_books.push((book.series_title.clone(), title.clone()));
+        }
+
+        if data.number >= data.total_pages - 1 || page >= max_pages {
+            break;
+        }
+        page += 1;
+    }
+
+    let total_komga_read = komga_books.len() as i64;
+
+    // Build local lookup maps
+    let rows = sqlx::query(
+        "SELECT id, title, COALESCE(series, '') as series, LOWER(title) as title_lower, LOWER(COALESCE(series, '')) as series_lower FROM books",
+    )
+    .fetch_all(&state.pool)
+    .await?;
+
+    type BookEntry = (Uuid, String, String);
+    // Primary: (series_lower, title_lower) -> Vec<(Uuid, title, series)>
+    let mut primary_map: HashMap<(String, String), Vec<BookEntry>> = HashMap::new();
+    // Secondary: title_lower -> Vec<(Uuid, title, series)>
+    let mut secondary_map: HashMap<String, Vec<BookEntry>> = HashMap::new();
+
+    for row in &rows {
+        let id: Uuid = row.get("id");
+        let title: String = row.get("title");
+        let series: String = row.get("series");
+        let title_lower: String = row.get("title_lower");
+        let series_lower: String = row.get("series_lower");
+        let entry = (id, title, series);
+
+        primary_map
+            .entry((series_lower, title_lower.clone()))
+            .or_default()
+            .push(entry.clone());
+        secondary_map.entry(title_lower).or_default().push(entry);
+    }
+
+    // Match Komga books to local books
+    let mut matched_entries: Vec<(Uuid, String)> = Vec::new(); // (id, display_title)
+    let mut unmatched: Vec<String> = Vec::new();
+
+    for (series_title, title) in &komga_books {
+        let title_lower = title.to_lowercase();
+        let series_lower = series_title.to_lowercase();
+
+        let found = if let Some(entries) = primary_map.get(&(series_lower.clone(), title_lower.clone())) {
+            Some(entries)
+        } else {
+            secondary_map.get(&title_lower)
+        };
+
+        if let Some(entries) = found {
+            for (id, local_title, local_series) in entries {
+                let display = if local_series.is_empty() {
+                    local_title.clone()
+                } else {
+                    format!("{local_series} - {local_title}")
+                };
+                matched_entries.push((*id, display));
+            }
+        } else if series_title.is_empty() {
+            unmatched.push(title.clone());
+        } else {
+            unmatched.push(format!("{series_title} - {title}"));
+        }
+    }
+
+    // Deduplicate by ID
+    matched_entries.sort_by(|a, b| a.0.cmp(&b.0));
+    matched_entries.dedup_by(|a, b| a.0 == b.0);
+
+    let matched_ids: Vec<Uuid> = matched_entries.iter().map(|(id, _)| *id).collect();
+    let matched = matched_ids.len() as i64;
+    let mut already_read: i64 = 0;
+    let mut already_read_ids: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
+
+    if !matched_ids.is_empty() {
+        // Get already-read book IDs for this user
+        let ar_rows = sqlx::query(
+            "SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND user_id = $2 AND status = 'read'",
+        )
+        .bind(&matched_ids)
+        .bind(body.user_id)
+        .fetch_all(&state.pool)
+        .await?;
+
+        for row in &ar_rows {
+            already_read_ids.insert(row.get("book_id"));
+        }
+        already_read = already_read_ids.len() as i64;
+
+        // Bulk upsert all matched books as read for this user
+        sqlx::query(
+            r#"
+            INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
+            SELECT unnest($1::uuid[]), $2, 'read', NULL, NOW(), NOW()
+            ON CONFLICT (book_id, user_id) DO UPDATE
+            SET status = 'read',
+                current_page = NULL,
+                last_read_at = NOW(),
+                updated_at = NOW()
+            WHERE book_reading_progress.status != 'read'
+            "#,
+        )
+        .bind(&matched_ids)
+        .bind(body.user_id)
+        .execute(&state.pool)
+        .await?;
+    }
+
+    let newly_marked = matched - already_read;
+
+    // Build matched_books and newly_marked_books lists
+    let mut newly_marked_books: Vec<String> = Vec::new();
+    let mut matched_books: Vec<String> = Vec::new();
+    for (id, title) in &matched_entries {
+        if !already_read_ids.contains(id) {
+            newly_marked_books.push(title.clone());
+        }
+        matched_books.push(title.clone());
+    }
+    // Sort: newly marked first, then alphabetical
+    let newly_marked_set: std::collections::HashSet<&str> =
+        newly_marked_books.iter().map(|s| s.as_str()).collect();
+    matched_books.sort_by(|a, b| {
+        let a_new = newly_marked_set.contains(a.as_str());
+        let b_new = newly_marked_set.contains(b.as_str());
+        b_new.cmp(&a_new).then(a.cmp(b))
+    });
+    newly_marked_books.sort();
+
+    // Save sync report
+    let unmatched_json = serde_json::to_value(&unmatched).unwrap_or_default();
+    let matched_books_json = serde_json::to_value(&matched_books).unwrap_or_default();
+    let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default();
+    let report_row = sqlx::query(
+        r#"
+        INSERT INTO komga_sync_reports (komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
+        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
+        RETURNING id, created_at
+        "#,
+    )
+    .bind(&url)
+    .bind(body.user_id)
+    .bind(total_komga_read)
+    .bind(matched)
+    .bind(already_read)
+    .bind(newly_marked)
+    .bind(&matched_books_json)
+    .bind(&newly_marked_books_json)
+    .bind(&unmatched_json)
+    .fetch_one(&state.pool)
+    .await?;
+
+    Ok(Json(KomgaSyncResponse {
+        id: report_row.get("id"),
+        komga_url: url,
+        user_id: Some(body.user_id),
+        total_komga_read,
+        matched,
+        already_read,
+        newly_marked,
+        matched_books,
+        newly_marked_books,
+        unmatched,
+        created_at: report_row.get("created_at"),
+    }))
+}
+
+/// List Komga sync reports (most recent first)
+#[utoipa::path(
+    get,
+    path = "/komga/reports",
+    tag = "komga",
+    responses(
+        (status = 200, body = Vec<KomgaSyncReportSummary>),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn list_sync_reports(
+    State(state): State<AppState>,
+) -> Result<Json<Vec<KomgaSyncReportSummary>>, ApiError> {
+    let rows = sqlx::query(
+        r#"
+        SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked,
+            jsonb_array_length(unmatched) as unmatched_count, created_at
+        FROM komga_sync_reports
+        ORDER BY created_at DESC
+        LIMIT 20
+        "#,
+    )
+    .fetch_all(&state.pool)
+    .await?;
+
+    let reports: Vec<KomgaSyncReportSummary> = rows
+        .iter()
+        .map(|row| KomgaSyncReportSummary {
+            id: row.get("id"),
+            komga_url: row.get("komga_url"),
+            user_id: row.get("user_id"),
+            total_komga_read: row.get("total_komga_read"),
+            matched: row.get("matched"),
+            already_read: row.get("already_read"),
+            newly_marked: row.get("newly_marked"),
+            unmatched_count: row.get("unmatched_count"),
+            created_at: row.get("created_at"),
+        })
+        .collect();
+
+    Ok(Json(reports))
+}
+
+/// Get a specific sync report with full unmatched list
+#[utoipa::path(
+    get,
+    path = "/komga/reports/{id}",
+    tag = "komga",
+    params(("id" = String, Path, description = "Report UUID")),
+    responses(
+        (status = 200, body = KomgaSyncResponse),
+        (status = 404, description = "Report not found"),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn get_sync_report(
+    State(state): State<AppState>,
+    axum::extract::Path(id): axum::extract::Path<Uuid>,
+) -> Result<Json<KomgaSyncResponse>, ApiError> {
+    let row = sqlx::query(
+        r#"
+        SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
+        FROM komga_sync_reports
+        WHERE id = $1
+        "#,
+    )
+    .bind(id)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let row = row.ok_or_else(|| ApiError::not_found("report not found"))?;
+
+    let matched_books_json: serde_json::Value = row.try_get("matched_books").unwrap_or(serde_json::Value::Array(vec![]));
+    let matched_books: Vec<String> = serde_json::from_value(matched_books_json).unwrap_or_default();
+    let newly_marked_books_json: serde_json::Value = row.try_get("newly_marked_books").unwrap_or(serde_json::Value::Array(vec![]));
+    let newly_marked_books: Vec<String> = serde_json::from_value(newly_marked_books_json).unwrap_or_default();
+    let unmatched_json: serde_json::Value = row.get("unmatched");
+    let unmatched: Vec<String> = serde_json::from_value(unmatched_json).unwrap_or_default();
+
+    Ok(Json(KomgaSyncResponse {
+        id: row.get("id"),
+        komga_url: row.get("komga_url"),
+        user_id: row.get("user_id"),
+        total_komga_read: row.get("total_komga_read"),
+        matched: row.get("matched"),
+        already_read: row.get("already_read"),
+        newly_marked: row.get("newly_marked"),
+        matched_books,
+        newly_marked_books,
+        unmatched,
+        created_at: row.get("created_at"),
+    }))
+}
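The sync matches on `(series, title)` first and falls back to title alone, both lowercased; since several local editions can share a title, each map stores a Vec of candidate entries. A reduced sketch of that two-level lookup (ids and titles invented):

```rust
use std::collections::HashMap;

fn main() {
    // Primary key: (series_lower, title_lower); fallback key: title_lower.
    let mut primary: HashMap<(String, String), Vec<u32>> = HashMap::new();
    let mut secondary: HashMap<String, Vec<u32>> = HashMap::new();

    // One local book, id 7: series "Berserk", title "Volume 1".
    primary.entry(("berserk".into(), "volume 1".into())).or_default().push(7);
    secondary.entry("volume 1".into()).or_default().push(7);

    // Komga reports ("", "Volume 1"): the primary key misses (no series),
    // so the title-only fallback still finds the local book.
    let key = ("".to_string(), "volume 1".to_string());
    let found = primary.get(&key).or_else(|| secondary.get("volume 1"));
    assert_eq!(found, Some(&vec![7]));
}
```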
@@ -21,6 +21,15 @@ pub struct LibraryResponse {
|
|||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub next_scan_at: Option<chrono::DateTime<chrono::Utc>>,
|
pub next_scan_at: Option<chrono::DateTime<chrono::Utc>>,
|
||||||
pub watcher_enabled: bool,
|
pub watcher_enabled: bool,
|
||||||
|
pub metadata_provider: Option<String>,
|
||||||
|
pub fallback_metadata_provider: Option<String>,
|
||||||
|
pub metadata_refresh_mode: String,
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub next_metadata_refresh_at: Option<chrono::DateTime<chrono::Utc>>,
|
||||||
|
pub series_count: i64,
|
||||||
|
/// First book IDs from up to 5 distinct series (for thumbnail fan display)
|
||||||
|
#[schema(value_type = Vec<String>)]
|
||||||
|
pub thumbnail_book_ids: Vec<Uuid>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Deserialize, ToSchema)]
|
#[derive(Deserialize, ToSchema)]
|
||||||
@@ -39,14 +48,27 @@ pub struct CreateLibraryRequest {
     responses(
         (status = 200, body = Vec<LibraryResponse>),
         (status = 401, description = "Unauthorized"),
-        (status = 403, description = "Forbidden - Admin scope required"),
     ),
     security(("Bearer" = []))
 )]
 pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled,
-         (SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count
+        "SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at,
+         (SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
+         (SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count,
+         COALESCE((
+             SELECT ARRAY_AGG(first_id ORDER BY series_name)
+             FROM (
+                 SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified'))
+                     COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name,
+                     b.id as first_id
+                 FROM books b
+                 WHERE b.library_id = l.id
+                 ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'),
+                          b.volume NULLS LAST, b.title ASC
+                 LIMIT 5
+             ) sub
+         ), ARRAY[]::uuid[]) as thumbnail_book_ids
         FROM libraries l ORDER BY l.created_at DESC"
     )
     .fetch_all(&state.pool)
@@ -60,10 +82,16 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryResponse>>, ApiError> {
             root_path: row.get("root_path"),
             enabled: row.get("enabled"),
             book_count: row.get("book_count"),
+            series_count: row.get("series_count"),
             monitor_enabled: row.get("monitor_enabled"),
             scan_mode: row.get("scan_mode"),
             next_scan_at: row.get("next_scan_at"),
             watcher_enabled: row.get("watcher_enabled"),
+            metadata_provider: row.get("metadata_provider"),
+            fallback_metadata_provider: row.get("fallback_metadata_provider"),
+            metadata_refresh_mode: row.get("metadata_refresh_mode"),
+            next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+            thumbnail_book_ids: row.get("thumbnail_book_ids"),
         })
         .collect();

@@ -111,10 +139,16 @@ pub async fn create_library(
         root_path,
         enabled: true,
         book_count: 0,
+        series_count: 0,
         monitor_enabled: false,
         scan_mode: "manual".to_string(),
         next_scan_at: None,
         watcher_enabled: false,
+        metadata_provider: None,
+        fallback_metadata_provider: None,
+        metadata_refresh_mode: "manual".to_string(),
+        next_metadata_refresh_at: None,
+        thumbnail_book_ids: vec![],
     }))
 }

@@ -156,14 +190,19 @@ fn canonicalize_library_root(root_path: &str) -> Result<PathBuf, ApiError> {
         return Err(ApiError::bad_request("root_path must be absolute"));
     }

-    let canonical = std::fs::canonicalize(path)
-        .map_err(|_| ApiError::bad_request("root_path does not exist or is inaccessible"))?;
-    if !canonical.is_dir() {
+    // Avoid fs::canonicalize — it opens extra file descriptors to resolve symlinks
+    // and can fail on Docker volume mounts (ro, cached) when fd limits are low.
+    if !path.exists() {
+        return Err(ApiError::bad_request(format!(
+            "root_path does not exist: {}",
+            root_path
+        )));
+    }
+    if !path.is_dir() {
         return Err(ApiError::bad_request("root_path must point to a directory"));
     }

-    Ok(canonical)
+    Ok(path.to_path_buf())
 }

 use crate::index_jobs::{IndexJobResponse, RebuildRequest};
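A note on the `canonicalize_library_root` change above: dropping `std::fs::canonicalize` trades symlink resolution for robustness on constrained mounts, so two symlinked spellings of the same directory are no longer collapsed to one canonical path. A minimal standalone sketch of the same validation strategy, assuming only std (`ApiError` is replaced by a plain `String` here for illustration):

    use std::path::{Path, PathBuf};

    // Sketch only: mirrors the handler's checks without resolving symlinks.
    // exists() and is_dir() each do a single metadata lookup, so no file
    // descriptors stay open while a symlink chain is walked.
    fn validate_root(root_path: &str) -> Result<PathBuf, String> {
        let path = Path::new(root_path);
        if !path.is_absolute() {
            return Err("root_path must be absolute".to_string());
        }
        if !path.exists() {
            return Err(format!("root_path does not exist: {}", root_path));
        }
        if !path.is_dir() {
            return Err("root_path must point to a directory".to_string());
        }
        Ok(path.to_path_buf()) // stored as given; `..` and links unresolved
    }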
@@ -181,7 +220,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest};
         (status = 200, body = IndexJobResponse),
         (status = 404, description = "Library not found"),
         (status = 401, description = "Unauthorized"),
-        (status = 403, description = "Forbidden - Admin scope required"),
     ),
     security(("Bearer" = []))
 )]
@@ -201,7 +239,8 @@ pub async fn scan_library(
     }

     let is_full = payload.as_ref().and_then(|p| p.full).unwrap_or(false);
-    let job_type = if is_full { "full_rebuild" } else { "rebuild" };
+    let is_rescan = payload.as_ref().and_then(|p| p.rescan).unwrap_or(false);
+    let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };

     // Create indexing job for this library
     let job_id = Uuid::new_v4();
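The precedence between the two new flags is implicit in the chained `if`: `full` beats `rescan`. A tiny self-contained restatement of that mapping (the helper name is illustrative, not from the codebase):

    // `full` takes precedence over `rescan`; both default to false.
    fn job_type(full: Option<bool>, rescan: Option<bool>) -> &'static str {
        match (full.unwrap_or(false), rescan.unwrap_or(false)) {
            (true, _) => "full_rebuild",
            (_, true) => "rescan",
            _ => "rebuild",
        }
    }

    fn main() {
        assert_eq!(job_type(Some(true), Some(true)), "full_rebuild");
        assert_eq!(job_type(None, Some(true)), "rescan");
        assert_eq!(job_type(None, None), "rebuild");
    }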
@@ -230,6 +269,8 @@ pub struct UpdateMonitoringRequest {
     #[schema(value_type = String, example = "hourly")]
     pub scan_mode: String, // 'manual', 'hourly', 'daily', 'weekly'
     pub watcher_enabled: Option<bool>,
+    #[schema(value_type = Option<String>, example = "daily")]
+    pub metadata_refresh_mode: Option<String>, // 'manual', 'hourly', 'daily', 'weekly'
 }

 /// Update monitoring settings for a library
@@ -260,6 +301,12 @@ pub async fn update_monitoring(
         return Err(ApiError::bad_request("scan_mode must be one of: manual, hourly, daily, weekly"));
     }

+    // Validate metadata_refresh_mode
+    let metadata_refresh_mode = input.metadata_refresh_mode.as_deref().unwrap_or("manual");
+    if !valid_modes.contains(&metadata_refresh_mode) {
+        return Err(ApiError::bad_request("metadata_refresh_mode must be one of: manual, hourly, daily, weekly"));
+    }
+
     // Calculate next_scan_at if monitoring is enabled
     let next_scan_at = if input.monitor_enabled {
         let interval_minutes = match input.scan_mode.as_str() {
@@ -273,16 +320,31 @@ pub async fn update_monitoring(
         None
     };

+    // Calculate next_metadata_refresh_at
+    let next_metadata_refresh_at = if metadata_refresh_mode != "manual" {
+        let interval_minutes = match metadata_refresh_mode {
+            "hourly" => 60,
+            "daily" => 1440,
+            "weekly" => 10080,
+            _ => 1440,
+        };
+        Some(chrono::Utc::now() + chrono::Duration::minutes(interval_minutes))
+    } else {
+        None
+    };
+
     let watcher_enabled = input.watcher_enabled.unwrap_or(false);

     let result = sqlx::query(
-        "UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled"
+        "UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5, metadata_refresh_mode = $6, next_metadata_refresh_at = $7 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
     )
     .bind(library_id)
     .bind(input.monitor_enabled)
     .bind(input.scan_mode)
     .bind(next_scan_at)
     .bind(watcher_enabled)
+    .bind(metadata_refresh_mode)
+    .bind(next_metadata_refresh_at)
     .fetch_optional(&state.pool)
     .await?;

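The metadata refresh schedule mirrors the scan schedule above: a mode maps to an interval in minutes, and the next run is now plus that interval. Extracted as a standalone sketch (chrono is already a dependency here; the helper name is illustrative):

    use chrono::{DateTime, Duration, Utc};

    // "manual" disables automatic refresh; unknown modes fall back to daily,
    // matching the `_ => 1440` arm in the handler.
    fn next_refresh_at(mode: &str) -> Option<DateTime<Utc>> {
        let minutes = match mode {
            "manual" => return None,
            "hourly" => 60,
            "daily" => 1440,   // 24 * 60
            "weekly" => 10080, // 7 * 24 * 60
            _ => 1440,
        };
        Some(Utc::now() + Duration::minutes(minutes))
    }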
@@ -295,15 +357,121 @@ pub async fn update_monitoring(
     .fetch_one(&state.pool)
     .await?;

+    let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
+        "SELECT b.id FROM books b
+         WHERE b.library_id = $1
+         ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
+         LIMIT 5"
+    )
+    .bind(library_id)
+    .fetch_all(&state.pool)
+    .await
+    .unwrap_or_default();
+
     Ok(Json(LibraryResponse {
         id: row.get("id"),
         name: row.get("name"),
         root_path: row.get("root_path"),
         enabled: row.get("enabled"),
         book_count,
+        series_count,
         monitor_enabled: row.get("monitor_enabled"),
         scan_mode: row.get("scan_mode"),
         next_scan_at: row.get("next_scan_at"),
         watcher_enabled: row.get("watcher_enabled"),
+        metadata_provider: row.get("metadata_provider"),
+        fallback_metadata_provider: row.get("fallback_metadata_provider"),
+        metadata_refresh_mode: row.get("metadata_refresh_mode"),
+        next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+        thumbnail_book_ids,
+    }))
+}
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateMetadataProviderRequest {
+    pub metadata_provider: Option<String>,
+    pub fallback_metadata_provider: Option<String>,
+}
+
+/// Update the metadata provider for a library
+#[utoipa::path(
+    patch,
+    path = "/libraries/{id}/metadata-provider",
+    tag = "libraries",
+    params(
+        ("id" = String, Path, description = "Library UUID"),
+    ),
+    request_body = UpdateMetadataProviderRequest,
+    responses(
+        (status = 200, body = LibraryResponse),
+        (status = 404, description = "Library not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_metadata_provider(
+    State(state): State<AppState>,
+    AxumPath(library_id): AxumPath<Uuid>,
+    Json(input): Json<UpdateMetadataProviderRequest>,
+) -> Result<Json<LibraryResponse>, ApiError> {
+    let provider = input.metadata_provider.as_deref().filter(|s| !s.is_empty());
+    let fallback = input.fallback_metadata_provider.as_deref().filter(|s| !s.is_empty());
+
+    let result = sqlx::query(
+        "UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
+    )
+    .bind(library_id)
+    .bind(provider)
+    .bind(fallback)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let Some(row) = result else {
+        return Err(ApiError::not_found("library not found"));
+    };
+
+    let book_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
+        "SELECT b.id FROM books b
+         WHERE b.library_id = $1
+         ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
+         LIMIT 5"
+    )
+    .bind(library_id)
+    .fetch_all(&state.pool)
+    .await
+    .unwrap_or_default();
+
+    Ok(Json(LibraryResponse {
+        id: row.get("id"),
+        name: row.get("name"),
+        root_path: row.get("root_path"),
+        enabled: row.get("enabled"),
+        book_count,
+        series_count,
+        monitor_enabled: row.get("monitor_enabled"),
+        scan_mode: row.get("scan_mode"),
+        next_scan_at: row.get("next_scan_at"),
+        watcher_enabled: row.get("watcher_enabled"),
+        metadata_provider: row.get("metadata_provider"),
+        fallback_metadata_provider: row.get("fallback_metadata_provider"),
+        metadata_refresh_mode: row.get("metadata_refresh_mode"),
+        next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+        thumbnail_book_ids,
     }))
 }
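One subtlety in `update_metadata_provider` worth calling out: an empty string in the request body clears the column, because `as_deref().filter(|s| !s.is_empty())` maps both `None` and `Some("")` to `None`, which binds as SQL NULL. In isolation:

    // Sketch of the normalization applied before binding the provider columns.
    fn normalize(input: Option<&str>) -> Option<&str> {
        input.filter(|s| !s.is_empty()) // Some("") -> None -> SQL NULL
    }

    fn main() {
        assert_eq!(normalize(Some("anilist")), Some("anilist"));
        assert_eq!(normalize(Some("")), None); // clears the stored provider
        assert_eq!(normalize(None), None);
    }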
@@ -1,18 +1,31 @@
 mod auth;
+mod authors;
 mod books;
 mod error;
 mod handlers;
 mod index_jobs;
+mod job_poller;
+mod komga;
 mod libraries;
+mod metadata;
+mod metadata_batch;
+mod metadata_refresh;
+mod metadata_providers;
 mod api_middleware;
 mod openapi;
 mod pages;
+mod prowlarr;
+mod qbittorrent;
 mod reading_progress;
 mod search;
+mod series;
 mod settings;
 mod state;
+mod stats;
+mod telegram;
 mod thumbnails;
 mod tokens;
+mod users;

 use std::sync::Arc;
 use std::time::Instant;
@@ -66,8 +79,6 @@ async fn main() -> anyhow::Result<()> {
     let state = AppState {
         pool,
         bootstrap_token: Arc::from(config.api_bootstrap_token),
-        meili_url: Arc::from(config.meili_url),
-        meili_master_key: Arc::from(config.meili_master_key),
         page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
         page_render_limit: Arc::new(Semaphore::new(concurrent_renders)),
         metrics: Arc::new(Metrics::new()),
@@ -79,11 +90,13 @@ async fn main() -> anyhow::Result<()> {
     };

     let admin_routes = Router::new()
-        .route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
+        .route("/libraries", axum::routing::post(libraries::create_library))
         .route("/libraries/:id", delete(libraries::delete_library))
-        .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
         .route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
+        .route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
+        .route("/books/:id", axum::routing::patch(books::update_book))
         .route("/books/:id/convert", axum::routing::post(books::convert_book))
+        .route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
         .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
         .route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
         .route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
@@ -94,8 +107,31 @@ async fn main() -> anyhow::Result<()> {
         .route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
         .route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
         .route("/folders", get(index_jobs::list_folders))
+        .route("/admin/users", get(users::list_users).post(users::create_user))
+        .route("/admin/users/:id", delete(users::delete_user).patch(users::update_user))
         .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
-        .route("/admin/tokens/:id", delete(tokens::revoke_token))
+        .route("/admin/tokens/:id", delete(tokens::revoke_token).patch(tokens::update_token))
+        .route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token))
+        .route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr))
+        .route("/prowlarr/test", get(prowlarr::test_prowlarr))
+        .route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
+        .route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
+        .route("/telegram/test", get(telegram::test_telegram))
+        .route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
+        .route("/komga/reports", get(komga::list_sync_reports))
+        .route("/komga/reports/:id", get(komga::get_sync_report))
+        .route("/metadata/search", axum::routing::post(metadata::search_metadata))
+        .route("/metadata/match", axum::routing::post(metadata::create_metadata_match))
+        .route("/metadata/approve/:id", axum::routing::post(metadata::approve_metadata))
+        .route("/metadata/reject/:id", axum::routing::post(metadata::reject_metadata))
+        .route("/metadata/links", get(metadata::get_metadata_links))
+        .route("/metadata/missing/:id", get(metadata::get_missing_books))
+        .route("/metadata/links/:id", delete(metadata::delete_metadata_link))
+        .route("/metadata/batch", axum::routing::post(metadata_batch::start_batch))
+        .route("/metadata/batch/:id/report", get(metadata_batch::get_batch_report))
+        .route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
+        .route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
+        .route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
         .merge(settings::settings_routes())
         .route_layer(middleware::from_fn_with_state(
             state.clone(),
@@ -103,12 +139,23 @@ async fn main() -> anyhow::Result<()> {
         ));

     let read_routes = Router::new()
+        .route("/libraries", get(libraries::list_libraries))
+        .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
         .route("/books", get(books::list_books))
+        .route("/books/ongoing", get(series::ongoing_books))
         .route("/books/:id", get(books::get_book))
         .route("/books/:id/thumbnail", get(books::get_thumbnail))
         .route("/books/:id/pages/:n", get(pages::get_page))
         .route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
-        .route("/libraries/:library_id/series", get(books::list_series))
+        .route("/libraries/:library_id/series", get(series::list_series))
+        .route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
+        .route("/series", get(series::list_all_series))
+        .route("/series/ongoing", get(series::ongoing_series))
+        .route("/series/statuses", get(series::series_statuses))
+        .route("/series/provider-statuses", get(series::provider_statuses))
+        .route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
+        .route("/authors", get(authors::list_authors))
+        .route("/stats", get(stats::get_stats))
         .route("/search", get(search::search_books))
         .route_layer(middleware::from_fn_with_state(state.clone(), api_middleware::read_rate_limit))
         .route_layer(middleware::from_fn_with_state(
@@ -116,6 +163,9 @@ async fn main() -> anyhow::Result<()> {
             auth::require_read,
         ));

+    // Clone pool before state is moved into the router
+    let poller_pool = state.pool.clone();
+
     let app = Router::new()
         .route("/health", get(handlers::health))
         .route("/ready", get(handlers::ready))
@@ -127,6 +177,11 @@ async fn main() -> anyhow::Result<()> {
         .layer(middleware::from_fn_with_state(state.clone(), api_middleware::request_counter))
         .with_state(state);

+    // Start background poller for API-only jobs (metadata_batch, metadata_refresh)
+    tokio::spawn(async move {
+        job_poller::run_job_poller(poller_pool, 5).await;
+    });
+
     let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
     info!(addr = %config.listen_addr, "api listening");
     axum::serve(listener, app).await?;
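`job_poller::run_job_poller` itself is not part of this diff; all the call site pins down is the signature (a pool plus a poll interval in seconds). Purely as an assumption about its shape, a loop like the following would fit the call:

    use sqlx::PgPool;

    // Hypothetical sketch — the real job_poller module is not shown here.
    // Assumed behaviour: wake up every `interval_secs` and claim queued
    // API-only jobs (metadata_batch, metadata_refresh) from the database.
    pub async fn run_job_poller(pool: PgPool, interval_secs: u64) {
        let mut ticker =
            tokio::time::interval(std::time::Duration::from_secs(interval_secs));
        loop {
            ticker.tick().await;
            if let Err(e) = poll_once(&pool).await {
                tracing::warn!(error = %e, "job poller iteration failed");
            }
        }
    }

    async fn poll_once(_pool: &PgPool) -> Result<(), sqlx::Error> {
        // Placeholder: e.g. SELECT ... FOR UPDATE SKIP LOCKED, then dispatch.
        Ok(())
    }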
apps/api/src/metadata.rs (new file, 1097 lines): diff suppressed because it is too large
apps/api/src/metadata_batch.rs (new file, 1145 lines): diff suppressed because it is too large

apps/api/src/metadata_providers/anilist.rs (new file, 342 lines)
@@ -0,0 +1,342 @@
+use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};
+
+pub struct AniListProvider;
+
+impl MetadataProvider for AniListProvider {
+    fn name(&self) -> &str {
+        "anilist"
+    }
+
+    fn search_series(
+        &self,
+        query: &str,
+        config: &ProviderConfig,
+    ) -> std::pin::Pin<
+        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
+    > {
+        let query = query.to_string();
+        let config = config.clone();
+        Box::pin(async move { search_series_impl(&query, &config).await })
+    }
+
+    fn get_series_books(
+        &self,
+        external_id: &str,
+        config: &ProviderConfig,
+    ) -> std::pin::Pin<
+        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
+    > {
+        let external_id = external_id.to_string();
+        let config = config.clone();
+        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
+    }
+}
+
+const SEARCH_QUERY: &str = r#"
+query ($search: String) {
+  Page(perPage: 20) {
+    media(search: $search, type: MANGA, sort: SEARCH_MATCH) {
+      id
+      title { romaji english native }
+      description(asHtml: false)
+      coverImage { large medium }
+      startDate { year }
+      status
+      volumes
+      chapters
+      staff { edges { node { name { full } } role } }
+      siteUrl
+      genres
+    }
+  }
+}
+"#;
+
+const DETAIL_QUERY: &str = r#"
+query ($id: Int) {
+  Media(id: $id, type: MANGA) {
+    id
+    title { romaji english native }
+    description(asHtml: false)
+    coverImage { large medium }
+    startDate { year }
+    status
+    volumes
+    chapters
+    staff { edges { node { name { full } } role } }
+    siteUrl
+    genres
+  }
+}
+"#;
+
+async fn graphql_request(
+    client: &reqwest::Client,
+    query: &str,
+    variables: serde_json::Value,
+) -> Result<serde_json::Value, String> {
+    let resp = client
+        .post("https://graphql.anilist.co")
+        .header("Content-Type", "application/json")
+        .json(&serde_json::json!({
+            "query": query,
+            "variables": variables,
+        }))
+        .send()
+        .await
+        .map_err(|e| format!("AniList request failed: {e}"))?;
+
+    if !resp.status().is_success() {
+        let status = resp.status();
+        let text = resp.text().await.unwrap_or_default();
+        return Err(format!("AniList returned {status}: {text}"));
+    }
+
+    resp.json()
+        .await
+        .map_err(|e| format!("Failed to parse AniList response: {e}"))
+}
+
+async fn search_series_impl(
+    query: &str,
+    _config: &ProviderConfig,
+) -> Result<Vec<SeriesCandidate>, String> {
+    let client = reqwest::Client::builder()
+        .timeout(std::time::Duration::from_secs(15))
+        .build()
+        .map_err(|e| format!("failed to build HTTP client: {e}"))?;
+
+    let data = graphql_request(
+        &client,
+        SEARCH_QUERY,
+        serde_json::json!({ "search": query }),
+    )
+    .await?;
+
+    let media = match data
+        .get("data")
+        .and_then(|d| d.get("Page"))
+        .and_then(|p| p.get("media"))
+        .and_then(|m| m.as_array())
+    {
+        Some(media) => media,
+        None => return Ok(vec![]),
+    };
+
+    let query_lower = query.to_lowercase();
+
+    let mut candidates: Vec<SeriesCandidate> = media
+        .iter()
+        .filter_map(|m| {
+            let id = m.get("id").and_then(|id| id.as_i64())?;
+            let title_obj = m.get("title")?;
+            let title = title_obj
+                .get("english")
+                .and_then(|t| t.as_str())
+                .or_else(|| title_obj.get("romaji").and_then(|t| t.as_str()))?
+                .to_string();
+
+            let description = m
+                .get("description")
+                .and_then(|d| d.as_str())
+                .map(|d| d.replace("\\n", "\n").trim().to_string())
+                .filter(|d| !d.is_empty());
+
+            let cover_url = m
+                .get("coverImage")
+                .and_then(|ci| ci.get("large").or_else(|| ci.get("medium")))
+                .and_then(|u| u.as_str())
+                .map(String::from);
+
+            let start_year = m
+                .get("startDate")
+                .and_then(|sd| sd.get("year"))
+                .and_then(|y| y.as_i64())
+                .map(|y| y as i32);
+
+            let volumes = m
+                .get("volumes")
+                .and_then(|v| v.as_i64())
+                .map(|v| v as i32);
+
+            let chapters = m
+                .get("chapters")
+                .and_then(|v| v.as_i64())
+                .map(|v| v as i32);
+
+            let status = m
+                .get("status")
+                .and_then(|s| s.as_str())
+                .unwrap_or("UNKNOWN")
+                .to_string();
+
+            let site_url = m
+                .get("siteUrl")
+                .and_then(|u| u.as_str())
+                .map(String::from);
+
+            let authors = extract_authors(m);
+
+            let confidence = compute_confidence(&title, &query_lower);
+
+            // Use volumes if known, otherwise fall back to chapters count
+            let (total_volumes, volume_source) = match volumes {
+                Some(v) => (Some(v), "volumes"),
+                None => match chapters {
+                    Some(c) => (Some(c), "chapters"),
+                    None => (None, "unknown"),
+                },
+            };
+
+            Some(SeriesCandidate {
+                external_id: id.to_string(),
+                title,
+                authors,
+                description,
+                publishers: vec![],
+                start_year,
+                total_volumes,
+                cover_url,
+                external_url: site_url,
+                confidence,
+                metadata_json: serde_json::json!({
+                    "status": status,
+                    "chapters": chapters,
+                    "volumes": volumes,
+                    "volume_source": volume_source,
+                }),
+            })
+        })
+        .collect();

+    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
+    candidates.truncate(10);
+    Ok(candidates)
+}
+
+async fn get_series_books_impl(
+    external_id: &str,
+    _config: &ProviderConfig,
+) -> Result<Vec<BookCandidate>, String> {
+    let id: i64 = external_id
+        .parse()
+        .map_err(|_| "invalid AniList ID".to_string())?;
+
+    let client = reqwest::Client::builder()
+        .timeout(std::time::Duration::from_secs(15))
+        .build()
+        .map_err(|e| format!("failed to build HTTP client: {e}"))?;
+
+    let data = graphql_request(
+        &client,
+        DETAIL_QUERY,
+        serde_json::json!({ "id": id }),
+    )
+    .await?;
+
+    let media = match data.get("data").and_then(|d| d.get("Media")) {
+        Some(m) => m,
+        None => return Ok(vec![]),
+    };
+
+    let title_obj = media.get("title").cloned().unwrap_or(serde_json::json!({}));
+    let title = title_obj
+        .get("english")
+        .and_then(|t| t.as_str())
+        .or_else(|| title_obj.get("romaji").and_then(|t| t.as_str()))
+        .unwrap_or("")
+        .to_string();
+
+    let volumes = media
+        .get("volumes")
+        .and_then(|v| v.as_i64())
+        .map(|v| v as i32);
+
+    let chapters = media
+        .get("chapters")
+        .and_then(|v| v.as_i64())
+        .map(|v| v as i32);
+
+    // Use volumes if known, otherwise fall back to chapters count
+    let total = volumes.or(chapters);
+
+    let cover_url = media
+        .get("coverImage")
+        .and_then(|ci| ci.get("large").or_else(|| ci.get("medium")))
+        .and_then(|u| u.as_str())
+        .map(String::from);
+
+    let description = media
+        .get("description")
+        .and_then(|d| d.as_str())
+        .map(|d| d.replace("\\n", "\n").trim().to_string());
+
+    let authors = extract_authors(media);
+
+    // AniList doesn't have per-volume data — generate entries from volumes count (or chapters as fallback)
+    let mut books = Vec::new();
+    if let Some(total) = total {
+        for vol in 1..=total {
+            books.push(BookCandidate {
+                external_book_id: format!("{}-vol-{}", external_id, vol),
+                title: format!("{} Vol. {}", title, vol),
+                volume_number: Some(vol),
+                authors: authors.clone(),
+                isbn: None,
+                summary: if vol == 1 { description.clone() } else { None },
+                cover_url: if vol == 1 { cover_url.clone() } else { None },
+                page_count: None,
+                language: Some("ja".to_string()),
+                publish_date: None,
+                metadata_json: serde_json::json!({}),
+            });
+        }
+    }
+
+    Ok(books)
+}
+
+fn extract_authors(media: &serde_json::Value) -> Vec<String> {
+    let mut authors = Vec::new();
+    if let Some(edges) = media
+        .get("staff")
+        .and_then(|s| s.get("edges"))
+        .and_then(|e| e.as_array())
+    {
+        for edge in edges {
+            let role = edge
+                .get("role")
+                .and_then(|r| r.as_str())
+                .unwrap_or("");
+            let role_lower = role.to_lowercase();
+            if role_lower.contains("story") || role_lower.contains("art") || role_lower.contains("original") {
+                if let Some(name) = edge
+                    .get("node")
+                    .and_then(|n| n.get("name"))
+                    .and_then(|n| n.get("full"))
+                    .and_then(|f| f.as_str())
+                {
+                    if !authors.contains(&name.to_string()) {
+                        authors.push(name.to_string());
+                    }
+                }
+            }
+        }
+    }
+    authors
+}
+
+fn compute_confidence(title: &str, query: &str) -> f32 {
+    let title_lower = title.to_lowercase();
+    if title_lower == query {
+        1.0
+    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
+        0.8
+    } else if title_lower.contains(query) || query.contains(&title_lower) {
+        0.7
+    } else {
+        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
+        let max_len = query.len().max(title_lower.len()).max(1);
+        (common as f32 / max_len as f32).clamp(0.1, 0.6)
+    }
+}
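The fallback branch of `compute_confidence` is a character-membership ratio, not a real edit distance, so it rewards any shared letters regardless of order or count. A worked example of what the code above produces:

    fn main() {
        // query = "naruto", title = "One Piece" (lowercased to "one piece"):
        // only 'n' and 'o' from the query occur anywhere in the title.
        let common = "naruto".chars().filter(|c| "one piece".contains(*c)).count();
        assert_eq!(common, 2);
        let max_len = "naruto".len().max("one piece".len()).max(1); // 9
        let score = (common as f32 / max_len as f32).clamp(0.1, 0.6);
        println!("confidence ≈ {score:.3}"); // ≈ 0.222, well below the 0.7+ tiers
    }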
apps/api/src/metadata_providers/bedetheque.rs (new file, 671 lines)
@@ -0,0 +1,671 @@
|
|||||||
|
use scraper::{Html, Selector};
|
||||||
|
|
||||||
|
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};
|
||||||
|
|
||||||
|
pub struct BedethequeProvider;
|
||||||
|
|
||||||
|
impl MetadataProvider for BedethequeProvider {
|
||||||
|
fn name(&self) -> &str {
|
||||||
|
"bedetheque"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn search_series(
|
||||||
|
&self,
|
||||||
|
query: &str,
|
||||||
|
config: &ProviderConfig,
|
||||||
|
) -> std::pin::Pin<
|
||||||
|
Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
|
||||||
|
> {
|
||||||
|
let query = query.to_string();
|
||||||
|
let config = config.clone();
|
||||||
|
Box::pin(async move { search_series_impl(&query, &config).await })
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_series_books(
|
||||||
|
&self,
|
||||||
|
external_id: &str,
|
||||||
|
config: &ProviderConfig,
|
||||||
|
) -> std::pin::Pin<
|
||||||
|
Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
|
||||||
|
> {
|
||||||
|
let external_id = external_id.to_string();
|
||||||
|
let config = config.clone();
|
||||||
|
Box::pin(async move { get_series_books_impl(&external_id, &config).await })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_client() -> Result<reqwest::Client, String> {
|
||||||
|
reqwest::Client::builder()
|
||||||
|
.timeout(std::time::Duration::from_secs(20))
|
||||||
|
.user_agent("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:108.0) Gecko/20100101 Firefox/108.0")
|
||||||
|
.default_headers({
|
||||||
|
let mut h = reqwest::header::HeaderMap::new();
|
||||||
|
h.insert(
|
||||||
|
reqwest::header::ACCEPT,
|
||||||
|
"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
|
||||||
|
.parse()
|
||||||
|
.unwrap(),
|
||||||
|
);
|
||||||
|
h.insert(
|
||||||
|
reqwest::header::ACCEPT_LANGUAGE,
|
||||||
|
"fr-FR,fr;q=0.9,en;q=0.5".parse().unwrap(),
|
||||||
|
);
|
||||||
|
h.insert(reqwest::header::REFERER, "https://www.bedetheque.com/".parse().unwrap());
|
||||||
|
h
|
||||||
|
})
|
||||||
|
.build()
|
||||||
|
.map_err(|e| format!("failed to build HTTP client: {e}"))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Remove diacritics for URL construction (bedetheque uses ASCII slugs)
|
||||||
|
fn normalize_for_url(s: &str) -> String {
|
||||||
|
s.chars()
|
||||||
|
.map(|c| match c {
|
||||||
|
'é' | 'è' | 'ê' | 'ë' | 'É' | 'È' | 'Ê' | 'Ë' => 'e',
|
||||||
|
'à' | 'â' | 'ä' | 'À' | 'Â' | 'Ä' => 'a',
|
||||||
|
'ù' | 'û' | 'ü' | 'Ù' | 'Û' | 'Ü' => 'u',
|
||||||
|
'ô' | 'ö' | 'Ô' | 'Ö' => 'o',
|
||||||
|
'î' | 'ï' | 'Î' | 'Ï' => 'i',
|
||||||
|
'ç' | 'Ç' => 'c',
|
||||||
|
'ñ' | 'Ñ' => 'n',
|
||||||
|
_ => c,
|
||||||
|
})
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn urlencoded(s: &str) -> String {
|
||||||
|
let mut result = String::new();
|
||||||
|
for byte in s.bytes() {
|
||||||
|
match byte {
|
||||||
|
b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
|
||||||
|
result.push(byte as char);
|
||||||
|
}
|
||||||
|
b' ' => result.push('+'),
|
||||||
|
_ => result.push_str(&format!("%{:02X}", byte)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Search
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
async fn search_series_impl(
|
||||||
|
query: &str,
|
||||||
|
_config: &ProviderConfig,
|
||||||
|
) -> Result<Vec<SeriesCandidate>, String> {
|
||||||
|
let client = build_client()?;
|
||||||
|
|
||||||
|
// Use the full-text search page
|
||||||
|
let url = format!(
|
||||||
|
"https://www.bedetheque.com/search/tout?RechTexte={}&RechWhere=0",
|
||||||
|
urlencoded(&normalize_for_url(query))
|
||||||
|
);
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Bedetheque request failed: {e}"))?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
return Err(format!("Bedetheque returned {status}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
let html = resp
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to read Bedetheque response: {e}"))?;
|
||||||
|
|
||||||
|
// Detect IP blacklist
|
||||||
|
if html.contains("<title></title>") || html.contains("<title> </title>") {
|
||||||
|
return Err("Bedetheque: IP may be rate-limited, please retry later".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse HTML in a block so the non-Send Html type is dropped before any .await
|
||||||
|
let candidates = {
|
||||||
|
let document = Html::parse_document(&html);
|
||||||
|
let link_sel =
|
||||||
|
Selector::parse("a[href*='/serie-']").map_err(|e| format!("selector error: {e}"))?;
|
||||||
|
|
||||||
|
let query_lower = query.to_lowercase();
|
||||||
|
let mut seen = std::collections::HashSet::new();
|
||||||
|
let mut candidates = Vec::new();
|
||||||
|
|
||||||
|
for el in document.select(&link_sel) {
|
||||||
|
let href = match el.value().attr("href") {
|
||||||
|
Some(h) => h.to_string(),
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
let (series_id, _slug) = match parse_serie_href(&href) {
|
||||||
|
Some(v) => v,
|
||||||
|
None => continue,
|
||||||
|
};
|
||||||
|
|
||||||
|
if !seen.insert(series_id.clone()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let title = el.text().collect::<String>().trim().to_string();
|
||||||
|
if title.is_empty() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let confidence = compute_confidence(&title, &query_lower);
|
||||||
|
let cover_url = format!(
|
||||||
|
"https://www.bedetheque.com/cache/thb_series/PlancheS_{}.jpg",
|
||||||
|
series_id
|
||||||
|
);
|
||||||
|
|
||||||
|
candidates.push(SeriesCandidate {
|
||||||
|
external_id: series_id.clone(),
|
||||||
|
title: title.clone(),
|
||||||
|
authors: vec![],
|
||||||
|
description: None,
|
||||||
|
publishers: vec![],
|
||||||
|
start_year: None,
|
||||||
|
total_volumes: None,
|
||||||
|
cover_url: Some(cover_url),
|
||||||
|
external_url: Some(href),
|
||||||
|
confidence,
|
||||||
|
metadata_json: serde_json::json!({}),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
candidates.sort_by(|a, b| {
|
||||||
|
b.confidence
|
||||||
|
.partial_cmp(&a.confidence)
|
||||||
|
.unwrap_or(std::cmp::Ordering::Equal)
|
||||||
|
});
|
||||||
|
candidates.truncate(10);
|
||||||
|
candidates
|
||||||
|
}; // document is dropped here — safe to .await below
|
||||||
|
|
||||||
|
// For the top candidates, fetch series details to enrich metadata
|
||||||
|
// (limit to top 3 to avoid hammering the site)
|
||||||
|
let mut enriched = Vec::new();
|
||||||
|
for mut c in candidates {
|
||||||
|
if enriched.len() < 3 {
|
||||||
|
if let Ok(details) = fetch_series_details(&client, &c.external_id, c.external_url.as_deref()).await {
|
||||||
|
if let Some(desc) = details.description {
|
||||||
|
c.description = Some(desc);
|
||||||
|
}
|
||||||
|
if !details.authors.is_empty() {
|
||||||
|
c.authors = details.authors;
|
||||||
|
}
|
||||||
|
if !details.publishers.is_empty() {
|
||||||
|
c.publishers = details.publishers;
|
||||||
|
}
|
||||||
|
if let Some(year) = details.start_year {
|
||||||
|
c.start_year = Some(year);
|
||||||
|
}
|
||||||
|
if let Some(count) = details.album_count {
|
||||||
|
c.total_volumes = Some(count);
|
||||||
|
}
|
||||||
|
c.metadata_json = serde_json::json!({
|
||||||
|
"description": c.description,
|
||||||
|
"authors": c.authors,
|
||||||
|
"publishers": c.publishers,
|
||||||
|
"start_year": c.start_year,
|
||||||
|
"genres": details.genres,
|
||||||
|
"status": details.status,
|
||||||
|
"origin": details.origin,
|
||||||
|
"language": details.language,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
enriched.push(c);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(enriched)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Parse serie URL to extract (id, slug)
|
||||||
|
fn parse_serie_href(href: &str) -> Option<(String, String)> {
|
||||||
|
// Patterns:
|
||||||
|
// https://www.bedetheque.com/serie-3-BD-Blacksad.html
|
||||||
|
// /serie-3-BD-Blacksad.html
|
||||||
|
let re = regex::Regex::new(r"/serie-(\d+)-[A-Za-z]+-(.+?)(?:__\d+)?\.html").ok()?;
|
||||||
|
let caps = re.captures(href)?;
|
||||||
|
Some((caps[1].to_string(), caps[2].to_string()))
|
||||||
|
}
|
||||||
|
|
||||||
|
struct SeriesDetails {
|
||||||
|
description: Option<String>,
|
||||||
|
authors: Vec<String>,
|
||||||
|
publishers: Vec<String>,
|
||||||
|
start_year: Option<i32>,
|
||||||
|
album_count: Option<i32>,
|
||||||
|
genres: Vec<String>,
|
||||||
|
status: Option<String>,
|
||||||
|
origin: Option<String>,
|
||||||
|
language: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch_series_details(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
series_id: &str,
|
||||||
|
series_url: Option<&str>,
|
||||||
|
) -> Result<SeriesDetails, String> {
|
||||||
|
// Build URL — append __10000 to get all albums on one page
|
||||||
|
let url = match series_url {
|
||||||
|
Some(u) => {
|
||||||
|
// Replace .html with __10000.html
|
||||||
|
u.replace(".html", "__10000.html")
|
||||||
|
}
|
||||||
|
None => format!(
|
||||||
|
"https://www.bedetheque.com/serie-{}-BD-Serie__10000.html",
|
||||||
|
series_id
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to fetch series page: {e}"))?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
return Err(format!("Series page returned {}", resp.status()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let html = resp
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to read series page: {e}"))?;
|
||||||
|
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
let mut details = SeriesDetails {
|
||||||
|
description: None,
|
||||||
|
authors: vec![],
|
||||||
|
publishers: vec![],
|
||||||
|
start_year: None,
|
||||||
|
album_count: None,
|
||||||
|
genres: vec![],
|
||||||
|
status: None,
|
||||||
|
origin: None,
|
||||||
|
language: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Description from <meta name="description"> — format: "Tout sur la série {name} : {description}"
|
||||||
|
if let Ok(sel) = Selector::parse(r#"meta[name="description"]"#) {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
if let Some(content) = el.value().attr("content") {
|
||||||
|
let desc = content.trim().to_string();
|
||||||
|
// Strip the "Tout sur la série ... : " prefix
|
||||||
|
let cleaned = if let Some(pos) = desc.find(" : ") {
|
||||||
|
desc[pos + 3..].trim().to_string()
|
||||||
|
} else {
|
||||||
|
desc
|
||||||
|
};
|
||||||
|
if !cleaned.is_empty() {
|
||||||
|
details.description = Some(cleaned);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract authors from itemprop="author" and itemprop="illustrator" (deduplicated)
|
||||||
|
{
|
||||||
|
let mut authors_set = std::collections::HashSet::new();
|
||||||
|
for attr in ["author", "illustrator"] {
|
||||||
|
if let Ok(sel) = Selector::parse(&format!(r#"[itemprop="{attr}"]"#)) {
|
||||||
|
for el in doc.select(&sel) {
|
||||||
|
let name = el.text().collect::<String>().trim().to_string();
|
||||||
|
// Names are "Last, First" — normalize to "First Last"
|
||||||
|
let normalized = if let Some((last, first)) = name.split_once(',') {
|
||||||
|
format!("{} {}", first.trim(), last.trim())
|
||||||
|
} else {
|
||||||
|
name
|
||||||
|
};
|
||||||
|
if !normalized.is_empty() && is_real_author(&normalized) {
|
||||||
|
authors_set.insert(normalized);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
details.authors = authors_set.into_iter().collect();
|
||||||
|
details.authors.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract publishers from itemprop="publisher" (deduplicated)
|
||||||
|
{
|
||||||
|
let mut publishers_set = std::collections::HashSet::new();
|
||||||
|
if let Ok(sel) = Selector::parse(r#"[itemprop="publisher"]"#) {
|
||||||
|
for el in doc.select(&sel) {
|
||||||
|
let name = el.text().collect::<String>().trim().to_string();
|
||||||
|
if !name.is_empty() {
|
||||||
|
publishers_set.insert(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
details.publishers = publishers_set.into_iter().collect();
|
||||||
|
details.publishers.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract series-level info from <li><label>X :</label>value</li> blocks
|
||||||
|
// Genre: <li><label>Genre :</label><span class="style-serie">Animalier, Aventure, Humour</span></li>
|
||||||
|
if let Ok(sel) = Selector::parse("span.style-serie") {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
let text = el.text().collect::<String>();
|
||||||
|
details.genres = text
|
||||||
|
.split(',')
|
||||||
|
.map(|s| s.trim().to_string())
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parution: <li><label>Parution :</label><span class="parution-serie">Série finie</span></li>
|
||||||
|
if let Ok(sel) = Selector::parse("span.parution-serie") {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
let text = el.text().collect::<String>().trim().to_string();
|
||||||
|
if !text.is_empty() {
|
||||||
|
details.status = Some(text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Origine and Langue from page text (no dedicated CSS class)
|
||||||
|
let page_text = doc.root_element().text().collect::<String>();
|
||||||
|
|
||||||
|
if let Some(val) = extract_info_value(&page_text, "Origine") {
|
||||||
|
let val = val.lines().next().unwrap_or(val).trim();
|
||||||
|
if !val.is_empty() {
|
||||||
|
details.origin = Some(val.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(val) = extract_info_value(&page_text, "Langue") {
|
||||||
|
let val = val.lines().next().unwrap_or(val).trim();
|
||||||
|
if !val.is_empty() {
|
||||||
|
details.language = Some(val.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Album count from serie-info text (e.g. "Tomes : 8")
|
||||||
|
if let Ok(re) = regex::Regex::new(r"Tomes?\s*:\s*(\d+)") {
|
||||||
|
if let Some(caps) = re.captures(&page_text) {
|
||||||
|
if let Ok(n) = caps[1].parse::<i32>() {
|
||||||
|
details.album_count = Some(n);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start year from first <meta itemprop="datePublished" content="YYYY-MM-DD">
|
||||||
|
if let Ok(sel) = Selector::parse(r#"[itemprop="datePublished"]"#) {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
if let Some(content) = el.value().attr("content") {
|
||||||
|
// content is "YYYY-MM-DD"
|
||||||
|
if let Some(year_str) = content.split('-').next() {
|
||||||
|
if let Ok(year) = year_str.parse::<i32>() {
|
||||||
|
details.start_year = Some(year);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(details)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract value after a label like "Scénario : Jean-Claude" → "Jean-Claude"
|
||||||
|
fn extract_info_value<'a>(text: &'a str, label: &str) -> Option<&'a str> {
|
||||||
|
// Handle both "Label :" and "Label:"
|
||||||
|
let patterns = [
|
||||||
|
format!("{} :", label),
|
||||||
|
format!("{}:", label),
|
||||||
|
format!("{} :", &label.to_lowercase()),
|
||||||
|
];
|
||||||
|
for pat in &patterns {
|
||||||
|
if let Some(pos) = text.find(pat.as_str()) {
|
||||||
|
let val = text[pos + pat.len()..].trim();
|
||||||
|
if !val.is_empty() {
|
||||||
|
return Some(val);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Get series books
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
async fn get_series_books_impl(
|
||||||
|
external_id: &str,
|
||||||
|
_config: &ProviderConfig,
|
||||||
|
) -> Result<Vec<BookCandidate>, String> {
|
||||||
|
let client = build_client()?;
|
||||||
|
|
||||||
|
// We need to find the series URL — try a direct fetch
|
||||||
|
// external_id is the numeric series ID
|
||||||
|
// We try to fetch the series page to get the album list
|
||||||
|
let url = format!(
|
||||||
|
"https://www.bedetheque.com/serie-{}-BD-Serie__10000.html",
|
||||||
|
external_id
|
||||||
|
);
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to fetch series: {e}"))?;
|
||||||
|
|
||||||
|
// If the generic slug fails, try without the slug part (bedetheque redirects)
|
||||||
|
let html = if resp.status().is_success() {
|
||||||
|
resp.text().await.map_err(|e| format!("Failed to read: {e}"))?
|
||||||
|
} else {
|
||||||
|
// Try alternative URL pattern
|
||||||
|
let alt_url = format!(
|
||||||
|
"https://www.bedetheque.com/serie-{}__10000.html",
|
||||||
|
external_id
|
||||||
|
);
|
||||||
|
let resp2 = client
|
||||||
|
.get(&alt_url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to fetch series (alt): {e}"))?;
|
||||||
|
if !resp2.status().is_success() {
|
||||||
|
return Err(format!("Series page not found for id {external_id}"));
|
||||||
|
}
|
||||||
|
resp2.text().await.map_err(|e| format!("Failed to read: {e}"))?
|
||||||
|
};
|
||||||
|
|
||||||
|
if html.contains("<title></title>") {
|
||||||
|
return Err("Bedetheque: IP may be rate-limited".to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
    let mut books = Vec::new();

    // Each album block starts before a .album-main div. The cover image
    // (<img itemprop="image">) sits OUTSIDE .album-main (as a sibling), so the
    // simplest approach is to parse all itemprop elements relative to each
    // .album-main and collect the covers separately.
    let album_sel = Selector::parse(".album-main").map_err(|e| format!("selector: {e}"))?;

    // Pre-collect cover images — they appear in <img itemprop="image"> before
    // each .album-main and link to an album URL containing the book ID.
    let cover_sel =
        Selector::parse(r#"img[itemprop="image"]"#).map_err(|e| format!("selector: {e}"))?;
    let covers: Vec<String> = doc
        .select(&cover_sel)
        .filter_map(|el| {
            el.value().attr("src").map(|s| {
                if s.starts_with("http") {
                    s.to_string()
                } else {
                    format!("https://www.bedetheque.com{}", s)
                }
            })
        })
        .collect();

    static RE_TOME: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)-Tome-\d+-").unwrap());
    static RE_BOOK_ID: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"-(\d+)\.html").unwrap());
    static RE_VOLUME: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)Tome-(\d+)-").unwrap());

    for (idx, album_el) in doc.select(&album_sel).enumerate() {
        // Title from <a class="titre" title="..."> — the title attribute is clean.
        let title_sel = Selector::parse("a.titre").ok();
        let title_el = title_sel.as_ref().and_then(|s| album_el.select(s).next());
        let title = title_el
            .and_then(|el| el.value().attr("title"))
            .unwrap_or("")
            .trim()
            .to_string();

        if title.is_empty() {
            continue;
        }

        // External book ID from the album URL (e.g. "...-1063.html").
        let album_url = title_el.and_then(|el| el.value().attr("href")).unwrap_or("");

        // Only keep main tomes — their URLs contain "Tome-{N}-".
        // Skip hors-série (HS), intégrales (INT/INTFL), romans, coffrets, etc.
        if !RE_TOME.is_match(album_url) {
            continue;
        }

        let external_book_id = RE_BOOK_ID
            .captures(album_url)
            .map(|c| c[1].to_string())
            .unwrap_or_default();

        // Volume number from the URL pattern "Tome-{N}-", falling back to the title.
        let volume_number = RE_VOLUME
            .captures(album_url)
            .and_then(|c| c[1].parse::<i32>().ok())
            .or_else(|| extract_volume_from_title(&title));

        // Authors from itemprop="author" and itemprop="illustrator".
        let mut authors = Vec::new();
        let author_sel = Selector::parse(r#"[itemprop="author"]"#).ok();
        let illustrator_sel = Selector::parse(r#"[itemprop="illustrator"]"#).ok();
        for sel in [&author_sel, &illustrator_sel].into_iter().flatten() {
            for el in album_el.select(sel) {
                let name = el.text().collect::<String>().trim().to_string();
                // Names come as "Last, First" — normalize to "First Last".
                let normalized = if let Some((last, first)) = name.split_once(',') {
                    format!("{} {}", first.trim(), last.trim())
                } else {
                    name
                };
                if !normalized.is_empty()
                    && is_real_author(&normalized)
                    && !authors.contains(&normalized)
                {
                    authors.push(normalized);
                }
            }
        }

        // ISBN from <span itemprop="isbn">.
        let isbn = Selector::parse(r#"[itemprop="isbn"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .map(|el| el.text().collect::<String>().trim().to_string())
            .filter(|s| !s.is_empty());

        // Page count from <span itemprop="numberOfPages">.
        let page_count = Selector::parse(r#"[itemprop="numberOfPages"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .and_then(|el| el.text().collect::<String>().trim().parse::<i32>().ok());

        // Publish date from <meta itemprop="datePublished" content="YYYY-MM-DD">.
        let publish_date = Selector::parse(r#"[itemprop="datePublished"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .and_then(|el| el.value().attr("content").map(|c| c.trim().to_string()))
            .filter(|s| !s.is_empty());

        // Cover from the pre-collected covers (same index).
        let cover_url = covers.get(idx).cloned();

        books.push(BookCandidate {
            external_book_id,
            title,
            volume_number,
            authors,
            isbn,
            summary: None,
            cover_url,
            page_count,
            language: Some("fr".to_string()),
            publish_date,
            metadata_json: serde_json::json!({}),
        });
    }

    books.sort_by_key(|b| b.volume_number.unwrap_or(999));
    Ok(books)
}
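
// Illustrative sketch (not part of the original diff): how the URL filters
// above behave on typical Bédéthèque album URLs. The regexes are re-declared
// here because the originals are local to get_series_books_impl, and the
// sample URLs are invented for the example.
#[cfg(test)]
mod tome_url_examples {
    #[test]
    fn main_tomes_are_kept_and_parsed() {
        let re_tome = regex::Regex::new(r"(?i)-Tome-\d+-").unwrap();
        let re_book_id = regex::Regex::new(r"-(\d+)\.html").unwrap();
        let re_volume = regex::Regex::new(r"(?i)Tome-(\d+)-").unwrap();

        let url = "https://www.bedetheque.com/BD-Serie-Tome-3-Titre-1063.html";
        assert!(re_tome.is_match(url));
        assert_eq!(&re_book_id.captures(url).unwrap()[1], "1063");
        assert_eq!(re_volume.captures(url).unwrap()[1].parse::<i32>().ok(), Some(3));

        // Hors-série / intégrale URLs carry no "Tome-{N}-" marker and are skipped.
        assert!(!re_tome.is_match("https://www.bedetheque.com/BD-Serie-HS-2048.html"));
    }
}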

/// Filter out placeholder author names from Bédéthèque (values wrapped in
/// angle brackets, or the generic "Collectif").
fn is_real_author(name: &str) -> bool {
    !name.starts_with('<') && !name.ends_with('>') && name != "Collectif"
}

fn extract_volume_from_title(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:tome|t\.)\s*(\d+)",
        r"(?i)(?:vol(?:ume)?\.?)\s*(\d+)",
        r"#\s*(\d+)",
    ];
    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Ok(n) = caps[1].parse::<i32>() {
                    return Some(n);
                }
            }
        }
    }
    None
}

/// Normalize a title by removing French articles (leading or in parentheses)
/// and extra whitespace, so that "Les Légendaires - Résistance" and
/// "Légendaires (Les) - Résistance" produce the same canonical form.
fn normalize_title(s: &str) -> String {
    let lower = s.to_lowercase();
    // Remove articles in parentheses: "(les)", "(la)", "(le)", "(l')", "(un)", "(une)", "(des)"…
    let re_parens = regex::Regex::new(r"\s*\((?:les?|la|l'|une?|des|du|d')\)").unwrap();
    let cleaned = re_parens.replace_all(&lower, "");
    // Remove leading articles: "les ", "la ", "le ", "l'", "un ", "une ", "des ", "du ", "d'".
    let re_leading = regex::Regex::new(r"^(?:les?|la|l'|une?|des|du|d')\s+").unwrap();
    let cleaned = re_leading.replace(&cleaned, "");
    // Collapse runs of whitespace into a single space.
    let re_spaces = regex::Regex::new(r"\s+").unwrap();
    re_spaces.replace_all(cleaned.trim(), " ").to_string()
}
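
// Illustrative sketch (not part of the original diff): the two title spellings
// mentioned in the doc comment really do collapse to one canonical form.
#[cfg(test)]
mod normalize_title_examples {
    use super::normalize_title;

    #[test]
    fn article_conventions_collapse() {
        assert_eq!(
            normalize_title("Les Légendaires - Résistance"),
            normalize_title("Légendaires (Les) - Résistance"),
        );
    }
}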

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    let query_lower = query.to_lowercase();
    if title_lower == query_lower {
        return 1.0;
    }

    // Try normalized forms (handles Bédéthèque's "Name (Article)" convention).
    let title_norm = normalize_title(title);
    let query_norm = normalize_title(query);
    if title_norm == query_norm {
        return 1.0;
    }

    if title_lower.starts_with(&query_lower)
        || query_lower.starts_with(&title_lower)
        || title_norm.starts_with(&query_norm)
        || query_norm.starts_with(&title_norm)
    {
        0.85
    } else if title_lower.contains(&query_lower)
        || query_lower.contains(&title_lower)
        || title_norm.contains(&query_norm)
        || query_norm.contains(&title_norm)
    {
        0.7
    } else {
        // Crude fallback: fraction of query characters that occur in the title.
        let common: usize = query_lower.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query_lower.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}
267 apps/api/src/metadata_providers/comicvine.rs (new file)
@@ -0,0 +1,267 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct ComicVineProvider;

impl MetadataProvider for ComicVineProvider {
    fn name(&self) -> &str {
        "comicvine"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

fn build_client() -> Result<reqwest::Client, String> {
    reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .user_agent("StripstreamLibrarian/1.0")
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let api_key = config
        .api_key
        .as_deref()
        .filter(|k| !k.is_empty())
        .ok_or_else(|| {
            "ComicVine requires an API key. Configure it in Settings > Integrations.".to_string()
        })?;

    let client = build_client()?;

    let url = format!(
        "https://comicvine.gamespot.com/api/search/?api_key={}&format=json&resources=volume&query={}&limit=20",
        api_key,
        urlencoded(query)
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ComicVine request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("ComicVine returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse ComicVine response: {e}"))?;

    let results = match data.get("results").and_then(|r| r.as_array()) {
        Some(results) => results,
        None => return Ok(vec![]),
    };

    let query_lower = query.to_lowercase();

    let mut candidates: Vec<SeriesCandidate> = results
        .iter()
        .filter_map(|vol| {
            let name = vol.get("name").and_then(|n| n.as_str())?.to_string();
            let id = vol.get("id").and_then(|id| id.as_i64())?;
            let description = vol
                .get("description")
                .and_then(|d| d.as_str())
                .map(strip_html);
            let publisher = vol
                .get("publisher")
                .and_then(|p| p.get("name"))
                .and_then(|n| n.as_str())
                .map(String::from);
            let start_year = vol
                .get("start_year")
                .and_then(|y| y.as_str())
                .and_then(|y| y.parse::<i32>().ok());
            let count_of_issues = vol
                .get("count_of_issues")
                .and_then(|c| c.as_i64())
                .map(|c| c as i32);
            let cover_url = vol
                .get("image")
                .and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))
                .and_then(|u| u.as_str())
                .map(String::from);
            let site_url = vol
                .get("site_detail_url")
                .and_then(|u| u.as_str())
                .map(String::from);

            let confidence = compute_confidence(&name, &query_lower);

            Some(SeriesCandidate {
                external_id: id.to_string(),
                title: name,
                authors: vec![],
                description,
                publishers: publisher.into_iter().collect(),
                start_year,
                total_volumes: count_of_issues,
                cover_url,
                external_url: site_url,
                confidence,
                metadata_json: serde_json::json!({}),
            })
        })
        .collect();

    candidates.sort_by(|a, b| {
        b.confidence
            .partial_cmp(&a.confidence)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    candidates.truncate(10);
    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let api_key = config
        .api_key
        .as_deref()
        .filter(|k| !k.is_empty())
        .ok_or_else(|| "ComicVine requires an API key".to_string())?;

    let client = build_client()?;

    let url = format!(
        "https://comicvine.gamespot.com/api/issues/?api_key={}&format=json&filter=volume:{}&sort=issue_number:asc&limit=100&field_list=id,name,issue_number,description,image,cover_date,site_detail_url",
        api_key,
        external_id
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ComicVine request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("ComicVine returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse ComicVine response: {e}"))?;

    let results = match data.get("results").and_then(|r| r.as_array()) {
        Some(results) => results,
        None => return Ok(vec![]),
    };

    let books: Vec<BookCandidate> = results
        .iter()
        .filter_map(|issue| {
            let id = issue.get("id").and_then(|id| id.as_i64())?;
            let name = issue
                .get("name")
                .and_then(|n| n.as_str())
                .unwrap_or("")
                .to_string();
            let issue_number = issue
                .get("issue_number")
                .and_then(|n| n.as_str())
                .and_then(|n| n.parse::<f64>().ok())
                .map(|n| n as i32);
            let description = issue
                .get("description")
                .and_then(|d| d.as_str())
                .map(strip_html);
            let cover_url = issue
                .get("image")
                .and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))
                .and_then(|u| u.as_str())
                .map(String::from);
            let cover_date = issue
                .get("cover_date")
                .and_then(|d| d.as_str())
                .map(String::from);

            Some(BookCandidate {
                external_book_id: id.to_string(),
                title: name,
                volume_number: issue_number,
                authors: vec![],
                isbn: None,
                summary: description,
                cover_url,
                page_count: None,
                language: None,
                publish_date: cover_date,
                metadata_json: serde_json::json!({}),
            })
        })
        .collect();

    Ok(books)
}

/// Remove HTML tags by dropping everything between '<' and '>'.
/// Entities (e.g. "&amp;") are left as-is.
fn strip_html(s: &str) -> String {
    let mut result = String::new();
    let mut in_tag = false;
    for ch in s.chars() {
        match ch {
            '<' => in_tag = true,
            '>' => in_tag = false,
            _ if !in_tag => result.push(ch),
            _ => {}
        }
    }
    result.trim().to_string()
}
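
// Illustrative sketch (not part of the original diff): what the tag stripper
// does and does not handle — tags disappear, HTML entities survive untouched.
#[cfg(test)]
mod strip_html_examples {
    use super::strip_html;

    #[test]
    fn tags_go_entities_stay() {
        assert_eq!(
            strip_html("<p>Private eye <b>John Blacksad</b></p>"),
            "Private eye John Blacksad"
        );
        assert_eq!(strip_html("Cats &amp; dogs"), "Cats &amp; dogs");
    }
}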

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        // Crude fallback: fraction of query characters that occur in the title.
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

/// Minimal RFC 3986 percent-encoding: unreserved bytes pass through,
/// everything else is emitted as %XX.
fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}
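
// Illustrative sketch (not part of the original diff): the encoder works on
// bytes, so a multi-byte UTF-8 character becomes one %XX escape per byte.
#[cfg(test)]
mod urlencoded_examples {
    use super::urlencoded;

    #[test]
    fn spaces_and_accents_are_escaped() {
        assert_eq!(urlencoded("De Cape et de Crocs"), "De%20Cape%20et%20de%20Crocs");
        assert_eq!(urlencoded("é"), "%C3%A9"); // U+00E9 is 0xC3 0xA9 in UTF-8
    }
}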
472 apps/api/src/metadata_providers/google_books.rs (new file)
@@ -0,0 +1,472 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct GoogleBooksProvider;

impl MetadataProvider for GoogleBooksProvider {
    fn name(&self) -> &str {
        "google_books"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    let search_query = format!("intitle:{}", query);
    let mut url = format!(
        "https://www.googleapis.com/books/v1/volumes?q={}&maxResults=20&printType=books&langRestrict={}",
        urlencoded(&search_query),
        urlencoded(&config.language),
    );
    if let Some(ref key) = config.api_key {
        url.push_str(&format!("&key={}", key));
    }

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Google Books request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Google Books returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Google Books response: {e}"))?;

    let items = match data.get("items").and_then(|i| i.as_array()) {
        Some(items) => items,
        None => return Ok(vec![]),
    };

    // Group volumes by series name to produce series candidates.
    let query_lower = query.to_lowercase();
    let mut series_map: std::collections::HashMap<String, SeriesCandidateBuilder> =
        std::collections::HashMap::new();

    for item in items {
        let volume_info = match item.get("volumeInfo") {
            Some(vi) => vi,
            None => continue,
        };

        let title = volume_info
            .get("title")
            .and_then(|t| t.as_str())
            .unwrap_or("")
            .to_string();
        let authors: Vec<String> = volume_info
            .get("authors")
            .and_then(|a| a.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
            .unwrap_or_default();
        let publisher = volume_info
            .get("publisher")
            .and_then(|p| p.as_str())
            .map(String::from);
        let published_date = volume_info
            .get("publishedDate")
            .and_then(|d| d.as_str())
            .map(String::from);
        let description = volume_info
            .get("description")
            .and_then(|d| d.as_str())
            .map(String::from);

        // Series name from seriesInfo when present, otherwise derived from the title.
        let series_name = volume_info
            .get("seriesInfo")
            .and_then(|si| si.get("title"))
            .and_then(|t| t.as_str())
            .map(String::from)
            .unwrap_or_else(|| extract_series_name(&title));

        let cover_url = volume_info
            .get("imageLinks")
            .and_then(|il| il.get("thumbnail").or_else(|| il.get("smallThumbnail")))
            .and_then(|u| u.as_str())
            .map(|s| s.replace("http://", "https://"));

        let google_id = item
            .get("id")
            .and_then(|id| id.as_str())
            .unwrap_or("")
            .to_string();

        let entry = series_map
            .entry(series_name.clone())
            .or_insert_with(|| SeriesCandidateBuilder {
                title: series_name.clone(),
                authors: vec![],
                description: None,
                publishers: vec![],
                start_year: None,
                volume_count: 0,
                cover_url: None,
                external_id: google_id.clone(),
                external_url: None,
                metadata_json: serde_json::json!({}),
            });

        entry.volume_count += 1;

        // Merge authors.
        for a in &authors {
            if !entry.authors.contains(a) {
                entry.authors.push(a.clone());
            }
        }

        // Keep the first description seen.
        if entry.description.is_none() {
            entry.description = description;
        }

        // Merge publisher.
        if let Some(ref pub_name) = publisher {
            if !entry.publishers.contains(pub_name) {
                entry.publishers.push(pub_name.clone());
            }
        }

        // Track the earliest publication year as the series start year.
        if let Some(ref date) = published_date {
            if let Some(year) = extract_year(date) {
                if entry.start_year.is_none() || entry.start_year.unwrap() > year {
                    entry.start_year = Some(year);
                }
            }
        }

        if entry.cover_url.is_none() {
            entry.cover_url = cover_url;
        }

        entry.external_url = Some(format!("https://books.google.com/books?id={}", google_id));
    }

    let mut candidates: Vec<SeriesCandidate> = series_map
        .into_values()
        .map(|b| {
            let confidence = compute_confidence(&b.title, &query_lower);
            SeriesCandidate {
                external_id: b.external_id,
                title: b.title,
                authors: b.authors,
                description: b.description,
                publishers: b.publishers,
                start_year: b.start_year,
                total_volumes: if b.volume_count > 1 { Some(b.volume_count) } else { None },
                cover_url: b.cover_url,
                external_url: b.external_url,
                confidence,
                metadata_json: b.metadata_json,
            }
        })
        .collect();

    candidates.sort_by(|a, b| {
        b.confidence
            .partial_cmp(&a.confidence)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    candidates.truncate(10);

    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // First fetch the volume to get its series info.
    let mut url = format!("https://www.googleapis.com/books/v1/volumes/{}", external_id);
    if let Some(ref key) = config.api_key {
        url.push_str(&format!("?key={}", key));
    }

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Google Books request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Google Books returned {status}: {text}"));
    }

    let volume: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Google Books response: {e}"))?;

    let volume_info = volume.get("volumeInfo").cloned().unwrap_or(serde_json::json!({}));
    let title = volume_info.get("title").and_then(|t| t.as_str()).unwrap_or("");

    // Search for more volumes in this series.
    let series_name = extract_series_name(title);
    let search_query = format!("intitle:{}", series_name);
    let mut search_url = format!(
        "https://www.googleapis.com/books/v1/volumes?q={}&maxResults=40&printType=books&langRestrict={}",
        urlencoded(&search_query),
        urlencoded(&config.language),
    );
    if let Some(ref key) = config.api_key {
        search_url.push_str(&format!("&key={}", key));
    }

    let resp = client
        .get(&search_url)
        .send()
        .await
        .map_err(|e| format!("Google Books search failed: {e}"))?;

    if !resp.status().is_success() {
        // Fall back to returning just the single volume as a book.
        return Ok(vec![volume_to_book_candidate(&volume)]);
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse search response: {e}"))?;

    let items = match data.get("items").and_then(|i| i.as_array()) {
        Some(items) => items,
        None => return Ok(vec![volume_to_book_candidate(&volume)]),
    };

    let mut books: Vec<BookCandidate> = items.iter().map(volume_to_book_candidate).collect();

    // Sort by volume number; unknown volumes sink to the end.
    books.sort_by_key(|b| b.volume_number.unwrap_or(999));

    Ok(books)
}

fn volume_to_book_candidate(item: &serde_json::Value) -> BookCandidate {
    let volume_info = item.get("volumeInfo").cloned().unwrap_or(serde_json::json!({}));
    let title = volume_info
        .get("title")
        .and_then(|t| t.as_str())
        .unwrap_or("")
        .to_string();
    let authors: Vec<String> = volume_info
        .get("authors")
        .and_then(|a| a.as_array())
        .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
        .unwrap_or_default();
    let isbn = volume_info
        .get("industryIdentifiers")
        .and_then(|ids| ids.as_array())
        .and_then(|arr| {
            arr.iter()
                .find(|id| {
                    id.get("type")
                        .and_then(|t| t.as_str())
                        .map(|t| t == "ISBN_13" || t == "ISBN_10")
                        .unwrap_or(false)
                })
                .and_then(|id| id.get("identifier").and_then(|i| i.as_str()))
        })
        .map(String::from);
    let summary = volume_info
        .get("description")
        .and_then(|d| d.as_str())
        .map(String::from);
    let cover_url = volume_info
        .get("imageLinks")
        .and_then(|il| il.get("thumbnail").or_else(|| il.get("smallThumbnail")))
        .and_then(|u| u.as_str())
        .map(|s| s.replace("http://", "https://"));
    let page_count = volume_info
        .get("pageCount")
        .and_then(|p| p.as_i64())
        .map(|p| p as i32);
    let language = volume_info
        .get("language")
        .and_then(|l| l.as_str())
        .map(String::from);
    let publish_date = volume_info
        .get("publishedDate")
        .and_then(|d| d.as_str())
        .map(String::from);
    let google_id = item
        .get("id")
        .and_then(|id| id.as_str())
        .unwrap_or("")
        .to_string();
    let volume_number = extract_volume_number(&title);

    BookCandidate {
        external_book_id: google_id,
        title,
        volume_number,
        authors,
        isbn,
        summary,
        cover_url,
        page_count,
        language,
        publish_date,
        metadata_json: serde_json::json!({}),
    }
}

fn extract_series_name(title: &str) -> String {
    // Remove trailing volume indicators like "Vol. 1", "Tome 2", "#3", "- Volume 1".
    let re_patterns = [
        r"(?i)\s*[-–—]\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"(?i)\s*,?\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"\s*\(\d+\)\s*$",
        r"\s+\d+\s*$",
    ];

    let mut result = title.to_string();
    for pattern in &re_patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            // Only stop once a pattern actually matched: `replace` returns its
            // input unchanged on a non-match, which previously ended the loop
            // after the first pattern regardless of whether it matched.
            if re.is_match(&result) {
                let cleaned = re.replace(&result, "").to_string();
                if !cleaned.trim().is_empty() {
                    result = cleaned;
                }
                break;
            }
        }
    }

    result.trim().to_string()
}

fn extract_volume_number(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*(\d+)",
        r"\((\d+)\)\s*$",
        r"\b(\d+)\s*$",
    ];

    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Some(num) = caps.get(1).and_then(|m| m.as_str().parse::<i32>().ok()) {
                    return Some(num);
                }
            }
        }
    }

    None
}
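
// Illustrative sketch (not part of the original diff): how a volume title is
// split into series name and volume number by the two helpers above. The
// titles below are invented for the example.
#[cfg(test)]
mod series_title_examples {
    use super::{extract_series_name, extract_volume_number};

    #[test]
    fn title_splits_into_series_and_volume() {
        assert_eq!(extract_series_name("Blacksad, Vol. 3"), "Blacksad");
        assert_eq!(extract_volume_number("Blacksad, Vol. 3"), Some(3));
        assert_eq!(extract_series_name("Blacksad - Tome 2 : Arctic-Nation"), "Blacksad");
        assert_eq!(extract_volume_number("Blacksad - Tome 2 : Arctic-Nation"), Some(2));
    }
}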

fn extract_year(date: &str) -> Option<i32> {
    // Dates arrive as "YYYY", "YYYY-MM", or "YYYY-MM-DD"; the year is the first four bytes.
    date.get(..4).and_then(|s| s.parse::<i32>().ok())
}

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        // Simple character-overlap ratio.
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}

struct SeriesCandidateBuilder {
    title: String,
    authors: Vec<String>,
    description: Option<String>,
    publishers: Vec<String>,
    start_year: Option<i32>,
    volume_count: i32,
    cover_url: Option<String>,
    external_id: String,
    external_url: Option<String>,
    metadata_json: serde_json::Value,
}
295 apps/api/src/metadata_providers/mod.rs (new file)
@@ -0,0 +1,295 @@
pub mod anilist;
pub mod bedetheque;
pub mod comicvine;
pub mod google_books;
pub mod open_library;

use serde::{Deserialize, Serialize};

/// Configuration passed to providers (API keys, etc.)
#[derive(Debug, Clone, Default)]
pub struct ProviderConfig {
    pub api_key: Option<String>,
    /// Preferred language for metadata results (ISO 639-1: "en", "fr", "es"). Defaults to "en".
    pub language: String,
}

/// A candidate series returned by a provider search
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SeriesCandidate {
    pub external_id: String,
    pub title: String,
    pub authors: Vec<String>,
    pub description: Option<String>,
    pub publishers: Vec<String>,
    pub start_year: Option<i32>,
    pub total_volumes: Option<i32>,
    pub cover_url: Option<String>,
    pub external_url: Option<String>,
    pub confidence: f32,
    pub metadata_json: serde_json::Value,
}

/// A candidate book within a series
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BookCandidate {
    pub external_book_id: String,
    pub title: String,
    pub volume_number: Option<i32>,
    pub authors: Vec<String>,
    pub isbn: Option<String>,
    pub summary: Option<String>,
    pub cover_url: Option<String>,
    pub page_count: Option<i32>,
    pub language: Option<String>,
    pub publish_date: Option<String>,
    pub metadata_json: serde_json::Value,
}

/// Trait that all metadata providers must implement
pub trait MetadataProvider: Send + Sync {
    #[allow(dead_code)]
    fn name(&self) -> &str;

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    >;

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    >;
}

/// Factory function to get a provider by name
pub fn get_provider(name: &str) -> Option<Box<dyn MetadataProvider>> {
    match name {
        "google_books" => Some(Box::new(google_books::GoogleBooksProvider)),
        "open_library" => Some(Box::new(open_library::OpenLibraryProvider)),
        "comicvine" => Some(Box::new(comicvine::ComicVineProvider)),
        "anilist" => Some(Box::new(anilist::AniListProvider)),
        "bedetheque" => Some(Box::new(bedetheque::BedethequeProvider)),
        _ => None,
    }
}
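
// Illustrative usage sketch (not part of the original diff): driving the
// factory + trait-object API from async code. Assumes a tokio runtime is
// available; "Blacksad" is just a demo query.
#[allow(dead_code)]
async fn provider_usage_demo() -> Result<(), String> {
    let cfg = ProviderConfig { api_key: None, language: "en".to_string() };
    let provider = get_provider("google_books").ok_or("unknown provider")?;
    let candidates = provider.search_series("Blacksad", &cfg).await?;
    if let Some(best) = candidates.first() {
        let books = provider.get_series_books(&best.external_id, &cfg).await?;
        println!("{} book(s) found for {:?}", books.len(), best.title);
    }
    Ok(())
}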

// ---------------------------------------------------------------------------
// End-to-end provider tests
//
// These tests hit real external APIs — run them explicitly with:
//   cargo test -p api providers_e2e -- --ignored --nocapture
// ---------------------------------------------------------------------------

#[cfg(test)]
mod providers_e2e {
    use super::*;

    fn config_fr() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "fr".to_string() }
    }

    fn config_en() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "en".to_string() }
    }

    fn print_candidate(name: &str, c: &SeriesCandidate) {
        println!("\n=== {name} — best candidate ===");
        println!("  title:         {:?}", c.title);
        println!("  external_id:   {:?}", c.external_id);
        println!("  authors:       {:?}", c.authors);
        // Truncate by characters, not bytes: byte slicing can panic on a
        // multi-byte UTF-8 boundary (likely with accented French descriptions).
        println!(
            "  description:   {:?}",
            c.description.as_deref().map(|d| d.chars().take(120).collect::<String>())
        );
        println!("  publishers:    {:?}", c.publishers);
        println!("  start_year:    {:?}", c.start_year);
        println!("  total_volumes: {:?}", c.total_volumes);
        println!("  cover_url:     {}", c.cover_url.is_some());
        println!("  external_url:  {}", c.external_url.is_some());
        println!("  confidence:    {:.2}", c.confidence);
        println!(
            "  metadata_json: {}",
            serde_json::to_string_pretty(&c.metadata_json).unwrap_or_default()
        );
    }

    fn print_books(name: &str, books: &[BookCandidate]) {
        println!("\n=== {name} — {} books ===", books.len());
        for (i, b) in books.iter().take(5).enumerate() {
            println!(
                "  [{}] vol={:?} title={:?} authors={} isbn={:?} pages={:?} lang={:?} date={:?} cover={}",
                i, b.volume_number, b.title, b.authors.len(), b.isbn, b.page_count,
                b.language, b.publish_date, b.cover_url.is_some()
            );
        }
        if books.len() > 5 {
            println!("  ... and {} more", books.len() - 5);
        }

        let with_vol = books.iter().filter(|b| b.volume_number.is_some()).count();
        let with_isbn = books.iter().filter(|b| b.isbn.is_some()).count();
        let with_authors = books.iter().filter(|b| !b.authors.is_empty()).count();
        let with_date = books.iter().filter(|b| b.publish_date.is_some()).count();
        let with_cover = books.iter().filter(|b| b.cover_url.is_some()).count();
        let with_pages = books.iter().filter(|b| b.page_count.is_some()).count();
        println!("  --- field coverage ---");
        println!("  volume_number: {with_vol}/{}", books.len());
        println!("  isbn:          {with_isbn}/{}", books.len());
        println!("  authors:       {with_authors}/{}", books.len());
        println!("  publish_date:  {with_date}/{}", books.len());
        println!("  cover_url:     {with_cover}/{}", books.len());
        println!("  page_count:    {with_pages}/{}", books.len());
    }

    // --- Google Books ---

    #[tokio::test]
    #[ignore]
    async fn google_books_search_and_books() {
        let p = get_provider("google_books").unwrap();
        let cfg = config_en();

        let candidates = p.search_series("Blacksad", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "google_books: no results for Blacksad");
        print_candidate("google_books", &candidates[0]);

        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("google_books", &books);
        assert!(!books.is_empty(), "google_books: no books returned");
    }

    // --- Open Library ---

    #[tokio::test]
    #[ignore]
    async fn open_library_search_and_books() {
        let p = get_provider("open_library").unwrap();
        let cfg = config_en();

        let candidates = p.search_series("Sandman Neil Gaiman", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "open_library: no results for Sandman");
        print_candidate("open_library", &candidates[0]);

        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("open_library", &books);
        assert!(!books.is_empty(), "open_library: no books returned");
    }

    // --- AniList ---

    #[tokio::test]
    #[ignore]
    async fn anilist_search_finished() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("Death Note", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for Death Note");
        print_candidate("anilist (finished)", &candidates[0]);

        let best = &candidates[0];
        assert!(best.total_volumes.is_some(), "anilist: finished series should have total_volumes");
        assert!(best.description.is_some(), "anilist: should have description");
        assert!(!best.authors.is_empty(), "anilist: should have authors");

        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("FINISHED"), "anilist: Death Note should be FINISHED");

        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("anilist (Death Note)", &books);
        assert!(books.len() >= 12, "anilist: Death Note should have ≥12 volumes, got {}", books.len());
    }

    #[tokio::test]
    #[ignore]
    async fn anilist_search_ongoing() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("One Piece", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for One Piece");
        print_candidate("anilist (ongoing)", &candidates[0]);

        let best = &candidates[0];
        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("RELEASING"), "anilist: One Piece should be RELEASING");

        let volume_source = best.metadata_json.get("volume_source").and_then(|s| s.as_str());
        println!("  volume_source: {:?}", volume_source);
        println!("  total_volumes: {:?}", best.total_volumes);
    }

    // --- Bédéthèque ---

    #[tokio::test]
    #[ignore]
    async fn bedetheque_search_and_books() {
        let p = get_provider("bedetheque").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("De Cape et de Crocs", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "bedetheque: no results");
        print_candidate("bedetheque", &candidates[0]);

        let best = &candidates[0];
        assert!(best.description.is_some(), "bedetheque: should have description");
        assert!(!best.authors.is_empty(), "bedetheque: should have authors");
        assert!(!best.publishers.is_empty(), "bedetheque: should have publishers");
        assert!(best.start_year.is_some(), "bedetheque: should have start_year");
        assert!(best.total_volumes.is_some(), "bedetheque: should have total_volumes");

        // Enriched metadata_json
        let mj = &best.metadata_json;
        assert!(
            mj.get("genres").and_then(|g| g.as_array()).map(|a| !a.is_empty()).unwrap_or(false),
            "bedetheque: should have genres"
        );
        assert!(mj.get("status").and_then(|s| s.as_str()).is_some(), "bedetheque: should have status");

        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("bedetheque", &books);
        assert!(books.len() >= 12, "bedetheque: De Cape et de Crocs should have ≥12 volumes, got {}", books.len());
    }

    // --- ComicVine (needs API key) ---

    #[tokio::test]
    #[ignore]
    async fn comicvine_no_key() {
        let p = get_provider("comicvine").unwrap();
        let cfg = config_en();

        let result = p.search_series("Batman", &cfg).await;
        println!("\n=== comicvine (no key) ===");
        match result {
            Ok(c) => println!("  returned {} candidates (unexpected without key)", c.len()),
            Err(e) => println!("  expected error: {e}"),
        }
    }

    // --- Cross-provider comparison ---

    #[tokio::test]
    #[ignore]
    async fn cross_provider_blacksad() {
        println!("\n{}", "=".repeat(60));
        println!("  Cross-provider comparison: Blacksad");
        println!("{}\n", "=".repeat(60));

        let providers: Vec<(&str, ProviderConfig)> = vec![
            ("google_books", config_en()),
            ("open_library", config_en()),
            ("anilist", config_fr()),
            ("bedetheque", config_fr()),
        ];

        for (name, cfg) in &providers {
            let p = get_provider(name).unwrap();
            match p.search_series("Blacksad", cfg).await {
                Ok(candidates) if !candidates.is_empty() => {
                    let b = &candidates[0];
                    println!(
                        "[{name}] title={:?} authors={} desc={} pubs={} year={:?} vols={:?} cover={} url={} conf={:.2}",
                        b.title, b.authors.len(), b.description.is_some(), b.publishers.len(),
                        b.start_year, b.total_volumes, b.cover_url.is_some(), b.external_url.is_some(), b.confidence
                    );
                }
                Ok(_) => println!("[{name}] no results"),
                Err(e) => println!("[{name}] error: {e}"),
            }
        }
    }
}
351 apps/api/src/metadata_providers/open_library.rs (new file)
@@ -0,0 +1,351 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct OpenLibraryProvider;

impl MetadataProvider for OpenLibraryProvider {
    fn name(&self) -> &str {
        "open_library"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // Open Library uses 3-letter (ISO 639-2) language codes.
    let ol_lang = match config.language.as_str() {
        "fr" => "fre",
        "es" => "spa",
        _ => "eng",
    };

    let url = format!(
        "https://openlibrary.org/search.json?title={}&limit=20&language={}",
        urlencoded(query),
        ol_lang,
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Open Library request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Open Library returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Open Library response: {e}"))?;

    let docs = match data.get("docs").and_then(|d| d.as_array()) {
        Some(docs) => docs,
        None => return Ok(vec![]),
    };

    let query_lower = query.to_lowercase();
    let mut series_map: std::collections::HashMap<String, SeriesCandidateBuilder> =
        std::collections::HashMap::new();

    for doc in docs {
        let title = doc
            .get("title")
            .and_then(|t| t.as_str())
            .unwrap_or("")
            .to_string();
        let authors: Vec<String> = doc
            .get("author_name")
            .and_then(|a| a.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
            .unwrap_or_default();
        let publishers: Vec<String> = doc
            .get("publisher")
            .and_then(|a| a.as_array())
            .map(|arr| {
                let mut pubs: Vec<String> =
                    arr.iter().filter_map(|v| v.as_str().map(String::from)).collect();
                pubs.truncate(3);
                pubs
            })
            .unwrap_or_default();
        let first_publish_year = doc
            .get("first_publish_year")
            .and_then(|y| y.as_i64())
            .map(|y| y as i32);
        let cover_i = doc.get("cover_i").and_then(|c| c.as_i64());
        let cover_url =
            cover_i.map(|id| format!("https://covers.openlibrary.org/b/id/{}-M.jpg", id));
        let key = doc
            .get("key")
            .and_then(|k| k.as_str())
            .unwrap_or("")
            .to_string();

        let series_name = extract_series_name(&title);

        let entry = series_map
            .entry(series_name.clone())
            .or_insert_with(|| SeriesCandidateBuilder {
                title: series_name.clone(),
                authors: vec![],
                description: None,
                publishers: vec![],
                start_year: None,
                volume_count: 0,
                cover_url: None,
                external_id: key.clone(),
                external_url: if key.is_empty() {
                    None
                } else {
                    Some(format!("https://openlibrary.org{}", key))
                },
            });

        entry.volume_count += 1;

        for a in &authors {
            if !entry.authors.contains(a) {
                entry.authors.push(a.clone());
            }
        }
        for p in &publishers {
            if !entry.publishers.contains(p) {
                entry.publishers.push(p.clone());
            }
        }
        // Track the earliest first_publish_year as the series start year.
        if let Some(y) = first_publish_year {
            if entry.start_year.map_or(true, |cur| cur > y) {
                entry.start_year = Some(y);
            }
        }
        if entry.cover_url.is_none() {
            entry.cover_url = cover_url;
        }
    }

    let mut candidates: Vec<SeriesCandidate> = series_map
        .into_values()
        .map(|b| {
            let confidence = compute_confidence(&b.title, &query_lower);
            SeriesCandidate {
                external_id: b.external_id,
                title: b.title,
                authors: b.authors,
                description: b.description,
                publishers: b.publishers,
                start_year: b.start_year,
                total_volumes: if b.volume_count > 1 { Some(b.volume_count) } else { None },
                cover_url: b.cover_url,
                external_url: b.external_url,
                confidence,
                metadata_json: serde_json::json!({}),
            }
        })
        .collect();

    candidates.sort_by(|a, b| {
        b.confidence
            .partial_cmp(&a.confidence)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    candidates.truncate(10);
    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    _config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // Fetch the work to get its title for the series search.
    let url = format!("https://openlibrary.org{}.json", external_id);
    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Open Library request failed: {e}"))?;

    let work: serde_json::Value = if resp.status().is_success() {
        resp.json().await.map_err(|e| format!("Failed to parse response: {e}"))?
    } else {
        serde_json::json!({})
    };

    let title = work.get("title").and_then(|t| t.as_str()).unwrap_or("");
    let series_name = extract_series_name(title);

    // Search for editions of this series.
    let search_url = format!(
        "https://openlibrary.org/search.json?title={}&limit=40",
        urlencoded(&series_name)
    );
    let resp = client
        .get(&search_url)
        .send()
        .await
        .map_err(|e| format!("Open Library search failed: {e}"))?;

    if !resp.status().is_success() {
        return Ok(vec![]);
    }

    let data: serde_json::Value =
        resp.json().await.map_err(|e| format!("Failed to parse response: {e}"))?;
    let docs = match data.get("docs").and_then(|d| d.as_array()) {
        Some(docs) => docs,
        None => return Ok(vec![]),
    };

    let mut books: Vec<BookCandidate> = docs
        .iter()
        .map(|doc| {
            let title = doc.get("title").and_then(|t| t.as_str()).unwrap_or("").to_string();
            let authors: Vec<String> = doc
                .get("author_name")
                .and_then(|a| a.as_array())
                .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
                .unwrap_or_default();
            let isbn = doc
                .get("isbn")
                .and_then(|a| a.as_array())
                .and_then(|arr| arr.first())
                .and_then(|v| v.as_str())
                .map(String::from);
            let page_count = doc
                .get("number_of_pages_median")
                .and_then(|n| n.as_i64())
                .map(|n| n as i32);
            let cover_i = doc.get("cover_i").and_then(|c| c.as_i64());
            let cover_url =
                cover_i.map(|id| format!("https://covers.openlibrary.org/b/id/{}-M.jpg", id));
            let language = doc
                .get("language")
                .and_then(|a| a.as_array())
                .and_then(|arr| arr.first())
                .and_then(|v| v.as_str())
                .map(String::from);
            let publish_date = doc
                .get("first_publish_year")
                .and_then(|y| y.as_i64())
                .map(|y| y.to_string());
            let key = doc.get("key").and_then(|k| k.as_str()).unwrap_or("").to_string();
            let volume_number = extract_volume_number(&title);

            BookCandidate {
                external_book_id: key,
                title,
                volume_number,
                authors,
                isbn,
                summary: None,
                cover_url,
                page_count,
                language,
                publish_date,
                metadata_json: serde_json::json!({}),
            }
        })
        .collect();

    books.sort_by_key(|b| b.volume_number.unwrap_or(999));
    Ok(books)
}

fn extract_series_name(title: &str) -> String {
    let re_patterns = [
        r"(?i)\s*[-–—]\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"(?i)\s*,?\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"\s*\(\d+\)\s*$",
        r"\s+\d+\s*$",
    ];
    let mut result = title.to_string();
    for pattern in &re_patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            // Same guard as in google_books: only stop once a pattern actually
            // matched, since `replace` returns its input unchanged on a non-match.
            if re.is_match(&result) {
                let cleaned = re.replace(&result, "").to_string();
                if !cleaned.trim().is_empty() {
                    result = cleaned;
                }
                break;
            }
        }
    }
    result.trim().to_string()
}

fn extract_volume_number(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*(\d+)",
        r"\((\d+)\)\s*$",
        r"\b(\d+)\s*$",
    ];
    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Some(num) = caps.get(1).and_then(|m| m.as_str().parse::<i32>().ok()) {
                    return Some(num);
                }
            }
        }
    }
    None
}

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        // Crude fallback: fraction of query characters that occur in the title.
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}

struct SeriesCandidateBuilder {
    title: String,
    authors: Vec<String>,
    description: Option<String>,
    publishers: Vec<String>,
    start_year: Option<i32>,
    volume_count: i32,
    cover_url: Option<String>,
    external_id: String,
    external_url: Option<String>,
}
836 apps/api/src/metadata_refresh.rs (new file)
@@ -0,0 +1,836 @@
use axum::{
    extract::{Path as AxumPath, State},
    Json,
};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use uuid::Uuid;
use utoipa::ToSchema;
use tracing::{info, warn};

use crate::{error::ApiError, metadata_providers, notifications, state::AppState};
use crate::metadata_batch::{load_provider_config_from_pool, is_job_cancelled, update_progress};

// ---------------------------------------------------------------------------
// DTOs
// ---------------------------------------------------------------------------

#[derive(Deserialize, ToSchema)]
pub struct MetadataRefreshRequest {
    pub library_id: String,
}

/// A single field change: old → new
#[derive(Serialize, Clone)]
struct FieldDiff {
    field: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    old: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    new: Option<serde_json::Value>,
}

/// Per-book changes
#[derive(Serialize, Clone)]
struct BookDiff {
    book_id: String,
    title: String,
    volume: Option<i32>,
    changes: Vec<FieldDiff>,
}

/// Per-series change report
#[derive(Serialize, Clone)]
struct SeriesRefreshResult {
    series_name: String,
    provider: String,
    status: String, // "updated", "unchanged", "error"
    series_changes: Vec<FieldDiff>,
    book_changes: Vec<BookDiff>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}

/// Response DTO for the report endpoint
#[derive(Serialize, ToSchema)]
pub struct MetadataRefreshReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    pub status: String,
    pub total_links: i64,
    pub refreshed: i64,
    pub unchanged: i64,
    pub errors: i64,
    pub changes: serde_json::Value,
}

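Put together, a finished job's report serializes roughly like this (shape taken from the DTOs above; the concrete values and the "openlibrary" provider name are invented for illustration):

fn _example_refresh_report() -> serde_json::Value {
    serde_json::json!({
        "job_id": "00000000-0000-0000-0000-000000000000",
        "status": "success",
        "total_links": 12,
        "refreshed": 2,
        "unchanged": 9,
        "errors": 1,
        "changes": [{
            "series_name": "One Piece",
            "provider": "openlibrary",
            "status": "updated",
            "series_changes": [
                { "field": "total_volumes", "old": 105, "new": 108 }
            ],
            // "error" is omitted entirely when None (skip_serializing_if)
            "book_changes": []
        }]
    })
}
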
// ---------------------------------------------------------------------------
// POST /metadata/refresh — Trigger a metadata refresh job
// ---------------------------------------------------------------------------

#[utoipa::path(
    post,
    path = "/metadata/refresh",
    tag = "metadata",
    request_body = MetadataRefreshRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_refresh(
    State(state): State<AppState>,
    Json(body): Json<MetadataRefreshRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let library_id: Uuid = body
        .library_id
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;

    // Verify library exists
    sqlx::query("SELECT 1 FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;

    // Check no existing running metadata_refresh job for this library
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_refresh' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;

    if let Some(existing_id) = existing {
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }

    // Check there are approved links to refresh (only ongoing series)
    let link_count: i64 = sqlx::query_scalar(
        r#"
        SELECT COUNT(*) FROM external_metadata_links eml
        LEFT JOIN series_metadata sm
          ON sm.library_id = eml.library_id AND sm.name = eml.series_name
        WHERE eml.library_id = $1
          AND eml.status = 'approved'
          AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
        "#,
    )
    .bind(library_id)
    .fetch_one(&state.pool)
    .await?;

    if link_count == 0 {
        return Err(ApiError::bad_request("No approved metadata links to refresh for this library"));
    }

    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_refresh', 'running', NOW())",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;

    // Spawn the background processing task (status already 'running' to avoid poller race)
    let pool = state.pool.clone();
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await
        .ok()
        .flatten();
    tokio::spawn(async move {
        if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
            warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
            let _ = sqlx::query(
                "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .bind(e.to_string())
            .execute(&pool)
            .await;
            notifications::notify(
                pool.clone(),
                notifications::NotificationEvent::MetadataRefreshFailed {
                    library_name,
                    error: e.to_string(),
                },
            );
        }
    });

    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "pending",
    })))
}

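A minimal client-side sketch of driving this endpoint (not from the repo; assumes the reqwest crate with its json feature and a valid Bearer token):

async fn trigger_refresh(base: &str, token: &str, library_id: &str) -> Result<(), reqwest::Error> {
    let job: serde_json::Value = reqwest::Client::new()
        .post(format!("{base}/metadata/refresh"))
        .bearer_auth(token)
        .json(&serde_json::json!({ "library_id": library_id }))
        .send()
        .await?
        .json()
        .await?;
    // "already_running" means a refresh for this library is still in flight;
    // otherwise poll GET /metadata/refresh/{id}/report for the outcome.
    println!("job {} -> {}", job["id"], job["status"]);
    Ok(())
}
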
// ---------------------------------------------------------------------------
// GET /metadata/refresh/:id/report — Refresh report from stats_json
// ---------------------------------------------------------------------------

#[utoipa::path(
    get,
    path = "/metadata/refresh/{id}/report",
    tag = "metadata",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = MetadataRefreshReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_refresh_report(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<Json<MetadataRefreshReportDto>, ApiError> {
    let row = sqlx::query(
        "SELECT status, stats_json, total_files FROM index_jobs WHERE id = $1 AND type = 'metadata_refresh'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;

    let job_status: String = row.get("status");
    let stats: Option<serde_json::Value> = row.get("stats_json");
    let total_files: Option<i32> = row.get("total_files");

    let (refreshed, unchanged, errors, changes) = if let Some(ref s) = stats {
        (
            s.get("refreshed").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("unchanged").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("errors").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("changes").cloned().unwrap_or(serde_json::json!([])),
        )
    } else {
        (0, 0, 0, serde_json::json!([]))
    };

    Ok(Json(MetadataRefreshReportDto {
        job_id,
        status: job_status,
        total_links: total_files.unwrap_or(0) as i64,
        refreshed,
        unchanged,
        errors,
        changes,
    }))
}

// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------

pub(crate) async fn process_metadata_refresh(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    // Set job to running
    sqlx::query("UPDATE index_jobs SET status = 'running', started_at = NOW() WHERE id = $1")
        .bind(job_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Get approved links for this library, only for ongoing series (not ended/cancelled)
    let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
        r#"
        SELECT eml.id, eml.series_name, eml.provider, eml.external_id
        FROM external_metadata_links eml
        LEFT JOIN series_metadata sm
          ON sm.library_id = eml.library_id AND sm.name = eml.series_name
        WHERE eml.library_id = $1
          AND eml.status = 'approved'
          AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
        ORDER BY eml.series_name
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    let total = links.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let mut processed = 0i32;
    let mut refreshed = 0i32;
    let mut unchanged = 0i32;
    let mut errors = 0i32;
    let mut all_results: Vec<SeriesRefreshResult> = Vec::new();

    for (link_id, series_name, provider_name, external_id) in &links {
        // Check cancellation
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok(());
        }

        match refresh_link(pool, *link_id, library_id, series_name, provider_name, external_id).await {
            Ok(result) => {
                if result.status == "updated" {
                    refreshed += 1;
                    info!("[METADATA_REFRESH] job={job_id} updated series='{series_name}' via {provider_name}");
                } else {
                    unchanged += 1;
                }
                all_results.push(result);
            }
            Err(e) => {
                errors += 1;
                warn!("[METADATA_REFRESH] job={job_id} error on series='{series_name}': {e}");
                all_results.push(SeriesRefreshResult {
                    series_name: series_name.clone(),
                    provider: provider_name.clone(),
                    status: "error".to_string(),
                    series_changes: vec![],
                    book_changes: vec![],
                    error: Some(e),
                });
            }
        }

        processed += 1;
        update_progress(pool, job_id, processed, total, series_name).await;

        // Rate limit: 1s delay between provider calls
        tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
    }

    // Only keep series that have changes or errors (filter out "unchanged")
    let changes_only: Vec<&SeriesRefreshResult> = all_results
        .iter()
        .filter(|r| r.status != "unchanged")
        .collect();

    // Build stats summary
    let stats = serde_json::json!({
        "total_links": total,
        "refreshed": refreshed,
        "unchanged": unchanged,
        "errors": errors,
        "changes": changes_only,
    });

    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, stats_json = $2 WHERE id = $1",
    )
    .bind(job_id)
    .bind(stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");

    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::MetadataRefreshCompleted {
            library_name,
            refreshed,
            unchanged,
            errors,
        },
    );

    Ok(())
}

/// Refresh a single approved metadata link: re-fetch from provider, compare, sync, return diff
async fn refresh_link(
    pool: &PgPool,
    link_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    provider_name: &str,
    external_id: &str,
) -> Result<SeriesRefreshResult, String> {
    let provider = metadata_providers::get_provider(provider_name)
        .ok_or_else(|| format!("Unknown provider: {provider_name}"))?;

    let config = load_provider_config_from_pool(pool, provider_name).await;

    let mut series_changes: Vec<FieldDiff> = Vec::new();
    let mut book_changes: Vec<BookDiff> = Vec::new();

    // ── Series-level refresh ──────────────────────────────────────────────
    let candidates = provider
        .search_series(series_name, &config)
        .await
        .map_err(|e| format!("provider search error: {e}"))?;

    let candidate = candidates
        .iter()
        .find(|c| c.external_id == external_id)
        .or_else(|| candidates.first());

    if let Some(candidate) = candidate {
        // Update link metadata_json
        sqlx::query(
            r#"
            UPDATE external_metadata_links
            SET metadata_json = $2,
                total_volumes_external = $3,
                updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(link_id)
        .bind(&candidate.metadata_json)
        .bind(candidate.total_volumes)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + sync series metadata
        series_changes = sync_series_with_diff(pool, library_id, series_name, candidate).await?;
    }

    // ── Book-level refresh ────────────────────────────────────────────────
    let books = provider
        .get_series_books(external_id, &config)
        .await
        .map_err(|e| format!("provider books error: {e}"))?;

    // Delete existing external_book_metadata for this link
    sqlx::query("DELETE FROM external_book_metadata WHERE link_id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Pre-fetch local books
    let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
        r#"
        SELECT id, volume, title FROM books
        WHERE library_id = $1
          AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
        ORDER BY volume NULLS LAST,
                 REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
                 COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
                 title ASC
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    let local_books_with_pos: Vec<(Uuid, i32, String)> = local_books
        .iter()
        .enumerate()
        .map(|(idx, (id, vol, title))| (*id, vol.unwrap_or((idx + 1) as i32), title.clone()))
        .collect();

    let mut matched_local_ids = std::collections::HashSet::new();

    for (ext_idx, book) in books.iter().enumerate() {
        let ext_vol = book.volume_number.unwrap_or((ext_idx + 1) as i32);

        // Match by volume number
        let mut local_book_id: Option<Uuid> = local_books_with_pos
            .iter()
            .find(|(id, v, _)| *v == ext_vol && !matched_local_ids.contains(id))
            .map(|(id, _, _)| *id);

        // Match by title containment
        if local_book_id.is_none() {
            let ext_title_lower = book.title.to_lowercase();
            local_book_id = local_books_with_pos
                .iter()
                .find(|(id, _, local_title)| {
                    if matched_local_ids.contains(id) {
                        return false;
                    }
                    let local_lower = local_title.to_lowercase();
                    local_lower.contains(&ext_title_lower) || ext_title_lower.contains(&local_lower)
                })
                .map(|(id, _, _)| *id);
        }

        if let Some(id) = local_book_id {
            matched_local_ids.insert(id);
        }

        // Insert external_book_metadata
        sqlx::query(
            r#"
            INSERT INTO external_book_metadata
                (link_id, book_id, external_book_id, volume_number, title, authors, isbn, summary, cover_url, page_count, language, publish_date, metadata_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            "#,
        )
        .bind(link_id)
        .bind(local_book_id)
        .bind(&book.external_book_id)
        .bind(book.volume_number)
        .bind(&book.title)
        .bind(&book.authors)
        .bind(&book.isbn)
        .bind(&book.summary)
        .bind(&book.cover_url)
        .bind(book.page_count)
        .bind(&book.language)
        .bind(&book.publish_date)
        .bind(&book.metadata_json)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + push metadata to matched local book
        if let Some(book_id) = local_book_id {
            let diffs = sync_book_with_diff(pool, book_id, book).await?;
            if !diffs.is_empty() {
                let local_title = local_books_with_pos
                    .iter()
                    .find(|(id, _, _)| *id == book_id)
                    .map(|(_, _, t)| t.clone())
                    .unwrap_or_default();
                book_changes.push(BookDiff {
                    book_id: book_id.to_string(),
                    title: local_title,
                    volume: book.volume_number,
                    changes: diffs,
                });
            }
        }
    }

    // Update synced_at on the link
    sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let has_changes = !series_changes.is_empty() || !book_changes.is_empty();

    Ok(SeriesRefreshResult {
        series_name: series_name.to_string(),
        provider: provider_name.to_string(),
        status: if has_changes { "updated".to_string() } else { "unchanged".to_string() },
        series_changes,
        book_changes,
        error: None,
    })
}

// ---------------------------------------------------------------------------
// Diff helpers
// ---------------------------------------------------------------------------

/// Compare old/new for a nullable string field. Returns Some(FieldDiff) only if the value actually changed.
fn diff_opt_str(field: &str, old: Option<&str>, new: Option<&str>) -> Option<FieldDiff> {
    let new_val = new.filter(|s| !s.is_empty());
    // Only report a change if there is a new non-empty value AND it differs from old
    match (old, new_val) {
        (Some(o), Some(n)) if o != n => Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::Value::String(o.to_string())),
            new: Some(serde_json::Value::String(n.to_string())),
        }),
        (None, Some(n)) => Some(FieldDiff {
            field: field.to_string(),
            old: None,
            new: Some(serde_json::Value::String(n.to_string())),
        }),
        _ => None,
    }
}

fn diff_opt_i32(field: &str, old: Option<i32>, new: Option<i32>) -> Option<FieldDiff> {
    match (old, new) {
        (Some(o), Some(n)) if o != n => Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::json!(o)),
            new: Some(serde_json::json!(n)),
        }),
        (None, Some(n)) => Some(FieldDiff {
            field: field.to_string(),
            old: None,
            new: Some(serde_json::json!(n)),
        }),
        _ => None,
    }
}

fn diff_str_vec(field: &str, old: &[String], new: &[String]) -> Option<FieldDiff> {
    if new.is_empty() {
        return None;
    }
    if old != new {
        Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::json!(old)),
            new: Some(serde_json::json!(new)),
        })
    } else {
        None
    }
}

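Note the deliberate asymmetry in these helpers: a missing or empty provider value never produces a diff, so a refresh can add or overwrite fields but never reports (or performs) a removal. Illustrative checks, not part of the diff:

#[test]
fn empty_new_values_never_diff() {
    assert!(diff_opt_str("summary", Some("old text"), Some("")).is_none());
    assert!(diff_opt_str("summary", Some("old text"), None).is_none());
    assert!(diff_opt_str("summary", None, Some("fresh")).is_some());
    assert!(diff_str_vec("authors", &["Oda".into()], &[]).is_none());
}
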
// ---------------------------------------------------------------------------
// Series sync with diff tracking
// ---------------------------------------------------------------------------

async fn sync_series_with_diff(
    pool: &PgPool,
    library_id: Uuid,
    series_name: &str,
    candidate: &metadata_providers::SeriesCandidate,
) -> Result<Vec<FieldDiff>, String> {
    let new_description = candidate.metadata_json
        .get("description")
        .and_then(|d| d.as_str())
        .or(candidate.description.as_deref());
    let new_authors = &candidate.authors;
    let new_publishers = &candidate.publishers;
    let new_start_year = candidate.start_year;
    let new_total_volumes = candidate.total_volumes;
    let new_status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
        Some(crate::metadata::normalize_series_status(pool, raw).await)
    } else {
        None
    };
    let new_status = new_status.as_deref();

    // Fetch existing series metadata for diffing
    let existing = sqlx::query(
        r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
           FROM series_metadata WHERE library_id = $1 AND name = $2"#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_optional(pool)
    .await
    .map_err(|e| e.to_string())?;

    let locked = existing
        .as_ref()
        .map(|r| r.get::<serde_json::Value, _>("locked_fields"))
        .unwrap_or(serde_json::json!({}));
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change)
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("description") {
        let old_desc: Option<String> = existing.as_ref().and_then(|r| r.get("description"));
        if let Some(d) = diff_opt_str("description", old_desc.as_deref(), new_description) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old_authors: Vec<String> = existing.as_ref().map(|r| r.get("authors")).unwrap_or_default();
        if let Some(d) = diff_str_vec("authors", &old_authors, new_authors) {
            diffs.push(d);
        }
    }
    if !is_locked("publishers") {
        let old_publishers: Vec<String> = existing.as_ref().map(|r| r.get("publishers")).unwrap_or_default();
        if let Some(d) = diff_str_vec("publishers", &old_publishers, new_publishers) {
            diffs.push(d);
        }
    }
    if !is_locked("start_year") {
        let old_year: Option<i32> = existing.as_ref().and_then(|r| r.get("start_year"));
        if let Some(d) = diff_opt_i32("start_year", old_year, new_start_year) {
            diffs.push(d);
        }
    }
    if !is_locked("total_volumes") {
        let old_vols: Option<i32> = existing.as_ref().and_then(|r| r.get("total_volumes"));
        if let Some(d) = diff_opt_i32("total_volumes", old_vols, new_total_volumes) {
            diffs.push(d);
        }
    }
    if !is_locked("status") {
        let old_status: Option<String> = existing.as_ref().and_then(|r| r.get("status"));
        if let Some(d) = diff_opt_str("status", old_status.as_deref(), new_status) {
            diffs.push(d);
        }
    }

    // Now do the actual upsert
    sqlx::query(
        r#"
        INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
        ON CONFLICT (library_id, name)
        DO UPDATE SET
            description = CASE
                WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
                ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
            END,
            publishers = CASE
                WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
                WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
                ELSE series_metadata.publishers
            END,
            start_year = CASE
                WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
                ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
            END,
            total_volumes = CASE
                WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
                ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
            END,
            status = CASE
                WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
                ELSE COALESCE(EXCLUDED.status, series_metadata.status)
            END,
            authors = CASE
                WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
                WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
                ELSE series_metadata.authors
            END,
            updated_at = NOW()
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .bind(new_description)
    .bind(new_publishers)
    .bind(new_start_year)
    .bind(new_total_volumes)
    .bind(new_status)
    .bind(new_authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}

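How field locking plays out in practice (a sketch, not from the diff; assumes locked_fields is a jsonb column defaulting to '{}'): setting {"description": true} on a series row makes the CASE arms above keep the local description and keeps that field out of the diff list, while unlocked fields still follow the provider.

async fn lock_series_description(pool: &sqlx::PgPool, library_id: uuid::Uuid, name: &str) -> Result<(), sqlx::Error> {
    sqlx::query(
        r#"UPDATE series_metadata
           SET locked_fields = locked_fields || '{"description": true}'::jsonb
           WHERE library_id = $1 AND name = $2"#,
    )
    .bind(library_id)
    .bind(name)
    .execute(pool)
    .await?;
    Ok(())
}
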
// ---------------------------------------------------------------------------
// Book sync with diff tracking
// ---------------------------------------------------------------------------

async fn sync_book_with_diff(
    pool: &PgPool,
    book_id: Uuid,
    ext_book: &metadata_providers::BookCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Fetch current book state
    let current = sqlx::query(
        "SELECT summary, isbn, publish_date, language, authors, locked_fields FROM books WHERE id = $1",
    )
    .bind(book_id)
    .fetch_one(pool)
    .await
    .map_err(|e| e.to_string())?;

    let locked = current.get::<serde_json::Value, _>("locked_fields");
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("summary") {
        let old: Option<String> = current.get("summary");
        if let Some(d) = diff_opt_str("summary", old.as_deref(), ext_book.summary.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("isbn") {
        let old: Option<String> = current.get("isbn");
        if let Some(d) = diff_opt_str("isbn", old.as_deref(), ext_book.isbn.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("publish_date") {
        let old: Option<String> = current.get("publish_date");
        if let Some(d) = diff_opt_str("publish_date", old.as_deref(), ext_book.publish_date.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("language") {
        let old: Option<String> = current.get("language");
        if let Some(d) = diff_opt_str("language", old.as_deref(), ext_book.language.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old: Vec<String> = current.get("authors");
        if let Some(d) = diff_str_vec("authors", &old, &ext_book.authors) {
            diffs.push(d);
        }
    }

    // Do the actual update
    sqlx::query(
        r#"
        UPDATE books SET
            summary = CASE
                WHEN (locked_fields->>'summary')::boolean IS TRUE THEN summary
                ELSE COALESCE(NULLIF($2, ''), summary)
            END,
            isbn = CASE
                WHEN (locked_fields->>'isbn')::boolean IS TRUE THEN isbn
                ELSE COALESCE(NULLIF($3, ''), isbn)
            END,
            publish_date = CASE
                WHEN (locked_fields->>'publish_date')::boolean IS TRUE THEN publish_date
                ELSE COALESCE(NULLIF($4, ''), publish_date)
            END,
            language = CASE
                WHEN (locked_fields->>'language')::boolean IS TRUE THEN language
                ELSE COALESCE(NULLIF($5, ''), language)
            END,
            authors = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN authors
                WHEN CARDINALITY($6::text[]) > 0 THEN $6
                ELSE authors
            END,
            author = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN author
                WHEN CARDINALITY($6::text[]) > 0 THEN $6[1]
                ELSE author
            END,
            updated_at = NOW()
        WHERE id = $1
        "#,
    )
    .bind(book_id)
    .bind(&ext_book.summary)
    .bind(&ext_book.isbn)
    .bind(&ext_book.publish_date)
    .bind(&ext_book.language)
    .bind(&ext_book.authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
@@ -8,9 +8,16 @@ use utoipa::OpenApi;
         crate::books::get_book,
         crate::reading_progress::get_reading_progress,
         crate::reading_progress::update_reading_progress,
+        crate::reading_progress::mark_series_read,
         crate::books::get_thumbnail,
-        crate::books::list_series,
+        crate::series::list_series,
+        crate::series::list_all_series,
+        crate::series::ongoing_series,
+        crate::series::ongoing_books,
         crate::books::convert_book,
+        crate::books::update_book,
+        crate::series::get_series_metadata,
+        crate::series::update_series,
         crate::pages::get_page,
         crate::search::search_books,
         crate::index_jobs::enqueue_rebuild,
@@ -28,15 +35,43 @@ use utoipa::OpenApi;
         crate::libraries::delete_library,
         crate::libraries::scan_library,
         crate::libraries::update_monitoring,
+        crate::libraries::update_metadata_provider,
         crate::tokens::list_tokens,
         crate::tokens::create_token,
         crate::tokens::revoke_token,
+        crate::tokens::delete_token,
+        crate::authors::list_authors,
+        crate::stats::get_stats,
         crate::settings::get_settings,
         crate::settings::get_setting,
         crate::settings::update_setting,
         crate::settings::clear_cache,
         crate::settings::get_cache_stats,
         crate::settings::get_thumbnail_stats,
+        crate::metadata::search_metadata,
+        crate::metadata::create_metadata_match,
+        crate::metadata::approve_metadata,
+        crate::metadata::reject_metadata,
+        crate::metadata::get_metadata_links,
+        crate::metadata::get_missing_books,
+        crate::metadata::delete_metadata_link,
+        crate::series::series_statuses,
+        crate::series::provider_statuses,
+        crate::settings::list_status_mappings,
+        crate::settings::upsert_status_mapping,
+        crate::settings::delete_status_mapping,
+        crate::prowlarr::search_prowlarr,
+        crate::prowlarr::test_prowlarr,
+        crate::qbittorrent::add_torrent,
+        crate::qbittorrent::test_qbittorrent,
+        crate::metadata_batch::start_batch,
+        crate::metadata_batch::get_batch_report,
+        crate::metadata_batch::get_batch_results,
+        crate::metadata_refresh::start_refresh,
+        crate::metadata_refresh::get_refresh_report,
+        crate::komga::sync_komga_read_books,
+        crate::komga::list_sync_reports,
+        crate::komga::get_sync_report,
     ),
     components(
         schemas(
@@ -46,8 +81,16 @@ use utoipa::OpenApi;
             crate::books::BookDetails,
             crate::reading_progress::ReadingProgressResponse,
             crate::reading_progress::UpdateReadingProgressRequest,
-            crate::books::SeriesItem,
-            crate::books::SeriesPage,
+            crate::reading_progress::MarkSeriesReadRequest,
+            crate::reading_progress::MarkSeriesReadResponse,
+            crate::series::SeriesItem,
+            crate::series::SeriesPage,
+            crate::series::ListAllSeriesQuery,
+            crate::series::OngoingQuery,
+            crate::books::UpdateBookRequest,
+            crate::series::SeriesMetadata,
+            crate::series::UpdateSeriesRequest,
+            crate::series::UpdateSeriesResponse,
             crate::pages::PageQuery,
             crate::search::SearchQuery,
             crate::search::SearchResponse,
@@ -62,6 +105,7 @@ use utoipa::OpenApi;
             crate::libraries::LibraryResponse,
             crate::libraries::CreateLibraryRequest,
             crate::libraries::UpdateMonitoringRequest,
+            crate::libraries::UpdateMetadataProviderRequest,
             crate::tokens::CreateTokenRequest,
             crate::tokens::TokenResponse,
             crate::tokens::CreatedTokenResponse,
@@ -69,6 +113,50 @@ use utoipa::OpenApi;
             crate::settings::ClearCacheResponse,
             crate::settings::CacheStats,
             crate::settings::ThumbnailStats,
+            crate::settings::StatusMappingDto,
+            crate::settings::UpsertStatusMappingRequest,
+            crate::authors::ListAuthorsQuery,
+            crate::authors::AuthorItem,
+            crate::authors::AuthorsPageResponse,
+            crate::stats::StatsResponse,
+            crate::stats::StatsOverview,
+            crate::stats::ReadingStatusStats,
+            crate::stats::FormatCount,
+            crate::stats::LanguageCount,
+            crate::stats::LibraryStats,
+            crate::stats::TopSeries,
+            crate::stats::MonthlyAdditions,
+            crate::stats::MetadataStats,
+            crate::stats::ProviderCount,
+            crate::metadata::ApproveRequest,
+            crate::metadata::ApproveResponse,
+            crate::metadata::SyncReport,
+            crate::metadata::SeriesSyncReport,
+            crate::metadata::BookSyncReport,
+            crate::metadata::FieldChange,
+            crate::metadata::MetadataSearchRequest,
+            crate::metadata::SeriesCandidateDto,
+            crate::metadata::MetadataMatchRequest,
+            crate::metadata::ExternalMetadataLinkDto,
+            crate::metadata::MissingBooksDto,
+            crate::metadata::MissingBookItem,
+            crate::qbittorrent::QBittorrentAddRequest,
+            crate::qbittorrent::QBittorrentAddResponse,
+            crate::qbittorrent::QBittorrentTestResponse,
+            crate::prowlarr::ProwlarrSearchRequest,
+            crate::prowlarr::ProwlarrRelease,
+            crate::prowlarr::ProwlarrCategory,
+            crate::prowlarr::ProwlarrSearchResponse,
+            crate::prowlarr::MissingVolumeInput,
+            crate::prowlarr::ProwlarrTestResponse,
+            crate::metadata_batch::MetadataBatchRequest,
+            crate::metadata_batch::MetadataBatchReportDto,
+            crate::metadata_batch::MetadataBatchResultDto,
+            crate::metadata_refresh::MetadataRefreshRequest,
+            crate::metadata_refresh::MetadataRefreshReportDto,
+            crate::komga::KomgaSyncRequest,
+            crate::komga::KomgaSyncResponse,
+            crate::komga::KomgaSyncReportSummary,
             ErrorResponse,
         )
     ),
@@ -76,12 +164,20 @@ use utoipa::OpenApi;
         ("Bearer" = [])
    ),
    tags(
-        (name = "books", description = "Read-only endpoints for browsing and searching books"),
+        (name = "books", description = "Book browsing, details and management"),
+        (name = "series", description = "Series browsing, filtering and management"),
+        (name = "search", description = "Full-text search across books and series"),
         (name = "reading-progress", description = "Reading progress tracking per book"),
-        (name = "libraries", description = "Library management endpoints (Admin only)"),
+        (name = "authors", description = "Author browsing and listing"),
+        (name = "stats", description = "Collection statistics and dashboard data"),
+        (name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"),
         (name = "indexing", description = "Search index management and job control (Admin only)"),
+        (name = "metadata", description = "External metadata providers and matching (Admin only)"),
+        (name = "komga", description = "Komga read-status sync (Admin only)"),
         (name = "tokens", description = "API token management (Admin only)"),
         (name = "settings", description = "Application settings and cache management (Admin only)"),
+        (name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
+        (name = "qbittorrent", description = "qBittorrent download client integration (Admin only)"),
    ),
    modifiers(&SecurityAddon)
 )]
@@ -1,5 +1,5 @@
 use std::{
-    io::{Read, Write},
+    io::Write,
     path::{Path, PathBuf},
     sync::{atomic::Ordering, Arc},
     time::Duration,
@@ -16,7 +16,7 @@ use serde::Deserialize;
 use utoipa::ToSchema;
 use sha2::{Digest, Sha256};
 use sqlx::Row;
-use tracing::{debug, error, info, instrument, warn};
+use tracing::{error, info, instrument, warn};
 use uuid::Uuid;
 
 use crate::{error::ApiError, state::AppState};
@@ -32,9 +32,9 @@ fn remap_libraries_path(path: &str) -> String {
 
 fn parse_filter(s: &str) -> image::imageops::FilterType {
     match s {
-        "triangle" => image::imageops::FilterType::Triangle,
+        "lanczos3" => image::imageops::FilterType::Lanczos3,
         "nearest" => image::imageops::FilterType::Nearest,
-        _ => image::imageops::FilterType::Lanczos3,
+        _ => image::imageops::FilterType::Triangle, // Triangle (bilinear) is fast and good enough for comics
     }
 }
 
@@ -64,7 +64,7 @@ fn write_to_disk_cache(cache_path: &Path, data: &[u8]) -> Result<(), std::io::Er
     }
     let mut file = std::fs::File::create(cache_path)?;
     file.write_all(data)?;
-    file.sync_data()?;
+    // No sync_data() — this is a cache, durability is not critical
     Ok(())
 }
 
@@ -80,6 +80,8 @@ pub struct PageQuery {
 
 #[derive(Clone, Copy, Debug)]
 enum OutputFormat {
+    /// Serve raw bytes from the archive — no decode, no re-encode.
+    Original,
     Jpeg,
     Png,
     Webp,
@@ -87,16 +89,19 @@
 
 impl OutputFormat {
     fn parse(value: Option<&str>) -> Result<Self, ApiError> {
-        match value.unwrap_or("webp") {
-            "jpeg" | "jpg" => Ok(Self::Jpeg),
-            "png" => Ok(Self::Png),
-            "webp" => Ok(Self::Webp),
-            _ => Err(ApiError::bad_request("format must be webp|jpeg|png")),
+        match value {
+            None => Ok(Self::Original),
+            Some("original") => Ok(Self::Original),
+            Some("jpeg") | Some("jpg") => Ok(Self::Jpeg),
+            Some("png") => Ok(Self::Png),
+            Some("webp") => Ok(Self::Webp),
+            _ => Err(ApiError::bad_request("format must be original|webp|jpeg|png")),
         }
     }
 
     fn content_type(&self) -> &'static str {
         match self {
+            Self::Original => "application/octet-stream", // will be overridden by detected type
             Self::Jpeg => "image/jpeg",
             Self::Png => "image/png",
             Self::Webp => "image/webp",
@@ -105,6 +110,7 @@
 
     fn extension(&self) -> &'static str {
         match self {
+            Self::Original => "orig",
             Self::Jpeg => "jpg",
             Self::Png => "png",
             Self::Webp => "webp",
@@ -112,6 +118,17 @@
     }
 }
 
+/// Detect content type from raw image bytes.
+fn detect_content_type(data: &[u8]) -> &'static str {
+    match image::guess_format(data) {
+        Ok(ImageFormat::Jpeg) => "image/jpeg",
+        Ok(ImageFormat::Png) => "image/png",
+        Ok(ImageFormat::WebP) => "image/webp",
+        Ok(ImageFormat::Avif) => "image/avif",
+        _ => "application/octet-stream",
+    }
+}
+
 /// Get a specific page image from a book with optional format conversion
 #[utoipa::path(
     get,
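Client-side, the new default changes behavior: with no format parameter the raw archive bytes come back, so the content type must be read from the response rather than assumed. A sketch (reqwest assumed; the route path here is hypothetical, taken as /books/{id}/pages/{n}):

async fn fetch_page_bytes(base: &str, token: &str, book_id: &str, n: u32) -> Result<Vec<u8>, reqwest::Error> {
    let resp = reqwest::Client::new()
        .get(format!("{base}/books/{book_id}/pages/{n}")) // no ?format= -> Original
        .bearer_auth(token)
        .send()
        .await?;
    // e.g. "image/jpeg" as sniffed by detect_content_type, not a fixed "image/webp"
    println!("content-type: {:?}", resp.headers().get("content-type"));
    Ok(resp.bytes().await?.to_vec())
}
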
@@ -132,30 +149,26 @@
     ),
     security(("Bearer" = []))
 )]
-#[instrument(skip(state), fields(book_id = %book_id, page = n))]
+#[instrument(skip(state, headers), fields(book_id = %book_id, page = n))]
 pub async fn get_page(
     State(state): State<AppState>,
     AxumPath((book_id, n)): AxumPath<(Uuid, u32)>,
     Query(query): Query<PageQuery>,
+    headers: HeaderMap,
 ) -> Result<Response, ApiError> {
-    info!("Processing image request");
-
     if n == 0 {
-        warn!("Invalid page number: 0");
         return Err(ApiError::bad_request("page index starts at 1"));
     }
 
-    let (default_format, default_quality, max_width, filter_str, timeout_secs, cache_dir) = {
+    let (default_quality, max_width, filter_str, timeout_secs, cache_dir) = {
         let s = state.settings.read().await;
-        (s.image_format.clone(), s.image_quality, s.image_max_width, s.image_filter.clone(), s.timeout_seconds, s.cache_directory.clone())
+        (s.image_quality, s.image_max_width, s.image_filter.clone(), s.timeout_seconds, s.cache_directory.clone())
     };
 
-    let format_str = query.format.as_deref().unwrap_or(default_format.as_str());
-    let format = OutputFormat::parse(Some(format_str))?;
+    let format = OutputFormat::parse(query.format.as_deref())?;
     let quality = query.quality.unwrap_or(default_quality).clamp(1, 100);
     let width = query.width.unwrap_or(0);
     if width > max_width {
-        warn!("Invalid width: {}", width);
         return Err(ApiError::bad_request(format!("width must be <= {}", max_width)));
     }
     let filter = parse_filter(&filter_str);
@@ -165,11 +178,9 @@
 
     if let Some(cached) = state.page_cache.lock().await.get(&memory_cache_key).cloned() {
         state.metrics.page_cache_hits.fetch_add(1, Ordering::Relaxed);
-        debug!("Memory cache hit for key: {}", memory_cache_key);
-        return Ok(image_response(cached, format.content_type(), None));
+        return Ok(image_response(cached, format, None, &headers));
     }
     state.metrics.page_cache_misses.fetch_add(1, Ordering::Relaxed);
-    debug!("Memory cache miss for key: {}", memory_cache_key);
 
     let row = sqlx::query(
         r#"
@@ -191,7 +202,6 @@
     let row = match row {
         Some(r) => r,
         None => {
-            error!("Book file not found for book_id: {}", book_id);
             return Err(ApiError::not_found("book file not found"));
         }
     };
@@ -200,18 +210,22 @@
     let abs_path = remap_libraries_path(&abs_path);
     let input_format: String = row.get("format");
 
-    info!("Processing book file: {} (format: {})", abs_path, input_format);
 
     let disk_cache_key = get_cache_key(&abs_path, n, format.extension(), quality, width);
     let cache_path = get_cache_path(&disk_cache_key, &format, &cache_dir_path);
 
+    // If-None-Match: return 304 if the client already has this version
+    if let Some(if_none_match) = headers.get(header::IF_NONE_MATCH) {
+        let expected_etag = format!("\"{}\"", disk_cache_key);
+        if if_none_match.as_bytes() == expected_etag.as_bytes() {
+            return Ok(StatusCode::NOT_MODIFIED.into_response());
+        }
+    }
+
     if let Some(cached_bytes) = read_from_disk_cache(&cache_path) {
-        info!("Disk cache hit for: {}", cache_path.display());
         let bytes = Arc::new(cached_bytes);
         state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
-        return Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)));
+        return Ok(image_response(bytes, format, Some(&disk_cache_key), &headers));
     }
-    debug!("Disk cache miss for: {}", cache_path.display());
 
     let _permit = state
         .page_render_limit
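Since the ETag is just the quoted disk cache key, clients can revalidate cheaply: replaying it makes the handler return 304 before the archive is even opened. A sketch (reqwest assumed; url as in the previous example):

async fn page_unchanged(url: &str, token: &str, etag: &str) -> Result<bool, reqwest::Error> {
    let resp = reqwest::Client::new()
        .get(url)
        .bearer_auth(token)
        .header(reqwest::header::IF_NONE_MATCH, etag) // the quoted value from a prior ETag header
        .send()
        .await?;
    Ok(resp.status() == reqwest::StatusCode::NOT_MODIFIED)
}
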
@@ -223,7 +237,6 @@
             ApiError::internal("render limiter unavailable")
         })?;
 
-    info!("Rendering page {} from {}", n, abs_path);
     let abs_path_clone = abs_path.clone();
     let format_clone = format;
     let start_time = std::time::Instant::now();
@@ -248,18 +261,37 @@
 
     match bytes {
         Ok(data) => {
-            info!("Successfully rendered page {} in {:?}", n, duration);
+            info!("Rendered page {} in {:?}", n, duration);
 
             if let Err(e) = write_to_disk_cache(&cache_path, &data) {
                 warn!("Failed to write to disk cache: {}", e);
-            } else {
-                info!("Cached rendered image to: {}", cache_path.display());
             }
 
             let bytes = Arc::new(data);
-            state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
+            state.page_cache.lock().await.put(memory_cache_key.clone(), bytes.clone());
 
-            Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)))
+            // Prefetch next 2 pages in background (fire-and-forget)
+            for next_page in [n + 1, n + 2] {
+                let state2 = state.clone();
+                let abs_path2 = abs_path.clone();
+                let cache_dir2 = cache_dir_path.clone();
+                let format2 = format;
+                tokio::spawn(async move {
+                    prefetch_page(state2, &PrefetchParams {
+                        book_id,
+                        abs_path: &abs_path2,
+                        page: next_page,
+                        format: format2,
+                        quality,
+                        width,
+                        filter,
+                        timeout_secs,
+                        cache_dir: &cache_dir2,
+                    }).await;
+                });
+            }
+
+            Ok(image_response(bytes, format, Some(&disk_cache_key), &headers))
         }
         Err(e) => {
             error!("Failed to render page {} from {}: {:?}", n, abs_path, e);
@@ -268,11 +300,84 @@ pub async fn get_page(
         }
     }
 }

-fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&str>) -> Response {
-    let mut headers = HeaderMap::new();
-    headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
-    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
+struct PrefetchParams<'a> {
+    book_id: Uuid,
+    abs_path: &'a str,
+    page: u32,
+    format: OutputFormat,
+    quality: u8,
+    width: u32,
+    filter: image::imageops::FilterType,
+    timeout_secs: u64,
+    cache_dir: &'a Path,
+}
+
+/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
+async fn prefetch_page(state: AppState, params: &PrefetchParams<'_>) {
+    let book_id = params.book_id;
+    let page = params.page;
+    let format = params.format;
+    let quality = params.quality;
+    let width = params.width;
+    let filter = params.filter;
+    let timeout_secs = params.timeout_secs;
+    let abs_path = params.abs_path;
+    let cache_dir = params.cache_dir;
+
+    let mem_key = format!("{book_id}:{page}:{}:{quality}:{width}", format.extension());
+    // Already in memory cache?
+    if state.page_cache.lock().await.contains(&mem_key) {
+        return;
+    }
+    // Already on disk?
+    let disk_key = get_cache_key(abs_path, page, format.extension(), quality, width);
+    let cache_path = get_cache_path(&disk_key, &format, cache_dir);
+    if cache_path.exists() {
+        return;
+    }
+    // Acquire render permit (don't block too long — if busy, skip)
+    let permit = tokio::time::timeout(
+        Duration::from_millis(100),
+        state.page_render_limit.clone().acquire_owned(),
+    )
+    .await;
+    let _permit = match permit {
+        Ok(Ok(p)) => p,
+        _ => return,
+    };
+
+    // Fetch the book format from the path extension as a shortcut
+    let input_format = match abs_path.rsplit('.').next().map(|e| e.to_ascii_lowercase()) {
+        Some(ref e) if e == "cbz" => "cbz",
+        Some(ref e) if e == "cbr" => "cbr",
+        Some(ref e) if e == "pdf" => "pdf",
+        Some(ref e) if e == "epub" => "epub",
+        _ => return,
+    }
+    .to_string();
+
+    let abs_clone = abs_path.to_string();
+    let fmt = format;
+    let result = tokio::time::timeout(
+        Duration::from_secs(timeout_secs),
+        tokio::task::spawn_blocking(move || {
+            render_page(&abs_clone, &input_format, page, &fmt, quality, width, filter)
+        }),
+    )
+    .await;
+
+    if let Ok(Ok(Ok(data))) = result {
+        let _ = write_to_disk_cache(&cache_path, &data);
+        let bytes = Arc::new(data);
+        state.page_cache.lock().await.put(mem_key, bytes);
+    }
+}
+
+fn image_response(bytes: Arc<Vec<u8>>, format: OutputFormat, etag_suffix: Option<&str>, req_headers: &HeaderMap) -> Response {
+    let content_type = match format {
+        OutputFormat::Original => detect_content_type(&bytes),
+        _ => format.content_type(),
+    };
     let etag = if let Some(suffix) = etag_suffix {
         format!("\"{}\"", suffix)
     } else {
@@ -281,19 +386,37 @@ fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&
         format!("\"{:x}\"", hasher.finalize())
     };

+    // Check If-None-Match for 304
+    if let Some(if_none_match) = req_headers.get(header::IF_NONE_MATCH) {
+        if if_none_match.as_bytes() == etag.as_bytes() {
+            let mut headers = HeaderMap::new();
+            headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
             if let Ok(v) = HeaderValue::from_str(&etag) {
                 headers.insert(header::ETAG, v);
             }
-    (StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
+            return (StatusCode::NOT_MODIFIED, headers).into_response();
+        }
+    }
+
+    let mut headers = HeaderMap::new();
+    headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
+    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
+    if let Ok(v) = HeaderValue::from_str(&etag) {
+        headers.insert(header::ETAG, v);
+    }
+    // Convert into Bytes without copying when this Arc is the sole owner;
+    // otherwise Arc::unwrap_or_clone clones the Vec once.
+    let body_bytes = axum::body::Bytes::from(Arc::unwrap_or_clone(bytes));
+    (StatusCode::OK, headers, Body::from(body_bytes)).into_response()
 }

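For illustration, the conditional-request round trip this enables, as a client-side sketch (the URL, port, and book id are hypothetical; `etag` would be the quoted value returned by a first request — the handler compares bytes exactly, so the client must replay it verbatim):

use reqwest::{header, StatusCode};

async fn revalidate(etag: &str) -> reqwest::Result<()> {
    let client = reqwest::Client::new();
    // Replaying the cached ETag: a match short-circuits to 304 with an empty body.
    let resp = client
        .get("http://localhost:3000/books/<id>/pages/1")
        .header(header::IF_NONE_MATCH, etag)
        .send()
        .await?;
    assert_eq!(resp.status(), StatusCode::NOT_MODIFIED);
    Ok(())
}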
 /// Render page 1 of a book (for thumbnail fallback or thumbnail checkup). Uses thumbnail dimensions by default.
+/// Render page 1 as a thumbnail fallback. Returns (bytes, content_type).
 pub async fn render_book_page_1(
     state: &AppState,
     book_id: Uuid,
     width: u32,
     quality: u8,
-) -> Result<Vec<u8>, ApiError> {
+) -> Result<(Vec<u8>, &'static str), ApiError> {
     let row = sqlx::query(
         r#"SELECT abs_path, format FROM book_files WHERE book_id = $1 ORDER BY updated_at DESC LIMIT 1"#,
     )
@@ -328,7 +451,7 @@ pub async fn render_book_page_1(
         &abs_path_clone,
         &input_format,
         1,
-        &OutputFormat::Webp,
+        &OutputFormat::Original,
         quality,
         width,
         filter,
@@ -339,7 +462,9 @@ pub async fn render_book_page_1(
     .map_err(|_| ApiError::internal("page rendering timeout"))?
     .map_err(|e| ApiError::internal(format!("render task failed: {e}")))?;

-    bytes
+    let bytes = bytes?;
+    let content_type = detect_content_type(&bytes);
+    Ok((bytes, content_type))
 }

 fn render_page(
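A hypothetical caller of the new signature, for orientation (the width and quality values are made up):

async fn thumbnail_fallback(state: &AppState, book_id: Uuid) -> Result<(), ApiError> {
    // Destructure the (bytes, content_type) pair; the content type now reflects
    // whatever the source page actually was, since rendering uses OutputFormat::Original.
    let (bytes, content_type) = render_book_page_1(state, book_id, 320, 80).await?;
    debug!("fallback thumbnail: {} bytes, {}", bytes.len(), content_type);
    Ok(())
}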
@@ -351,190 +476,113 @@ fn render_page(
     width: u32,
     filter: image::imageops::FilterType,
 ) -> Result<Vec<u8>, ApiError> {
-    let page_bytes = match input_format {
-        "cbz" => extract_cbz_page(abs_path, page_number)?,
-        "cbr" => extract_cbr_page(abs_path, page_number)?,
-        "pdf" => render_pdf_page(abs_path, page_number, width)?,
+    let format = match input_format {
+        "cbz" => parsers::BookFormat::Cbz,
+        "cbr" => parsers::BookFormat::Cbr,
+        "pdf" => parsers::BookFormat::Pdf,
+        "epub" => parsers::BookFormat::Epub,
         _ => return Err(ApiError::bad_request("unsupported source format")),
     };

+    let pdf_render_width = if width > 0 { width } else { 1200 };
+    let page_bytes = parsers::extract_page(
+        std::path::Path::new(abs_path),
+        format,
+        page_number,
+        pdf_render_width,
+    )
+    .map_err(|e| {
+        error!("Failed to extract page {} from {}: {}", page_number, abs_path, e);
+        ApiError::internal(format!("page extraction failed: {e}"))
+    })?;
+
+    // Original mode or source matches output with no resize → return raw bytes (zero transcoding)
+    if matches!(out_format, OutputFormat::Original) && width == 0 {
+        return Ok(page_bytes);
+    }
+    if width == 0 {
+        if let Ok(source_fmt) = image::guess_format(&page_bytes) {
+            if format_matches(&source_fmt, out_format) {
+                return Ok(page_bytes);
+            }
+        }
+    }
+
     transcode_image(&page_bytes, out_format, quality, width, filter)
 }

-fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
-    debug!("Opening CBZ archive: {}", abs_path);
-    let file = std::fs::File::open(abs_path).map_err(|e| {
-        if e.kind() == std::io::ErrorKind::NotFound {
-            ApiError::not_found("book file not accessible")
-        } else {
-            error!("Cannot open CBZ file {}: {}", abs_path, e);
-            ApiError::internal(format!("cannot open cbz: {e}"))
-        }
-    })?;
-    let mut archive = zip::ZipArchive::new(file).map_err(|e| {
-        error!("Invalid CBZ archive {}: {}", abs_path, e);
-        ApiError::internal(format!("invalid cbz: {e}"))
-    })?;
-    let mut image_names: Vec<String> = Vec::new();
-    for i in 0..archive.len() {
-        let entry = archive.by_index(i).map_err(|e| {
-            error!("Failed to read CBZ entry {} in {}: {}", i, abs_path, e);
-            ApiError::internal(format!("cbz entry read failed: {e}"))
-        })?;
-        let name = entry.name().to_ascii_lowercase();
-        if is_image_name(&name) {
-            image_names.push(entry.name().to_string());
-        }
-    }
-    image_names.sort_by(|a, b| natord::compare(a, b));
-    debug!("Found {} images in CBZ {}", image_names.len(), abs_path);
-    let index = page_number as usize - 1;
-    let selected = image_names.get(index).ok_or_else(|| {
-        error!("Page {} out of range in {} (total: {})", page_number, abs_path, image_names.len());
-        ApiError::not_found("page out of range")
-    })?;
-    debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path);
-    let mut entry = archive.by_name(selected).map_err(|e| {
-        error!("Failed to read CBZ page {} from {}: {}", selected, abs_path, e);
-        ApiError::internal(format!("cbz page read failed: {e}"))
-    })?;
-    let mut buf = Vec::new();
-    entry.read_to_end(&mut buf).map_err(|e| {
-        error!("Failed to load CBZ page {} from {}: {}", selected, abs_path, e);
-        ApiError::internal(format!("cbz page load failed: {e}"))
-    })?;
-    Ok(buf)
-}
-
-fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
-    info!("Opening CBR archive: {}", abs_path);
-    let index = page_number as usize - 1;
-    // Pass 1: list all image names (in-process, no subprocess)
-    let mut image_names: Vec<String> = {
-        let archive = unrar::Archive::new(abs_path)
-            .open_for_listing()
-            .map_err(|e| ApiError::internal(format!("unrar listing failed: {}", e)))?;
-        let mut names = Vec::new();
-        for entry in archive {
-            let entry = entry.map_err(|e| ApiError::internal(format!("unrar entry error: {}", e)))?;
-            let name = entry.filename.to_string_lossy().to_string();
-            if is_image_name(&name.to_ascii_lowercase()) {
-                names.push(name);
-            }
-        }
-        names
-    };
-    image_names.sort_by(|a, b| natord::compare(a, b));
-    let target = image_names
-        .get(index)
-        .ok_or_else(|| {
-            error!("Page {} out of range (total: {})", page_number, image_names.len());
-            ApiError::not_found("page out of range")
-        })?
-        .clone();
-    // Pass 2: extract only the target page to memory
-    let mut archive = unrar::Archive::new(abs_path)
-        .open_for_processing()
-        .map_err(|e| ApiError::internal(format!("unrar processing failed: {}", e)))?;
-    while let Some(header) = archive
-        .read_header()
-        .map_err(|e| ApiError::internal(format!("unrar read header: {}", e)))?
-    {
-        let entry_name = header.entry().filename.to_string_lossy().to_string();
-        if entry_name == target {
-            let (data, _) = header
-                .read()
-                .map_err(|e| ApiError::internal(format!("unrar read: {}", e)))?;
-            info!("Extracted CBR page {} ({} bytes)", page_number, data.len());
-            return Ok(data);
-        }
-        archive = header
-            .skip()
-            .map_err(|e| ApiError::internal(format!("unrar skip: {}", e)))?;
-    }
-    Err(ApiError::not_found("page not found in archive"))
-}
-
-fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
-    use pdfium_render::prelude::*;
-    debug!("Rendering PDF page {} of {} (width: {})", page_number, abs_path, width);
-    let pdfium = Pdfium::new(
-        Pdfium::bind_to_system_library()
-            .map_err(|e| ApiError::internal(format!("pdfium not available: {:?}", e)))?,
-    );
-    let document = pdfium
-        .load_pdf_from_file(abs_path, None)
-        .map_err(|e| ApiError::internal(format!("pdf load failed: {:?}", e)))?;
-    let page_index = (page_number - 1) as u16;
-    let page = document
-        .pages()
-        .get(page_index)
-        .map_err(|_| ApiError::not_found("page out of range"))?;
-    let render_width = if width > 0 { width as i32 } else { 1200 };
-    let config = PdfRenderConfig::new().set_target_width(render_width);
-    let bitmap = page
-        .render_with_config(&config)
-        .map_err(|e| ApiError::internal(format!("pdf render failed: {:?}", e)))?;
-    let image = bitmap.as_image();
-    let mut buf = std::io::Cursor::new(Vec::new());
-    image
-        .write_to(&mut buf, image::ImageFormat::Png)
-        .map_err(|e| ApiError::internal(format!("png encode failed: {}", e)))?;
-    debug!("Rendered PDF page {} ({} bytes)", page_number, buf.get_ref().len());
-    Ok(buf.into_inner())
-}
+/// Fast JPEG decode with DCT scaling: decodes directly at reduced resolution.
+fn fast_jpeg_decode(input: &[u8], target_w: u32, target_h: u32) -> Option<image::DynamicImage> {
+    if image::guess_format(input).ok()? != ImageFormat::Jpeg {
+        return None;
+    }
+    let mut decoder = jpeg_decoder::Decoder::new(std::io::Cursor::new(input));
+    decoder.read_info().ok()?;
+    decoder.scale(target_w as u16, target_h as u16).ok()?;
+    let pixels = decoder.decode().ok()?;
+    let info = decoder.info()?;
+    let w = info.width as u32;
+    let h = info.height as u32;
+    match info.pixel_format {
+        jpeg_decoder::PixelFormat::RGB24 => {
+            let buf = image::RgbImage::from_raw(w, h, pixels)?;
+            Some(image::DynamicImage::ImageRgb8(buf))
+        }
+        jpeg_decoder::PixelFormat::L8 => {
+            let buf = image::GrayImage::from_raw(w, h, pixels)?;
+            Some(image::DynamicImage::ImageLuma8(buf))
+        }
+        _ => None,
+    }
+}

 fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width: u32, filter: image::imageops::FilterType) -> Result<Vec<u8>, ApiError> {
-    debug!("Transcoding image: {} bytes, format: {:?}, quality: {}, width: {}", input.len(), out_format, quality, width);
     let source_format = image::guess_format(input).ok();
-    debug!("Source format detected: {:?}", source_format);
-    let needs_transcode = source_format.map(|f| !format_matches(&f, out_format)).unwrap_or(true);
+    // Resolve "Original" to the actual source format for encoding
+    let effective_format = match out_format {
+        OutputFormat::Original => match source_format {
+            Some(ImageFormat::Png) => OutputFormat::Png,
+            Some(ImageFormat::WebP) => OutputFormat::Webp,
+            _ => OutputFormat::Jpeg, // default to JPEG for original resize
+        },
+        other => *other,
+    };
+
+    let needs_transcode = source_format.map(|f| !format_matches(&f, &effective_format)).unwrap_or(true);
+
     if width == 0 && !needs_transcode {
-        debug!("No transcoding needed, returning original");
         return Ok(input.to_vec());
     }

-    debug!("Loading image from memory...");
-    let mut image = image::load_from_memory(input).map_err(|e| {
-        error!("Failed to load image from memory: {} (input size: {} bytes)", e, input.len());
-        ApiError::internal(format!("invalid source image: {e}"))
-    })?;
+    // For JPEG with resize: use DCT scaling to decode at ~target size (much faster)
+    let mut image = if width > 0 {
+        fast_jpeg_decode(input, width, u32::MAX)
+            .unwrap_or_else(|| {
+                image::load_from_memory(input).unwrap_or_default()
+            })
+    } else {
+        image::load_from_memory(input).map_err(|e| {
+            ApiError::internal(format!("invalid source image: {e}"))
+        })?
+    };

     if width > 0 {
-        debug!("Resizing image to width: {}", width);
         image = image.resize(width, u32::MAX, filter);
     }

-    debug!("Converting to RGBA...");
     let rgba = image.to_rgba8();
     let (w, h) = rgba.dimensions();
-    debug!("Image dimensions: {}x{}", w, h);

     let mut out = Vec::new();
-    match out_format {
-        OutputFormat::Jpeg => {
+    match effective_format {
+        OutputFormat::Jpeg | OutputFormat::Original => {
+            // JPEG doesn't support alpha — convert RGBA to RGB
+            let rgb = image::DynamicImage::ImageRgba8(rgba.clone()).to_rgb8();
             let mut encoder = JpegEncoder::new_with_quality(&mut out, quality);
             encoder
-                .encode(&rgba, w, h, ColorType::Rgba8.into())
+                .encode(&rgb, w, h, ColorType::Rgb8.into())
                 .map_err(|e| ApiError::internal(format!("jpeg encode failed: {e}")))?;
         }
         OutputFormat::Png => {
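A brief aside on the DCT-scaling trick used by fast_jpeg_decode above: jpeg-decoder's scale() asks the decoder to perform the inverse DCT at a reduced size (steps of 1/8 of the original), so a large page is never fully decoded when only a small width is needed; the exact target width is still reached by the image.resize that follows. A standalone sketch under those assumptions (the file path is hypothetical):

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = std::fs::read("page.jpg")?; // hypothetical input
    let mut decoder = jpeg_decoder::Decoder::new(std::io::Cursor::new(&data[..]));
    decoder.read_info()?;
    // Request ~800px wide; the decoder picks a supported DCT scale,
    // typically the nearest size at least as large as requested.
    let (w, h) = decoder.scale(800, u16::MAX)?;
    println!("decoding at {w}x{h} instead of full resolution");
    let _pixels = decoder.decode()?;
    Ok(())
}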
@@ -549,7 +597,7 @@ fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width:
                 .flat_map(|p| [p[0], p[1], p[2]])
                 .collect();
             let webp_data = webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h)
-                .encode(f32::max(quality as f32, 85.0));
+                .encode(quality as f32);
             out.extend_from_slice(&webp_data);
         }
     }
@@ -565,20 +613,3 @@ fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool {
     )
 }
-
-fn is_image_name(name: &str) -> bool {
-    let lower = name.to_lowercase();
-    lower.ends_with(".jpg")
-        || lower.ends_with(".jpeg")
-        || lower.ends_with(".png")
-        || lower.ends_with(".webp")
-        || lower.ends_with(".avif")
-        || lower.ends_with(".gif")
-        || lower.ends_with(".tif")
-        || lower.ends_with(".tiff")
-        || lower.ends_with(".bmp")
-}
-
-#[allow(dead_code)]
-fn _is_absolute_path(value: &str) -> bool {
-    Path::new(value).is_absolute()
-}
363	apps/api/src/prowlarr.rs	Normal file
@@ -0,0 +1,363 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

// ─── Types ──────────────────────────────────────────────────────────────────

#[derive(Deserialize, ToSchema)]
pub struct MissingVolumeInput {
    pub volume_number: Option<i32>,
    #[allow(dead_code)]
    pub title: Option<String>,
}

#[derive(Deserialize, ToSchema)]
pub struct ProwlarrSearchRequest {
    pub series_name: String,
    pub volume_number: Option<i32>,
    pub custom_query: Option<String>,
    pub missing_volumes: Option<Vec<MissingVolumeInput>>,
}

#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRawRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
}

#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub matched_missing_volumes: Option<Vec<i32>>,
}

#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrCategory {
    pub id: i32,
    pub name: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct ProwlarrSearchResponse {
    pub results: Vec<ProwlarrRelease>,
    pub query: String,
}

#[derive(Serialize, ToSchema)]
pub struct ProwlarrTestResponse {
    pub success: bool,
    pub message: String,
    pub indexer_count: Option<i32>,
}

// ─── Config helper ──────────────────────────────────────────────────────────

#[derive(Deserialize)]
struct ProwlarrConfig {
    url: String,
    api_key: String,
    categories: Option<Vec<i32>>,
}

async fn load_prowlarr_config(
    pool: &sqlx::PgPool,
) -> Result<(String, String, Vec<i32>), ApiError> {
    let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'prowlarr'")
        .fetch_optional(pool)
        .await?;

    let row = row.ok_or_else(|| ApiError::bad_request("Prowlarr is not configured"))?;
    let value: serde_json::Value = row.get("value");
    let config: ProwlarrConfig = serde_json::from_value(value)
        .map_err(|e| ApiError::internal(format!("invalid prowlarr config: {e}")))?;

    if config.url.is_empty() || config.api_key.is_empty() {
        return Err(ApiError::bad_request(
            "Prowlarr URL and API key must be configured in settings",
        ));
    }

    let url = config.url.trim_end_matches('/').to_string();
    let categories = config.categories.unwrap_or_else(|| vec![7030, 7020]);

    Ok((url, config.api_key, categories))
}

// ─── Volume matching ─────────────────────────────────────────────────────────

/// Extract volume numbers from a release title.
/// Looks for patterns like: T01, Tome 01, Vol. 01, v01, #01,
/// or standalone numbers that appear after common separators.
fn extract_volumes_from_title(title: &str) -> Vec<i32> {
    let lower = title.to_lowercase();
    let mut volumes = Vec::new();

    // Patterns: T01, Tome 01, Tome01, Vol 01, Vol.01, v01, #01
    let prefixes = ["tome", "vol.", "vol ", "t", "v", "#"];
    let chars: Vec<char> = lower.chars().collect();
    let len = chars.len();

    for prefix in &prefixes {
        let mut start = 0;
        while let Some(pos) = lower[start..].find(prefix) {
            let abs_pos = start + pos;
            let after = abs_pos + prefix.len();

            // For single-char prefixes (t, v, #), ensure it's at a word boundary
            if prefix.len() == 1 && *prefix != "#" {
                if abs_pos > 0 && chars[abs_pos - 1].is_alphanumeric() {
                    start = after;
                    continue;
                }
            }

            // Skip optional spaces after prefix
            let mut i = after;
            while i < len && chars[i] == ' ' {
                i += 1;
            }

            // Read digits
            let digit_start = i;
            while i < len && chars[i].is_ascii_digit() {
                i += 1;
            }

            if i > digit_start {
                if let Ok(num) = lower[digit_start..i].parse::<i32>() {
                    if !volumes.contains(&num) {
                        volumes.push(num);
                    }
                }
            }

            start = after;
        }
    }

    volumes
}

/// Match releases against missing volume numbers.
fn match_missing_volumes(
    releases: Vec<ProwlarrRawRelease>,
    missing: &[MissingVolumeInput],
) -> Vec<ProwlarrRelease> {
    let missing_numbers: Vec<i32> = missing
        .iter()
        .filter_map(|m| m.volume_number)
        .collect();

    releases
        .into_iter()
        .map(|r| {
            let matched = if missing_numbers.is_empty() {
                None
            } else {
                let title_volumes = extract_volumes_from_title(&r.title);
                let matched: Vec<i32> = title_volumes
                    .into_iter()
                    .filter(|v| missing_numbers.contains(v))
                    .collect();
                if matched.is_empty() {
                    None
                } else {
                    Some(matched)
                }
            };

            ProwlarrRelease {
                guid: r.guid,
                title: r.title,
                size: r.size,
                download_url: r.download_url,
                indexer: r.indexer,
                seeders: r.seeders,
                leechers: r.leechers,
                publish_date: r.publish_date,
                protocol: r.protocol,
                info_url: r.info_url,
                categories: r.categories,
                matched_missing_volumes: matched,
            }
        })
        .collect()
}

// ─── Handlers ───────────────────────────────────────────────────────────────

/// Search for releases on Prowlarr
#[utoipa::path(
    post,
    path = "/prowlarr/search",
    tag = "prowlarr",
    request_body = ProwlarrSearchRequest,
    responses(
        (status = 200, body = ProwlarrSearchResponse),
        (status = 400, description = "Bad request or Prowlarr not configured"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Prowlarr connection error"),
    ),
    security(("Bearer" = []))
)]
pub async fn search_prowlarr(
    State(state): State<AppState>,
    Json(body): Json<ProwlarrSearchRequest>,
) -> Result<Json<ProwlarrSearchResponse>, ApiError> {
    let (url, api_key, categories) = load_prowlarr_config(&state.pool).await?;

    let query = if let Some(custom) = &body.custom_query {
        custom.clone()
    } else if let Some(vol) = body.volume_number {
        format!("\"{}\" {}", body.series_name, vol)
    } else {
        format!("\"{}\"", body.series_name)
    };

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let mut params: Vec<(&str, String)> = vec![
        ("query", query.clone()),
        ("type", "search".to_string()),
    ];
    for cat in &categories {
        params.push(("categories", cat.to_string()));
    }

    let resp = client
        .get(format!("{url}/api/v1/search"))
        .query(&params)
        .header("X-Api-Key", &api_key)
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("Prowlarr request failed: {e}")))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!(
            "Prowlarr returned {status}: {text}"
        )));
    }

    let raw_text = resp
        .text()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to read Prowlarr response: {e}")))?;

    tracing::debug!("Prowlarr raw response length: {} chars", raw_text.len());

    let raw_releases: Vec<ProwlarrRawRelease> = serde_json::from_str(&raw_text)
        .map_err(|e| {
            tracing::error!("Failed to parse Prowlarr response: {e}");
            tracing::error!("Raw response (first 500 chars): {}", &raw_text[..raw_text.len().min(500)]);
            ApiError::internal(format!("Failed to parse Prowlarr response: {e}"))
        })?;

    let results = if let Some(missing) = &body.missing_volumes {
        match_missing_volumes(raw_releases, missing)
    } else {
        raw_releases
            .into_iter()
            .map(|r| ProwlarrRelease {
                guid: r.guid,
                title: r.title,
                size: r.size,
                download_url: r.download_url,
                indexer: r.indexer,
                seeders: r.seeders,
                leechers: r.leechers,
                publish_date: r.publish_date,
                protocol: r.protocol,
                info_url: r.info_url,
                categories: r.categories,
                matched_missing_volumes: None,
            })
            .collect()
    };

    Ok(Json(ProwlarrSearchResponse { results, query }))
}

/// Test connection to Prowlarr
#[utoipa::path(
    get,
    path = "/prowlarr/test",
    tag = "prowlarr",
    responses(
        (status = 200, body = ProwlarrTestResponse),
        (status = 400, description = "Prowlarr not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_prowlarr(
    State(state): State<AppState>,
) -> Result<Json<ProwlarrTestResponse>, ApiError> {
    let (url, api_key, _categories) = load_prowlarr_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let resp = client
        .get(format!("{url}/api/v1/indexer"))
        .header("X-Api-Key", &api_key)
        .send()
        .await;

    match resp {
        Ok(r) if r.status().is_success() => {
            let indexers: Vec<serde_json::Value> = r.json().await.unwrap_or_default();
            Ok(Json(ProwlarrTestResponse {
                success: true,
                message: format!("Connected successfully ({} indexers)", indexers.len()),
                indexer_count: Some(indexers.len() as i32),
            }))
        }
        Ok(r) => {
            let status = r.status();
            let text = r.text().await.unwrap_or_default();
            Ok(Json(ProwlarrTestResponse {
                success: false,
                message: format!("Prowlarr returned {status}: {text}"),
                indexer_count: None,
            }))
        }
        Err(e) => Ok(Json(ProwlarrTestResponse {
            success: false,
            message: format!("Connection failed: {e}"),
            indexer_count: None,
        })),
    }
}
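To make the title parser's behavior concrete, a hypothetical test (the titles are invented; the expected values follow the algorithm above):

#[cfg(test)]
mod tests {
    use super::extract_volumes_from_title;

    #[test]
    fn parses_common_volume_markers() {
        // "t" at a word boundary, twice: T01 and T03.
        assert_eq!(extract_volumes_from_title("One Piece T01-T03 FR"), vec![1, 3]);
        // "vol." prefix with a space before the digits; the year stays unmatched.
        assert_eq!(extract_volumes_from_title("Naruto Vol. 12 (2023)"), vec![12]);
        // The "t" inside "Naruto" is preceded by a letter, so it is skipped.
        assert!(extract_volumes_from_title("Naruto").is_empty());
    }
}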
218	apps/api/src/qbittorrent.rs	Normal file
@@ -0,0 +1,218 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

// ─── Types ──────────────────────────────────────────────────────────────────

#[derive(Deserialize, ToSchema)]
pub struct QBittorrentAddRequest {
    pub url: String,
}

#[derive(Serialize, ToSchema)]
pub struct QBittorrentAddResponse {
    pub success: bool,
    pub message: String,
}

#[derive(Serialize, ToSchema)]
pub struct QBittorrentTestResponse {
    pub success: bool,
    pub message: String,
    pub version: Option<String>,
}

// ─── Config helper ──────────────────────────────────────────────────────────

#[derive(Deserialize)]
struct QBittorrentConfig {
    url: String,
    username: String,
    password: String,
}

async fn load_qbittorrent_config(
    pool: &sqlx::PgPool,
) -> Result<(String, String, String), ApiError> {
    let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'")
        .fetch_optional(pool)
        .await?;

    let row = row.ok_or_else(|| ApiError::bad_request("qBittorrent is not configured"))?;
    let value: serde_json::Value = row.get("value");
    let config: QBittorrentConfig = serde_json::from_value(value)
        .map_err(|e| ApiError::internal(format!("invalid qbittorrent config: {e}")))?;

    if config.url.is_empty() || config.username.is_empty() {
        return Err(ApiError::bad_request(
            "qBittorrent URL and username must be configured in settings",
        ));
    }

    let url = config.url.trim_end_matches('/').to_string();
    Ok((url, config.username, config.password))
}

// ─── Login helper ───────────────────────────────────────────────────────────

async fn qbittorrent_login(
    client: &reqwest::Client,
    base_url: &str,
    username: &str,
    password: &str,
) -> Result<String, ApiError> {
    let resp = client
        .post(format!("{base_url}/api/v2/auth/login"))
        .form(&[("username", username), ("password", password)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent login request failed: {e}")))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!(
            "qBittorrent login failed ({status}): {text}"
        )));
    }

    // Extract SID from Set-Cookie header
    let cookie_header = resp
        .headers()
        .get("set-cookie")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("");

    let sid = cookie_header
        .split(';')
        .next()
        .and_then(|s| s.strip_prefix("SID="))
        .ok_or_else(|| ApiError::internal("Failed to get SID cookie from qBittorrent"))?
        .to_string();

    Ok(sid)
}

// ─── Handlers ───────────────────────────────────────────────────────────────

/// Add a torrent to qBittorrent
#[utoipa::path(
    post,
    path = "/qbittorrent/add",
    tag = "qbittorrent",
    request_body = QBittorrentAddRequest,
    responses(
        (status = 200, body = QBittorrentAddResponse),
        (status = 400, description = "Bad request or qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "qBittorrent connection error"),
    ),
    security(("Bearer" = []))
)]
pub async fn add_torrent(
    State(state): State<AppState>,
    Json(body): Json<QBittorrentAddRequest>,
) -> Result<Json<QBittorrentAddResponse>, ApiError> {
    if body.url.is_empty() {
        return Err(ApiError::bad_request("url is required"));
    }

    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let sid = qbittorrent_login(&client, &base_url, &username, &password).await?;

    let resp = client
        .post(format!("{base_url}/api/v2/torrents/add"))
        .header("Cookie", format!("SID={sid}"))
        .form(&[("urls", &body.url)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?;

    if resp.status().is_success() {
        Ok(Json(QBittorrentAddResponse {
            success: true,
            message: "Torrent added to qBittorrent".to_string(),
        }))
    } else {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        Ok(Json(QBittorrentAddResponse {
            success: false,
            message: format!("qBittorrent returned {status}: {text}"),
        }))
    }
}

/// Test connection to qBittorrent
#[utoipa::path(
    get,
    path = "/qbittorrent/test",
    tag = "qbittorrent",
    responses(
        (status = 200, body = QBittorrentTestResponse),
        (status = 400, description = "qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_qbittorrent(
    State(state): State<AppState>,
) -> Result<Json<QBittorrentTestResponse>, ApiError> {
    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let sid = match qbittorrent_login(&client, &base_url, &username, &password).await {
        Ok(sid) => sid,
        Err(e) => {
            return Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("Login failed: {}", e.message),
                version: None,
            }));
        }
    };

    let resp = client
        .get(format!("{base_url}/api/v2/app/version"))
        .header("Cookie", format!("SID={sid}"))
        .send()
        .await;

    match resp {
        Ok(r) if r.status().is_success() => {
            let version = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: true,
                message: format!("Connected successfully ({})", version.trim()),
                version: Some(version.trim().to_string()),
            }))
        }
        Ok(r) => {
            let status = r.status();
            let text = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("qBittorrent returned {status}: {text}"),
                version: None,
            }))
        }
        Err(e) => Ok(Json(QBittorrentTestResponse {
            success: false,
            message: format!("Connection failed: {e}"),
            version: None,
        })),
    }
}
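For orientation, the two-step cookie flow the helpers above implement, as a hypothetical caller (the host and credentials are placeholders):

async fn demo() -> Result<(), ApiError> {
    let client = reqwest::Client::new();
    // Step 1: POST /api/v2/auth/login returns a Set-Cookie carrying the SID.
    let sid = qbittorrent_login(&client, "http://localhost:8080", "admin", "adminadmin").await?;
    // Step 2: every subsequent WebUI API call replays that cookie.
    let version = client
        .get("http://localhost:8080/api/v2/app/version")
        .header("Cookie", format!("SID={sid}"))
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("request failed: {e}")))?
        .text()
        .await
        .unwrap_or_default();
    println!("qBittorrent {version}");
    Ok(())
}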
@@ -1,11 +1,11 @@
-use axum::{extract::{Path, State}, Json};
+use axum::{extract::{Extension, Path, State}, Json};
 use chrono::{DateTime, Utc};
 use serde::{Deserialize, Serialize};
 use sqlx::Row;
 use uuid::Uuid;
 use utoipa::ToSchema;

-use crate::{error::ApiError, state::AppState};
+use crate::{auth::AuthUser, error::ApiError, state::AppState};

 #[derive(Serialize, ToSchema)]
 pub struct ReadingProgressResponse {
@@ -42,8 +42,10 @@ pub struct UpdateReadingProgressRequest {
 )]
 pub async fn get_reading_progress(
     State(state): State<AppState>,
+    user: Option<Extension<AuthUser>>,
     Path(id): Path<Uuid>,
 ) -> Result<Json<ReadingProgressResponse>, ApiError> {
+    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
     // Verify book exists
     let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)")
         .bind(id)
@@ -55,9 +57,10 @@ pub async fn get_reading_progress(
     }

     let row = sqlx::query(
-        "SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1",
+        "SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1 AND user_id = $2",
     )
     .bind(id)
+    .bind(auth_user.user_id)
     .fetch_optional(&state.pool)
     .await?;

@@ -96,9 +99,11 @@ pub async fn get_reading_progress(
 )]
 pub async fn update_reading_progress(
     State(state): State<AppState>,
+    user: Option<Extension<AuthUser>>,
     Path(id): Path<Uuid>,
     Json(body): Json<UpdateReadingProgressRequest>,
 ) -> Result<Json<ReadingProgressResponse>, ApiError> {
+    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
     // Validate status value
     if !["unread", "reading", "read"].contains(&body.status.as_str()) {
         return Err(ApiError::bad_request(format!(
@@ -143,9 +148,9 @@ pub async fn update_reading_progress(

     let row = sqlx::query(
         r#"
-        INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
-        VALUES ($1, $2, $3, NOW(), NOW())
-        ON CONFLICT (book_id) DO UPDATE
+        INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
+        VALUES ($1, $2, $3, $4, NOW(), NOW())
+        ON CONFLICT (book_id, user_id) DO UPDATE
         SET status = EXCLUDED.status,
             current_page = EXCLUDED.current_page,
             last_read_at = NOW(),
@@ -154,6 +159,7 @@ pub async fn update_reading_progress(
         "#,
     )
     .bind(id)
+    .bind(auth_user.user_id)
     .bind(&body.status)
     .bind(current_page)
     .fetch_one(&state.pool)
@@ -165,3 +171,120 @@ pub async fn update_reading_progress(
         last_read_at: row.get("last_read_at"),
     }))
 }
+
+#[derive(Deserialize, ToSchema)]
+pub struct MarkSeriesReadRequest {
+    /// Series name (use "unclassified" for books without series)
+    pub series: String,
+    /// Status to set: "read" or "unread"
+    pub status: String,
+}
+
+#[derive(Serialize, ToSchema)]
+pub struct MarkSeriesReadResponse {
+    pub updated: i64,
+}
+
+/// Mark all books in a series as read or unread
+#[utoipa::path(
+    post,
+    path = "/series/mark-read",
+    tag = "reading-progress",
+    request_body = MarkSeriesReadRequest,
+    responses(
+        (status = 200, body = MarkSeriesReadResponse),
+        (status = 422, description = "Invalid status"),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn mark_series_read(
+    State(state): State<AppState>,
+    user: Option<Extension<AuthUser>>,
+    Json(body): Json<MarkSeriesReadRequest>,
+) -> Result<Json<MarkSeriesReadResponse>, ApiError> {
+    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
+    if !["read", "unread"].contains(&body.status.as_str()) {
+        return Err(ApiError::bad_request(
+            "status must be 'read' or 'unread'",
+        ));
+    }
+
+    let series_filter = if body.series == "unclassified" {
+        "(series IS NULL OR series = '')"
+    } else {
+        "series = $1"
+    };
+
+    let sql = if body.status == "unread" {
+        // Delete progress records to reset to unread (scoped to this user)
+        if body.series == "unclassified" {
+            format!(
+                r#"
+                WITH target_books AS (
+                    SELECT id FROM books WHERE {series_filter}
+                )
+                DELETE FROM book_reading_progress
+                WHERE book_id IN (SELECT id FROM target_books) AND user_id = $1
+                "#
+            )
+        } else {
+            format!(
+                r#"
+                WITH target_books AS (
+                    SELECT id FROM books WHERE {series_filter}
+                )
+                DELETE FROM book_reading_progress
+                WHERE book_id IN (SELECT id FROM target_books) AND user_id = $2
+                "#
+            )
+        }
+    } else if body.series == "unclassified" {
+        format!(
+            r#"
+            INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
+            SELECT id, $1, 'read', NULL, NOW(), NOW()
+            FROM books
+            WHERE {series_filter}
+            ON CONFLICT (book_id, user_id) DO UPDATE
+            SET status = 'read',
+                current_page = NULL,
+                last_read_at = NOW(),
+                updated_at = NOW()
+            "#
+        )
+    } else {
+        format!(
+            r#"
+            INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
+            SELECT id, $2, 'read', NULL, NOW(), NOW()
+            FROM books
+            WHERE {series_filter}
+            ON CONFLICT (book_id, user_id) DO UPDATE
+            SET status = 'read',
+                current_page = NULL,
+                last_read_at = NOW(),
+                updated_at = NOW()
+            "#
+        )
+    };
+
+    let result = if body.series == "unclassified" {
+        // $1 = user_id (no series bind needed)
+        sqlx::query(&sql)
+            .bind(auth_user.user_id)
+            .execute(&state.pool)
+            .await?
+    } else {
+        // $1 = series, $2 = user_id
+        sqlx::query(&sql)
+            .bind(&body.series)
+            .bind(auth_user.user_id)
+            .execute(&state.pool)
+            .await?
+    };

+    Ok(Json(MarkSeriesReadResponse {
+        updated: result.rows_affected() as i64,
+    }))
+}
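A hypothetical client call against the new endpoint (the host and token are placeholders; the body fields come from MarkSeriesReadRequest):

async fn mark_read_demo() -> reqwest::Result<()> {
    let client = reqwest::Client::new();
    let resp = client
        .post("http://localhost:3000/series/mark-read")
        .bearer_auth("<user token>")
        .json(&serde_json::json!({ "series": "unclassified", "status": "read" }))
        .send()
        .await?;
    // Response body: {"updated": <number of rows affected>}
    println!("{}", resp.text().await?);
    Ok(())
}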
@@ -39,15 +39,15 @@ pub struct SearchResponse {
     pub processing_time_ms: Option<u64>,
 }

-/// Search books across all libraries using Meilisearch
+/// Search books across all libraries
 #[utoipa::path(
     get,
     path = "/search",
-    tag = "books",
+    tag = "search",
     params(
-        ("q" = String, Query, description = "Search query (books via Meilisearch + series via ILIKE)"),
+        ("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
         ("library_id" = Option<String>, Query, description = "Filter by library ID"),
-        ("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf)"),
+        ("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf, epub)"),
         ("kind" = Option<String>, Query, description = "Filter by kind (alias for type)"),
         ("limit" = Option<usize>, Query, description = "Max results per type (max 100)"),
     ),
@@ -65,34 +65,38 @@ pub async fn search_books(
         return Err(ApiError::bad_request("q is required"));
     }

-    let mut filters: Vec<String> = Vec::new();
-    if let Some(library_id) = query.library_id.as_deref() {
-        filters.push(format!("library_id = '{}'", library_id.replace('"', "")));
-    }
-    let kind_filter = query.r#type.as_deref().or(query.kind.as_deref());
-    if let Some(kind) = kind_filter {
-        filters.push(format!("kind = '{}'", kind.replace('"', "")));
-    }
-
-    let body = serde_json::json!({
-        "q": query.q,
-        "limit": query.limit.unwrap_or(20).clamp(1, 100),
-        "filter": if filters.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(filters.join(" AND ")) }
-    });
-
-    let limit_val = query.limit.unwrap_or(20).clamp(1, 100);
+    let limit_val = query.limit.unwrap_or(20).clamp(1, 100) as i64;
     let q_pattern = format!("%{}%", query.q);
-    let library_id_uuid: Option<uuid::Uuid> = query.library_id.as_deref()
+    let library_id_uuid: Option<Uuid> = query.library_id.as_deref()
         .and_then(|s| s.parse().ok());
+    let kind_filter: Option<&str> = query.r#type.as_deref().or(query.kind.as_deref());

-    // Meilisearch query (books) + PG series search in parallel
-    let client = reqwest::Client::new();
-    let url = format!("{}/indexes/books/search", state.meili_url.trim_end_matches('/'));
-    let meili_fut = client
-        .post(&url)
-        .header("Authorization", format!("Bearer {}", state.meili_master_key))
-        .json(&body)
-        .send();
+    let start = std::time::Instant::now();
+
+    // Book search via PostgreSQL ILIKE on title, authors, series
+    let books_sql = r#"
+        SELECT b.id, b.library_id, b.kind, b.title,
+               COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) as authors,
+               b.series, b.volume, b.language
+        FROM books b
+        LEFT JOIN series_metadata sm
+               ON sm.library_id = b.library_id
+              AND sm.name = COALESCE(NULLIF(b.series, ''), 'unclassified')
+        WHERE (
+            b.title ILIKE $1
+            OR b.series ILIKE $1
+            OR EXISTS (SELECT 1 FROM unnest(
+                COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)
+                || COALESCE(sm.authors, ARRAY[]::text[])
+            ) AS a WHERE a ILIKE $1)
+        )
+        AND ($2::uuid IS NULL OR b.library_id = $2)
+        AND ($3::text IS NULL OR b.kind = $3)
+        ORDER BY
+            CASE WHEN b.title ILIKE $1 THEN 0 ELSE 1 END,
+            b.title ASC
+        LIMIT $4
+    "#;

     let series_sql = r#"
         WITH sorted_books AS (
@@ -108,7 +112,7 @@ pub async fn search_books(
                 title ASC
             ) as rn
             FROM books
-            WHERE ($1::uuid IS NULL OR library_id = $1)
+            WHERE ($2::uuid IS NULL OR library_id = $2)
         ),
         series_counts AS (
             SELECT
@@ -123,39 +127,49 @@ pub async fn search_books(
|
|||||||
SELECT sc.library_id, sc.name, sc.book_count, sc.books_read_count, sb.id as first_book_id
|
SELECT sc.library_id, sc.name, sc.book_count, sc.books_read_count, sb.id as first_book_id
|
||||||
FROM series_counts sc
|
FROM series_counts sc
|
||||||
JOIN sorted_books sb ON sb.library_id = sc.library_id AND sb.name = sc.name AND sb.rn = 1
|
JOIN sorted_books sb ON sb.library_id = sc.library_id AND sb.name = sc.name AND sb.rn = 1
|
||||||
WHERE sc.name ILIKE $2
|
WHERE sc.name ILIKE $1
|
||||||
ORDER BY sc.name ASC
|
ORDER BY sc.name ASC
|
||||||
LIMIT $3
|
LIMIT $4
|
||||||
"#;
|
"#;
|
||||||
|
|
||||||
let series_fut = sqlx::query(series_sql)
|
let (books_rows, series_rows) = tokio::join!(
|
||||||
.bind(library_id_uuid)
|
sqlx::query(books_sql)
|
||||||
.bind(&q_pattern)
|
.bind(&q_pattern)
|
||||||
.bind(limit_val as i64)
|
.bind(library_id_uuid)
|
||||||
.fetch_all(&state.pool);
|
.bind(kind_filter)
|
||||||
|
.bind(limit_val)
|
||||||
|
.fetch_all(&state.pool),
|
||||||
|
sqlx::query(series_sql)
|
||||||
|
.bind(&q_pattern)
|
||||||
|
.bind(library_id_uuid)
|
||||||
|
.bind(kind_filter) // unused in series query but keeps bind positions consistent
|
||||||
|
.bind(limit_val)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
);
|
||||||
|
|
||||||
let (meili_resp, series_rows) = tokio::join!(meili_fut, series_fut);
|
let elapsed_ms = start.elapsed().as_millis() as u64;
|
||||||
|
|
||||||
// Traitement Meilisearch
|
// Build book hits as JSON array (same shape as before)
|
||||||
let meili_resp = meili_resp.map_err(|e| ApiError::internal(format!("meili request failed: {e}")))?;
|
let books_rows = books_rows.map_err(|e| ApiError::internal(format!("book search failed: {e}")))?;
|
||||||
let (hits, estimated_total_hits, processing_time_ms) = if !meili_resp.status().is_success() {
|
let hits: Vec<serde_json::Value> = books_rows
|
||||||
let body = meili_resp.text().await.unwrap_or_default();
|
.iter()
|
||||||
if body.contains("index_not_found") {
|
.map(|row| {
|
||||||
(serde_json::json!([]), Some(0u64), Some(0u64))
|
serde_json::json!({
|
||||||
} else {
|
"id": row.get::<Uuid, _>("id").to_string(),
|
||||||
return Err(ApiError::internal(format!("meili error: {body}")));
|
"library_id": row.get::<Uuid, _>("library_id").to_string(),
|
||||||
}
|
"kind": row.get::<String, _>("kind"),
|
||||||
} else {
|
"title": row.get::<String, _>("title"),
|
||||||
let payload: serde_json::Value = meili_resp.json().await
|
"authors": row.get::<Vec<String>, _>("authors"),
|
||||||
.map_err(|e| ApiError::internal(format!("invalid meili response: {e}")))?;
|
"series": row.get::<Option<String>, _>("series"),
|
||||||
(
|
"volume": row.get::<Option<i32>, _>("volume"),
|
||||||
payload.get("hits").cloned().unwrap_or_else(|| serde_json::json!([])),
|
"language": row.get::<Option<String>, _>("language"),
|
||||||
payload.get("estimatedTotalHits").and_then(|v| v.as_u64()),
|
})
|
||||||
payload.get("processingTimeMs").and_then(|v| v.as_u64()),
|
})
|
||||||
)
|
.collect();
|
||||||
};
|
|
||||||
|
|
||||||
// Traitement séries
|
let estimated_total_hits = hits.len() as u64;
|
||||||
|
|
||||||
|
// Series hits
|
||||||
let series_hits: Vec<SeriesHit> = series_rows
|
let series_hits: Vec<SeriesHit> = series_rows
|
||||||
.unwrap_or_default()
|
.unwrap_or_default()
|
||||||
.iter()
|
.iter()
|
||||||
@@ -169,9 +183,9 @@ pub async fn search_books(
|
|||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Ok(Json(SearchResponse {
|
Ok(Json(SearchResponse {
|
||||||
hits,
|
hits: serde_json::Value::Array(hits),
|
||||||
series_hits,
|
series_hits,
|
||||||
estimated_total_hits,
|
estimated_total_hits: Some(estimated_total_hits),
|
||||||
processing_time_ms,
|
processing_time_ms: Some(elapsed_ms),
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
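The rewrite above replaces the Meilisearch round-trip with two Postgres queries issued concurrently via `tokio::join!`, binding the same values in the same positions for both queries so the call sites stay symmetrical (at the cost of one unused `$3` in the series query). A minimal, self-contained sketch of that pattern; the table names and queries here are illustrative, not the app's actual schema:

```rust
use sqlx::postgres::PgPool;

// Run the two searches concurrently; both futures are polled together and
// the caller only waits for the slower of the two.
async fn run_both(pool: &PgPool, pattern: &str, limit: i64) -> Result<(usize, usize), sqlx::Error> {
    let (books, series) = tokio::join!(
        sqlx::query("SELECT title FROM books WHERE title ILIKE $1 LIMIT $2")
            .bind(pattern)
            .bind(limit)
            .fetch_all(pool),
        sqlx::query("SELECT name FROM series WHERE name ILIKE $1 LIMIT $2")
            .bind(pattern)
            .bind(limit)
            .fetch_all(pool),
    );
    // Each branch fails independently; `?` surfaces whichever error occurred.
    Ok((books?.len(), series?.len()))
}
```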
apps/api/src/series.rs (new file, 1043 lines)
File diff suppressed because it is too large
```diff
@@ -1,11 +1,12 @@
 use axum::{
-    extract::State,
+    extract::{Path as AxumPath, State},
-    routing::{get, post},
+    routing::{delete, get, post},
     Json, Router,
 };
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use sqlx::Row;
+use uuid::Uuid;
 use utoipa::ToSchema;

 use crate::{error::ApiError, state::{AppState, load_dynamic_settings}};
@@ -42,6 +43,14 @@ pub fn settings_routes() -> Router<AppState> {
         .route("/settings/cache/clear", post(clear_cache))
         .route("/settings/cache/stats", get(get_cache_stats))
         .route("/settings/thumbnail/stats", get(get_thumbnail_stats))
+        .route(
+            "/settings/status-mappings",
+            get(list_status_mappings).post(upsert_status_mapping),
+        )
+        .route(
+            "/settings/status-mappings/:id",
+            delete(delete_status_mapping),
+        )
 }

 /// List all settings
@@ -324,3 +333,125 @@ pub async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<

     Ok(Json(stats))
 }
+
+// ---------------------------------------------------------------------------
+// Status Mappings
+// ---------------------------------------------------------------------------
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct StatusMappingDto {
+    pub id: String,
+    pub provider_status: String,
+    pub mapped_status: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct UpsertStatusMappingRequest {
+    pub provider_status: String,
+    pub mapped_status: String,
+}
+
+/// List all status mappings
+#[utoipa::path(
+    get,
+    path = "/settings/status-mappings",
+    tag = "settings",
+    responses(
+        (status = 200, body = Vec<StatusMappingDto>),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn list_status_mappings(
+    State(state): State<AppState>,
+) -> Result<Json<Vec<StatusMappingDto>>, ApiError> {
+    let rows = sqlx::query(
+        "SELECT id, provider_status, mapped_status FROM status_mappings ORDER BY mapped_status NULLS LAST, provider_status",
+    )
+    .fetch_all(&state.pool)
+    .await?;
+
+    let mappings = rows
+        .iter()
+        .map(|row| StatusMappingDto {
+            id: row.get::<Uuid, _>("id").to_string(),
+            provider_status: row.get("provider_status"),
+            mapped_status: row.get::<Option<String>, _>("mapped_status"),
+        })
+        .collect();
+
+    Ok(Json(mappings))
+}
+
+/// Create or update a status mapping
+#[utoipa::path(
+    post,
+    path = "/settings/status-mappings",
+    tag = "settings",
+    request_body = UpsertStatusMappingRequest,
+    responses(
+        (status = 200, body = StatusMappingDto),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn upsert_status_mapping(
+    State(state): State<AppState>,
+    Json(body): Json<UpsertStatusMappingRequest>,
+) -> Result<Json<StatusMappingDto>, ApiError> {
+    let provider_status = body.provider_status.to_lowercase();
+
+    let row = sqlx::query(
+        r#"
+        INSERT INTO status_mappings (provider_status, mapped_status)
+        VALUES ($1, $2)
+        ON CONFLICT (provider_status)
+        DO UPDATE SET mapped_status = $2, updated_at = NOW()
+        RETURNING id, provider_status, mapped_status
+        "#,
+    )
+    .bind(&provider_status)
+    .bind(&body.mapped_status)
+    .fetch_one(&state.pool)
+    .await?;
+
+    Ok(Json(StatusMappingDto {
+        id: row.get::<Uuid, _>("id").to_string(),
+        provider_status: row.get("provider_status"),
+        mapped_status: row.get::<Option<String>, _>("mapped_status"),
+    }))
+}
+
+/// Unmap a status mapping (sets mapped_status to NULL, keeps the provider status known)
+#[utoipa::path(
+    delete,
+    path = "/settings/status-mappings/{id}",
+    tag = "settings",
+    params(("id" = String, Path, description = "Mapping UUID")),
+    responses(
+        (status = 200, body = StatusMappingDto),
+        (status = 401, description = "Unauthorized"),
+        (status = 404, description = "Not found"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn delete_status_mapping(
+    State(state): State<AppState>,
+    AxumPath(id): AxumPath<Uuid>,
+) -> Result<Json<StatusMappingDto>, ApiError> {
+    let row = sqlx::query(
+        "UPDATE status_mappings SET mapped_status = NULL, updated_at = NOW() WHERE id = $1 RETURNING id, provider_status, mapped_status",
+    )
+    .bind(id)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    match row {
+        Some(row) => Ok(Json(StatusMappingDto {
+            id: row.get::<Uuid, _>("id").to_string(),
+            provider_status: row.get("provider_status"),
+            mapped_status: row.get::<Option<String>, _>("mapped_status"),
+        })),
+        None => Err(ApiError::not_found("status mapping not found")),
+    }
+}
```
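A rough client-side sketch of the new status-mapping endpoints; the base URL and bearer token are placeholders, and the request/response shapes follow `UpsertStatusMappingRequest` and `StatusMappingDto` above:

```rust
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();

    // Map the provider's "completed" status onto the local "read" status.
    let created: serde_json::Value = client
        .post("http://localhost:8080/settings/status-mappings")
        .bearer_auth("ADMIN_TOKEN") // placeholder
        .json(&json!({ "provider_status": "completed", "mapped_status": "read" }))
        .send()
        .await?
        .json()
        .await?;
    println!("upserted: {created}");

    // List everything that is currently mapped (or known but unmapped).
    let all: serde_json::Value = client
        .get("http://localhost:8080/settings/status-mappings")
        .bearer_auth("ADMIN_TOKEN")
        .send()
        .await?
        .json()
        .await?;
    println!("mappings: {all}");
    Ok(())
}
```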
```diff
@@ -12,8 +12,6 @@ use tokio::sync::{Mutex, RwLock, Semaphore};
 pub struct AppState {
     pub pool: sqlx::PgPool,
     pub bootstrap_token: Arc<str>,
-    pub meili_url: Arc<str>,
-    pub meili_master_key: Arc<str>,
     pub page_cache: Arc<Mutex<LruCache<String, Arc<Vec<u8>>>>>,
     pub page_render_limit: Arc<Semaphore>,
     pub metrics: Arc<Metrics>,
@@ -39,7 +37,7 @@ impl Default for DynamicSettings {
             timeout_seconds: 12,
             image_format: "webp".to_string(),
             image_quality: 85,
-            image_filter: "lanczos3".to_string(),
+            image_filter: "triangle".to_string(),
             image_max_width: 2160,
             cache_directory: std::env::var("IMAGE_CACHE_DIR")
                 .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()),
```
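The default `image_filter` moves from `lanczos3` to `triangle`, trading some sharpness on large downscales for noticeably faster resizes. A sketch of what that setting amounts to, assuming the `image` crate is what consumes it (an assumption; the diff only shows the setting string):

```rust
use image::{imageops::FilterType, DynamicImage, GenericImageView};

// Downscale a page to fit `image_max_width`, preserving aspect ratio.
fn resize_page(img: &DynamicImage, max_width: u32) -> DynamicImage {
    let filter = FilterType::Triangle; // previous default: FilterType::Lanczos3
    let (w, h) = img.dimensions();
    if w <= max_width {
        return img.clone(); // already small enough, skip the resample
    }
    let new_h = (h as u64 * max_width as u64 / w as u64) as u32;
    img.resize(max_width, new_h, filter)
}
```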
apps/api/src/stats.rs (new file, 819 lines)

```rust
use axum::{
    extract::{Extension, Query, State},
    Json,
};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::{IntoParams, ToSchema};

use crate::{auth::AuthUser, error::ApiError, state::AppState};

#[derive(Deserialize, IntoParams)]
pub struct StatsQuery {
    /// Granularity: "day", "week" or "month" (default: "month")
    pub period: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct StatsOverview {
    pub total_books: i64,
    pub total_series: i64,
    pub total_libraries: i64,
    pub total_pages: i64,
    pub total_size_bytes: i64,
    pub total_authors: i64,
}

#[derive(Serialize, ToSchema)]
pub struct ReadingStatusStats {
    pub unread: i64,
    pub reading: i64,
    pub read: i64,
}

#[derive(Serialize, ToSchema)]
pub struct FormatCount {
    pub format: String,
    pub count: i64,
}

#[derive(Serialize, ToSchema)]
pub struct LanguageCount {
    pub language: Option<String>,
    pub count: i64,
}

#[derive(Serialize, ToSchema)]
pub struct LibraryStats {
    pub library_name: String,
    pub book_count: i64,
    pub size_bytes: i64,
    pub read_count: i64,
    pub reading_count: i64,
    pub unread_count: i64,
}

#[derive(Serialize, ToSchema)]
pub struct TopSeries {
    pub series: String,
    pub book_count: i64,
    pub read_count: i64,
    pub total_pages: i64,
}

#[derive(Serialize, ToSchema)]
pub struct MonthlyAdditions {
    pub month: String,
    pub books_added: i64,
}

#[derive(Serialize, ToSchema)]
pub struct MetadataStats {
    pub total_series: i64,
    pub series_linked: i64,
    pub series_unlinked: i64,
    pub books_with_summary: i64,
    pub books_with_isbn: i64,
    pub by_provider: Vec<ProviderCount>,
}

#[derive(Serialize, ToSchema)]
pub struct ProviderCount {
    pub provider: String,
    pub count: i64,
}

#[derive(Serialize, ToSchema)]
pub struct CurrentlyReadingItem {
    pub book_id: String,
    pub title: String,
    pub series: Option<String>,
    pub current_page: i32,
    pub page_count: i32,
    pub username: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct RecentlyReadItem {
    pub book_id: String,
    pub title: String,
    pub series: Option<String>,
    pub last_read_at: String,
    pub username: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct MonthlyReading {
    pub month: String,
    pub books_read: i64,
}

#[derive(Serialize, ToSchema)]
pub struct UserMonthlyReading {
    pub month: String,
    pub username: String,
    pub books_read: i64,
}

#[derive(Serialize, ToSchema)]
pub struct JobTimePoint {
    pub label: String,
    pub scan: i64,
    pub rebuild: i64,
    pub thumbnail: i64,
    pub other: i64,
}

#[derive(Serialize, ToSchema)]
pub struct StatsResponse {
    pub overview: StatsOverview,
    pub reading_status: ReadingStatusStats,
    pub currently_reading: Vec<CurrentlyReadingItem>,
    pub recently_read: Vec<RecentlyReadItem>,
    pub reading_over_time: Vec<MonthlyReading>,
    pub by_format: Vec<FormatCount>,
    pub by_language: Vec<LanguageCount>,
    pub by_library: Vec<LibraryStats>,
    pub top_series: Vec<TopSeries>,
    pub additions_over_time: Vec<MonthlyAdditions>,
    pub jobs_over_time: Vec<JobTimePoint>,
    pub metadata: MetadataStats,
    pub users_reading_over_time: Vec<UserMonthlyReading>,
}

/// Get collection statistics for the dashboard
#[utoipa::path(
    get,
    path = "/stats",
    tag = "stats",
    params(StatsQuery),
    responses(
        (status = 200, body = StatsResponse),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_stats(
    State(state): State<AppState>,
    Query(query): Query<StatsQuery>,
    user: Option<Extension<AuthUser>>,
) -> Result<Json<StatsResponse>, ApiError> {
    let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
    let period = query.period.as_deref().unwrap_or("month");
    // Overview + reading status in one query
    let overview_row = sqlx::query(
        r#"
        SELECT
            COUNT(*) AS total_books,
            COUNT(DISTINCT NULLIF(series, '')) AS total_series,
            COUNT(DISTINCT library_id) AS total_libraries,
            COALESCE(SUM(page_count), 0)::BIGINT AS total_pages,
            (SELECT COUNT(DISTINCT a) FROM (
                SELECT DISTINCT UNNEST(authors) AS a FROM books WHERE authors != '{}'
                UNION
                SELECT DISTINCT author FROM books WHERE author IS NOT NULL AND author != ''
            ) sub) AS total_authors,
            COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread,
            COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading,
            COUNT(*) FILTER (WHERE brp.status = 'read') AS read
        FROM books b
        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
        "#,
    )
    .bind(user_id)
    .fetch_one(&state.pool)
    .await?;

    // Total size from book_files
    let size_row = sqlx::query(
        r#"
        SELECT COALESCE(SUM(bf.size_bytes), 0)::BIGINT AS total_size_bytes
        FROM (
            SELECT DISTINCT ON (book_id) size_bytes
            FROM book_files
            ORDER BY book_id, updated_at DESC
        ) bf
        "#,
    )
    .fetch_one(&state.pool)
    .await?;

    let overview = StatsOverview {
        total_books: overview_row.get("total_books"),
        total_series: overview_row.get("total_series"),
        total_libraries: overview_row.get("total_libraries"),
        total_pages: overview_row.get("total_pages"),
        total_size_bytes: size_row.get("total_size_bytes"),
        total_authors: overview_row.get("total_authors"),
    };

    let reading_status = ReadingStatusStats {
        unread: overview_row.get("unread"),
        reading: overview_row.get("reading"),
        read: overview_row.get("read"),
    };

    // By format
    let format_rows = sqlx::query(
        r#"
        SELECT COALESCE(bf.format, b.kind) AS fmt, COUNT(*) AS count
        FROM books b
        LEFT JOIN LATERAL (
            SELECT format FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
        ) bf ON TRUE
        GROUP BY fmt
        ORDER BY count DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let by_format: Vec<FormatCount> = format_rows
        .iter()
        .map(|r| FormatCount {
            format: r.get::<Option<String>, _>("fmt").unwrap_or_else(|| "unknown".to_string()),
            count: r.get("count"),
        })
        .collect();

    // By language
    let lang_rows = sqlx::query(
        r#"
        SELECT language, COUNT(*) AS count
        FROM books
        GROUP BY language
        ORDER BY count DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let by_language: Vec<LanguageCount> = lang_rows
        .iter()
        .map(|r| LanguageCount {
            language: r.get("language"),
            count: r.get("count"),
        })
        .collect();

    // By library
    let lib_rows = sqlx::query(
        r#"
        SELECT
            l.name AS library_name,
            COUNT(b.id) AS book_count,
            COALESCE(SUM(bf.size_bytes), 0)::BIGINT AS size_bytes,
            COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
            COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading_count,
            COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread_count
        FROM libraries l
        LEFT JOIN books b ON b.library_id = l.id
        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
        LEFT JOIN LATERAL (
            SELECT size_bytes FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
        ) bf ON TRUE
        GROUP BY l.id, l.name
        ORDER BY book_count DESC
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let by_library: Vec<LibraryStats> = lib_rows
        .iter()
        .map(|r| LibraryStats {
            library_name: r.get("library_name"),
            book_count: r.get("book_count"),
            size_bytes: r.get("size_bytes"),
            read_count: r.get("read_count"),
            reading_count: r.get("reading_count"),
            unread_count: r.get("unread_count"),
        })
        .collect();

    // Top series (by book count)
    let series_rows = sqlx::query(
        r#"
        SELECT
            b.series,
            COUNT(*) AS book_count,
            COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
            COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
        FROM books b
        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
        WHERE b.series IS NOT NULL AND b.series != ''
        GROUP BY b.series
        ORDER BY book_count DESC
        LIMIT 10
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let top_series: Vec<TopSeries> = series_rows
        .iter()
        .map(|r| TopSeries {
            series: r.get("series"),
            book_count: r.get("book_count"),
            read_count: r.get("read_count"),
            total_pages: r.get("total_pages"),
        })
        .collect();

    // Additions over time (with gap filling)
    let additions_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT created_at::date AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY created_at::date
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', created_at) AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', created_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', created_at) AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', created_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let additions_over_time: Vec<MonthlyAdditions> = additions_rows
        .iter()
        .map(|r| MonthlyAdditions {
            month: r.get("month"),
            books_added: r.get("books_added"),
        })
        .collect();

    // Metadata stats
    let meta_row = sqlx::query(
        r#"
        SELECT
            (SELECT COUNT(DISTINCT NULLIF(series, '')) FROM books) AS total_series,
            (SELECT COUNT(DISTINCT series_name) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
            (SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary,
            (SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn
        "#,
    )
    .fetch_one(&state.pool)
    .await?;

    let meta_total_series: i64 = meta_row.get("total_series");
    let meta_series_linked: i64 = meta_row.get("series_linked");

    let provider_rows = sqlx::query(
        r#"
        SELECT provider, COUNT(DISTINCT series_name) AS count
        FROM external_metadata_links
        WHERE status = 'approved'
        GROUP BY provider
        ORDER BY count DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let by_provider: Vec<ProviderCount> = provider_rows
        .iter()
        .map(|r| ProviderCount {
            provider: r.get("provider"),
            count: r.get("count"),
        })
        .collect();

    let metadata = MetadataStats {
        total_series: meta_total_series,
        series_linked: meta_series_linked,
        series_unlinked: meta_total_series - meta_series_linked,
        books_with_summary: meta_row.get("books_with_summary"),
        books_with_isbn: meta_row.get("books_with_isbn"),
        by_provider,
    };

    // Currently reading books
    let reading_rows = sqlx::query(
        r#"
        SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username
        FROM book_reading_progress brp
        JOIN books b ON b.id = brp.book_id
        LEFT JOIN users u ON u.id = brp.user_id
        WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
          AND ($1::uuid IS NULL OR brp.user_id = $1)
        ORDER BY brp.updated_at DESC
        LIMIT 20
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let currently_reading: Vec<CurrentlyReadingItem> = reading_rows
        .iter()
        .map(|r| {
            let id: uuid::Uuid = r.get("book_id");
            CurrentlyReadingItem {
                book_id: id.to_string(),
                title: r.get("title"),
                series: r.get("series"),
                current_page: r.get::<Option<i32>, _>("current_page").unwrap_or(0),
                page_count: r.get::<Option<i32>, _>("page_count").unwrap_or(0),
                username: r.get("username"),
            }
        })
        .collect();

    // Recently read books
    let recent_rows = sqlx::query(
        r#"
        SELECT b.id AS book_id, b.title, b.series,
               TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
               u.username
        FROM book_reading_progress brp
        JOIN books b ON b.id = brp.book_id
        LEFT JOIN users u ON u.id = brp.user_id
        WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
          AND ($1::uuid IS NULL OR brp.user_id = $1)
        ORDER BY brp.last_read_at DESC
        LIMIT 10
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let recently_read: Vec<RecentlyReadItem> = recent_rows
        .iter()
        .map(|r| {
            let id: uuid::Uuid = r.get("book_id");
            RecentlyReadItem {
                book_id: id.to_string(),
                title: r.get("title"),
                series: r.get("series"),
                last_read_at: r.get::<Option<String>, _>("last_read_at").unwrap_or_default(),
                username: r.get("username"),
            }
        })
        .collect();

    // Reading activity over time (with gap filling)
    let reading_time_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT brp.last_read_at::date AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY brp.last_read_at::date
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY DATE_TRUNC('week', brp.last_read_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY DATE_TRUNC('month', brp.last_read_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
    };

    let reading_over_time: Vec<MonthlyReading> = reading_time_rows
        .iter()
        .map(|r| MonthlyReading {
            month: r.get::<Option<String>, _>("month").unwrap_or_default(),
            books_read: r.get("books_read"),
        })
        .collect();

    // Per-user reading over time (admin view: always all users, no user_id filter)
    let users_reading_time_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT brp.last_read_at::date AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY brp.last_read_at::date, brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', brp.last_read_at), brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', brp.last_read_at), brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let users_reading_over_time: Vec<UserMonthlyReading> = users_reading_time_rows
        .iter()
        .map(|r| UserMonthlyReading {
            month: r.get::<Option<String>, _>("month").unwrap_or_default(),
            username: r.get("username"),
            books_read: r.get("books_read"),
        })
        .collect();

    // Jobs over time (with gap filling, grouped by type category)
    let jobs_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT
                        finished_at::date AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY finished_at::date, cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT
                        DATE_TRUNC('week', finished_at) AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', finished_at), cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT
                        DATE_TRUNC('month', finished_at) AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', finished_at), cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let jobs_over_time: Vec<JobTimePoint> = jobs_rows
        .iter()
        .map(|r| JobTimePoint {
            label: r.get("label"),
            scan: r.get("scan"),
            rebuild: r.get("rebuild"),
            thumbnail: r.get("thumbnail"),
            other: r.get("other"),
        })
        .collect();

    Ok(Json(StatsResponse {
        overview,
        reading_status,
        currently_reading,
        recently_read,
        reading_over_time,
        by_format,
        by_language,
        by_library,
        top_series,
        additions_over_time,
        jobs_over_time,
        metadata,
        users_reading_over_time,
    }))
}
```
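Every time-series query in stats.rs uses the same gap-filling shape: `generate_series()` builds a complete calendar axis, and the real counts are LEFT JOINed onto it, so empty periods come back as 0 instead of vanishing from the chart. Stripped down to its core (same `books` table as above), the pattern looks like this:

```rust
use sqlx::{postgres::PgPool, Row};

// Daily book additions for the last 7 days, including days with zero adds.
async fn additions_last_7_days(pool: &PgPool) -> Result<Vec<(String, i64)>, sqlx::Error> {
    let rows = sqlx::query(
        r#"
        SELECT TO_CHAR(d.dt, 'YYYY-MM-DD') AS day,
               COALESCE(cnt.n, 0) AS n
        FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
        LEFT JOIN (
            SELECT created_at::date AS dt, COUNT(*) AS n
            FROM books
            GROUP BY created_at::date
        ) cnt ON cnt.dt = d.dt
        ORDER BY day
        "#,
    )
    .fetch_all(pool)
    .await?;
    Ok(rows.iter().map(|r| (r.get("day"), r.get("n"))).collect())
}
```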
apps/api/src/telegram.rs (new file, 46 lines)

```rust
use axum::{extract::State, Json};
use serde::Serialize;
use utoipa::ToSchema;

use crate::{error::ApiError, notifications, state::AppState};

#[derive(Serialize, ToSchema)]
pub struct TelegramTestResponse {
    pub success: bool,
    pub message: String,
}

/// Test Telegram connection by sending a test message
#[utoipa::path(
    get,
    path = "/telegram/test",
    tag = "notifications",
    responses(
        (status = 200, body = TelegramTestResponse),
        (status = 400, description = "Telegram not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_telegram(
    State(state): State<AppState>,
) -> Result<Json<TelegramTestResponse>, ApiError> {
    let config = notifications::load_telegram_config(&state.pool)
        .await
        .ok_or_else(|| {
            ApiError::bad_request(
                "Telegram is not configured or disabled. Set bot_token, chat_id, and enable it.",
            )
        })?;

    match notifications::send_test_message(&config).await {
        Ok(()) => Ok(Json(TelegramTestResponse {
            success: true,
            message: "Test message sent successfully".to_string(),
        })),
        Err(e) => Ok(Json(TelegramTestResponse {
            success: false,
            message: format!("Failed to send: {e}"),
        })),
    }
}
```
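`notifications::send_test_message` is defined elsewhere in the crate and is not shown in this compare; presumably it reduces to the Telegram Bot API's `sendMessage` method. A hypothetical sketch of that call (the function name and error handling here are illustrative, not the crate's actual implementation):

```rust
use serde_json::json;

// Send one text message via the Telegram Bot API.
async fn send_message(bot_token: &str, chat_id: &str, text: &str) -> Result<(), reqwest::Error> {
    let url = format!("https://api.telegram.org/bot{bot_token}/sendMessage");
    reqwest::Client::new()
        .post(&url)
        .json(&json!({ "chat_id": chat_id, "text": text }))
        .send()
        .await?
        .error_for_status()?; // non-2xx (bad token, wrong chat_id) becomes an Err
    Ok(())
}
```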
```diff
@@ -16,6 +16,8 @@ pub struct CreateTokenRequest {
     pub name: String,
     #[schema(value_type = Option<String>, example = "read")]
     pub scope: Option<String>,
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
 }

 #[derive(Serialize, ToSchema)]
@@ -26,6 +28,9 @@ pub struct TokenResponse {
     pub scope: String,
     pub prefix: String,
     #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+    pub username: Option<String>,
+    #[schema(value_type = Option<String>)]
     pub last_used_at: Option<DateTime<Utc>>,
     #[schema(value_type = Option<String>)]
     pub revoked_at: Option<DateTime<Utc>>,
@@ -71,6 +76,10 @@ pub async fn create_token(
         _ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")),
     };

+    if scope == "read" && input.user_id.is_none() {
+        return Err(ApiError::bad_request("user_id is required for read-scoped tokens"));
+    }
+
     let mut random = [0u8; 24];
     OsRng.fill_bytes(&mut random);
     let secret = URL_SAFE_NO_PAD.encode(random);
@@ -85,13 +94,14 @@ pub async fn create_token(

     let id = Uuid::new_v4();
     sqlx::query(
-        "INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)",
+        "INSERT INTO api_tokens (id, name, prefix, token_hash, scope, user_id) VALUES ($1, $2, $3, $4, $5, $6)",
     )
     .bind(id)
     .bind(input.name.trim())
     .bind(&prefix)
     .bind(token_hash)
     .bind(scope)
+    .bind(input.user_id)
     .execute(&state.pool)
     .await?;

@@ -118,7 +128,13 @@ pub async fn create_token(
 )]
 pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<TokenResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC",
+        r#"
+        SELECT t.id, t.name, t.scope, t.prefix, t.user_id, u.username,
+               t.last_used_at, t.revoked_at, t.created_at
+        FROM api_tokens t
+        LEFT JOIN users u ON u.id = t.user_id
+        ORDER BY t.created_at DESC
+        "#,
     )
     .fetch_all(&state.pool)
     .await?;
@@ -130,6 +146,8 @@ pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<Token
             name: row.get("name"),
             scope: row.get("scope"),
             prefix: row.get("prefix"),
+            user_id: row.get("user_id"),
+            username: row.get("username"),
             last_used_at: row.get("last_used_at"),
             revoked_at: row.get("revoked_at"),
             created_at: row.get("created_at"),
@@ -170,3 +188,76 @@ pub async fn revoke_token(

     Ok(Json(serde_json::json!({"revoked": true, "id": id})))
 }
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateTokenRequest {
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+}
+
+/// Update a token's assigned user
+#[utoipa::path(
+    patch,
+    path = "/admin/tokens/{id}",
+    tag = "tokens",
+    params(
+        ("id" = String, Path, description = "Token UUID"),
+    ),
+    request_body = UpdateTokenRequest,
+    responses(
+        (status = 200, description = "Token updated"),
+        (status = 404, description = "Token not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_token(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+    Json(input): Json<UpdateTokenRequest>,
+) -> Result<Json<serde_json::Value>, ApiError> {
+    let result = sqlx::query("UPDATE api_tokens SET user_id = $1 WHERE id = $2")
+        .bind(input.user_id)
+        .bind(id)
+        .execute(&state.pool)
+        .await?;
+
+    if result.rows_affected() == 0 {
+        return Err(ApiError::not_found("token not found"));
+    }
+
+    Ok(Json(serde_json::json!({"updated": true, "id": id})))
+}
+
+/// Permanently delete a revoked API token
+#[utoipa::path(
+    post,
+    path = "/admin/tokens/{id}/delete",
+    tag = "tokens",
+    params(
+        ("id" = String, Path, description = "Token UUID"),
+    ),
+    responses(
+        (status = 200, description = "Token permanently deleted"),
+        (status = 404, description = "Token not found or not revoked"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn delete_token(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+) -> Result<Json<serde_json::Value>, ApiError> {
+    let result = sqlx::query("DELETE FROM api_tokens WHERE id = $1 AND revoked_at IS NOT NULL")
+        .bind(id)
+        .execute(&state.pool)
+        .await?;
+
+    if result.rows_affected() == 0 {
+        return Err(ApiError::not_found("token not found or not revoked"));
+    }
+
+    Ok(Json(serde_json::json!({"deleted": true, "id": id})))
+}
```
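With tokens now carrying an optional `user_id`, an existing token can be pointed at a user through the new `PATCH /admin/tokens/{id}` route. A usage sketch only; the base URL, admin token, and both UUIDs are placeholders:

```rust
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let token_id = "00000000-0000-0000-0000-000000000000"; // placeholder token UUID
    let resp: serde_json::Value = reqwest::Client::new()
        .patch(format!("http://localhost:8080/admin/tokens/{token_id}"))
        .bearer_auth("ADMIN_TOKEN")
        .json(&serde_json::json!({
            "user_id": "00000000-0000-0000-0000-000000000001" // placeholder user UUID
        }))
        .send()
        .await?
        .json()
        .await?;
    println!("{resp}"); // expected shape: {"updated": true, "id": "..."}
    Ok(())
}
```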
apps/api/src/users.rs (new file, 195 lines)

```rust
use axum::{extract::{Path, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

#[derive(Serialize, ToSchema)]
pub struct UserResponse {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub username: String,
    pub token_count: i64,
    pub books_read: i64,
    pub books_reading: i64,
    #[schema(value_type = String)]
    pub created_at: DateTime<Utc>,
}

#[derive(Deserialize, ToSchema)]
pub struct CreateUserRequest {
    pub username: String,
}

/// List all reader users with their associated token count
#[utoipa::path(
    get,
    path = "/admin/users",
    tag = "users",
    responses(
        (status = 200, body = Vec<UserResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_users(State(state): State<AppState>) -> Result<Json<Vec<UserResponse>>, ApiError> {
    let rows = sqlx::query(
        r#"
        SELECT u.id, u.username, u.created_at,
               COUNT(DISTINCT t.id) AS token_count,
               COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read,
               COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'reading') AS books_reading
        FROM users u
        LEFT JOIN api_tokens t ON t.user_id = u.id AND t.revoked_at IS NULL
        LEFT JOIN book_reading_progress brp ON brp.user_id = u.id
        GROUP BY u.id, u.username, u.created_at
        ORDER BY u.created_at DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let items = rows
        .into_iter()
        .map(|row| UserResponse {
            id: row.get("id"),
            username: row.get("username"),
            token_count: row.get("token_count"),
            books_read: row.get("books_read"),
            books_reading: row.get("books_reading"),
            created_at: row.get("created_at"),
        })
        .collect();

    Ok(Json(items))
}

/// Create a new reader user
#[utoipa::path(
    post,
    path = "/admin/users",
    tag = "users",
    request_body = CreateUserRequest,
    responses(
        (status = 200, body = UserResponse, description = "User created"),
        (status = 400, description = "Invalid input"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn create_user(
    State(state): State<AppState>,
    Json(input): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
    if input.username.trim().is_empty() {
        return Err(ApiError::bad_request("username is required"));
    }

    let id = Uuid::new_v4();
    let row = sqlx::query(
        "INSERT INTO users (id, username) VALUES ($1, $2) RETURNING id, username, created_at",
    )
    .bind(id)
    .bind(input.username.trim())
    .fetch_one(&state.pool)
    .await
    .map_err(|e| {
        if let sqlx::Error::Database(ref db_err) = e {
            if db_err.constraint() == Some("users_username_key") {
                return ApiError::bad_request("username already exists");
            }
        }
        ApiError::from(e)
    })?;

    Ok(Json(UserResponse {
        id: row.get("id"),
        username: row.get("username"),
        token_count: 0,
        books_read: 0,
        books_reading: 0,
        created_at: row.get("created_at"),
    }))
}

/// Update a reader user's username
#[utoipa::path(
    patch,
    path = "/admin/users/{id}",
    tag = "users",
    request_body = CreateUserRequest,
    responses(
        (status = 200, body = UserResponse, description = "User updated"),
        (status = 400, description = "Invalid input"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(input): Json<CreateUserRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    if input.username.trim().is_empty() {
        return Err(ApiError::bad_request("username is required"));
    }

    let result = sqlx::query("UPDATE users SET username = $1 WHERE id = $2")
        .bind(input.username.trim())
        .bind(id)
        .execute(&state.pool)
        .await
        .map_err(|e| {
            if let sqlx::Error::Database(ref db_err) = e {
                if db_err.constraint() == Some("users_username_key") {
                    return ApiError::bad_request("username already exists");
                }
            }
            ApiError::from(e)
        })?;

    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("user not found"));
    }

    Ok(Json(serde_json::json!({"updated": true, "id": id})))
}

/// Delete a reader user (cascades on tokens and reading progress)
#[utoipa::path(
    delete,
    path = "/admin/users/{id}",
    tag = "users",
    params(
        ("id" = String, Path, description = "User UUID"),
    ),
    responses(
        (status = 200, description = "User deleted"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn delete_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let result = sqlx::query("DELETE FROM users WHERE id = $1")
        .bind(id)
        .execute(&state.pool)
        .await?;

    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("user not found"));
    }

    Ok(Json(serde_json::json!({"deleted": true, "id": id})))
}
```
135
apps/backoffice/app/(app)/authors/[name]/page.tsx
Normal file
135
apps/backoffice/app/(app)/authors/[name]/page.tsx
Normal file
@@ -0,0 +1,135 @@
import { fetchBooks, fetchAllSeries, BooksPageDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { BooksGrid } from "@/app/components/BookCard";
import { OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";

export const dynamic = "force-dynamic";

export default async function AuthorDetailPage({
  params,
  searchParams,
}: {
  params: Promise<{ name: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const { name: encodedName } = await params;
  const authorName = decodeURIComponent(encodedName);
  const searchParamsAwaited = await searchParams;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;

  // Fetch books by this author (server-side filtering via API) and series by this author
  const [booksPage, seriesPage] = await Promise.all([
    fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
    ),
    fetchAllSeries(undefined, undefined, undefined, 1, 200, undefined, undefined, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
    ),
  ]);

  const totalPages = Math.ceil(booksPage.total / limit);

  const authorSeries = seriesPage.items;

  return (
    <>
      {/* Breadcrumb */}
      <nav className="flex items-center gap-2 text-sm text-muted-foreground mb-6">
        <Link href="/authors" className="hover:text-foreground transition-colors">
          {t("authors.title")}
        </Link>
        <span>/</span>
        <span className="text-foreground font-medium">{authorName}</span>
      </nav>

      {/* Author Header */}
      <div className="flex items-center gap-4 mb-8">
        <div className="w-16 h-16 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
          <span className="text-2xl font-bold text-accent-foreground">
            {authorName.charAt(0).toUpperCase()}
          </span>
        </div>
        <div>
          <h1 className="text-3xl font-bold text-foreground">{authorName}</h1>
          <div className="flex items-center gap-4 mt-1">
            <span className="text-sm text-muted-foreground">
              {t("authors.bookCount", { count: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
            </span>
            {authorSeries.length > 0 && (
              <span className="text-sm text-muted-foreground">
                {t("authors.seriesCount", { count: String(authorSeries.length), plural: authorSeries.length !== 1 ? "s" : "" })}
              </span>
            )}
          </div>
        </div>
      </div>

      {/* Series Section */}
      {authorSeries.length > 0 && (
        <section className="mb-8">
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.seriesBy", { name: authorName })}
          </h2>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
            {authorSeries.map((s) => (
              <Link
                key={`${s.library_id}-${s.name}`}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200">
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={s.name}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name}
                    </h3>
                    <p className="text-xs text-muted-foreground mt-1">
                      {t("authors.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                    </p>
                  </div>
                </div>
              </Link>
            ))}
          </div>
        </section>
      )}

      {/* Books Section */}
      {booksPage.items.length > 0 && (
        <section>
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.booksBy", { name: authorName })}
          </h2>
          <BooksGrid books={booksPage.items} />
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={booksPage.total}
          />
        </section>
      )}

      {/* Empty State */}
      {booksPage.items.length === 0 && authorSeries.length === 0 && (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <p className="text-muted-foreground text-lg">
            {t("authors.noResults")}
          </p>
        </div>
      )}
    </>
  );
}
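The `t("authors.bookCount", { count, plural })` calls above rely on simple placeholder substitution rather than a full ICU plural engine: the caller precomputes the `"s"`/`""` suffix itself. A minimal sketch of that substitution, assuming the message catalog stores templates like `"{count} book{plural}"`; the actual `getServerTranslations` implementation is not shown in this changeset and may differ.

```ts
// Sketch of the placeholder convention the pages above depend on
// (assumption: the real i18n helper may behave differently).
function interpolate(template: string, vars: Record<string, string>): string {
  // Replace each {key} with the caller-supplied value, or empty string.
  return template.replace(/\{(\w+)\}/g, (_, key) => vars[key] ?? "");
}

// interpolate("{count} book{plural}", { count: "3", plural: "s" }) === "3 books"
// interpolate("{count} book{plural}", { count: "1", plural: "" })  === "1 book"
```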
apps/backoffice/app/(app)/authors/page.tsx (new file, 122 lines)
@@ -0,0 +1,122 @@
import { fetchAuthors, AuthorsPageDto } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Link from "next/link";

export const dynamic = "force-dynamic";

export default async function AuthorsPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;

  const authorsPage = await fetchAuthors(
    searchQuery || undefined,
    page,
    limit,
    sort,
  ).catch(() => ({ items: [], total: 0, page: 1, limit }) as AuthorsPageDto);

  const totalPages = Math.ceil(authorsPage.total / limit);
  const hasFilters = searchQuery || sort;

  const sortOptions = [
    { value: "", label: t("authors.sortName") },
    { value: "books", label: t("authors.sortBooks") },
  ];

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-violet-500" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
          </svg>
          {t("authors.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/authors"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("authors.searchPlaceholder") },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>

      {/* Results count */}
      <p className="text-sm text-muted-foreground mb-4">
        {authorsPage.total} {t("authors.title").toLowerCase()}
        {searchQuery && <> {t("authors.matchingQuery")} "{searchQuery}"</>}
      </p>

      {/* Authors List */}
      {authorsPage.items.length > 0 ? (
        <>
          <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 gap-4">
            {authorsPage.items.map((author) => (
              <Link
                key={author.name}
                href={`/authors/${encodeURIComponent(author.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 p-4">
                  <div className="flex items-center gap-3">
                    <div className="w-10 h-10 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
                      <span className="text-lg font-semibold text-violet-500">
                        {author.name.charAt(0).toUpperCase()}
                      </span>
                    </div>
                    <div className="min-w-0">
                      <h3 className="font-medium text-foreground truncate text-sm group-hover:text-violet-500 transition-colors" title={author.name}>
                        {author.name}
                      </h3>
                      <div className="flex items-center gap-3 mt-0.5">
                        <span className="text-xs text-muted-foreground">
                          {t("authors.bookCount", { count: String(author.book_count), plural: author.book_count !== 1 ? "s" : "" })}
                        </span>
                        <span className="text-xs text-muted-foreground">
                          {t("authors.seriesCount", { count: String(author.series_count), plural: author.series_count !== 1 ? "s" : "" })}
                        </span>
                      </div>
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={authorsPage.total}
          />
        </>
      ) : (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <div className="w-16 h-16 mb-4 text-muted-foreground/30">
            <svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
            </svg>
          </div>
          <p className="text-muted-foreground text-lg">
            {hasFilters ? t("authors.noResults") : t("authors.noAuthors")}
          </p>
        </div>
      )}
    </>
  );
}
apps/backoffice/app/(app)/books/[id]/page.tsx (new file, 242 lines)
@@ -0,0 +1,242 @@
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "@/lib/api";
import { BookPreview } from "@/app/components/BookPreview";
import { ConvertButton } from "@/app/components/ConvertButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import nextDynamic from "next/dynamic";
import { SafeHtml } from "@/app/components/SafeHtml";
import { getServerTranslations } from "@/lib/i18n/server";
import Image from "next/image";
import Link from "next/link";

const EditBookForm = nextDynamic(
  () => import("@/app/components/EditBookForm").then(m => m.EditBookForm)
);
import { notFound } from "next/navigation";

export const dynamic = "force-dynamic";

const readingStatusClassNames: Record<ReadingStatus, string> = {
  unread: "bg-muted/60 text-muted-foreground border border-border",
  reading: "bg-amber-500/15 text-amber-600 dark:text-amber-400 border border-amber-500/30",
  read: "bg-green-500/15 text-green-600 dark:text-green-400 border border-green-500/30",
};

async function fetchBook(bookId: string): Promise<BookDto | null> {
  try {
    return await apiFetch<BookDto>(`/books/${bookId}`);
  } catch {
    return null;
  }
}

export default async function BookDetailPage({
  params
}: {
  params: Promise<{ id: string }>;
}) {
  const { id } = await params;
  const [book, libraries] = await Promise.all([
    fetchBook(id),
    fetchLibraries().catch(() => [] as { id: string; name: string }[])
  ]);

  if (!book) {
    notFound();
  }

  const { t, locale } = await getServerTranslations();

  const library = libraries.find(l => l.id === book.library_id);
  const formatBadge = (book.format ?? book.kind).toUpperCase();
  const formatColor =
    formatBadge === "CBZ" ? "bg-success/10 text-success border-success/30" :
    formatBadge === "CBR" ? "bg-warning/10 text-warning border-warning/30" :
    formatBadge === "PDF" ? "bg-destructive/10 text-destructive border-destructive/30" :
    "bg-muted/50 text-muted-foreground border-border";
  const statusLabel = t(`status.${book.reading_status}` as "status.unread" | "status.reading" | "status.read");
  const statusClassName = readingStatusClassNames[book.reading_status];

  return (
    <div className="space-y-6">
      {/* Breadcrumb */}
      <div className="flex items-center gap-2 text-sm">
        <Link href="/libraries" className="text-muted-foreground hover:text-primary transition-colors">
          {t("bookDetail.libraries")}
        </Link>
        <span className="text-muted-foreground">/</span>
        {library && (
          <>
            <Link
              href={`/libraries/${book.library_id}/series`}
              className="text-muted-foreground hover:text-primary transition-colors"
            >
              {library.name}
            </Link>
            <span className="text-muted-foreground">/</span>
          </>
        )}
        {book.series && (
          <>
            <Link
              href={`/libraries/${book.library_id}/series/${encodeURIComponent(book.series)}`}
              className="text-muted-foreground hover:text-primary transition-colors"
            >
              {book.series}
            </Link>
            <span className="text-muted-foreground">/</span>
          </>
        )}
        <span className="text-foreground font-medium truncate">{book.title}</span>
      </div>

      {/* Hero */}
      <div className="flex flex-col sm:flex-row gap-6">
        {/* Cover */}
        <div className="flex-shrink-0">
          <div className="w-48 aspect-[2/3] relative rounded-xl overflow-hidden shadow-card border border-border">
            <Image
              src={getBookCoverUrl(book.id)}
              alt={t("bookDetail.coverOf", { title: book.title })}
              fill
              className="object-cover"
              sizes="192px"
              loading="lazy"
            />
          </div>
        </div>

        {/* Info */}
        <div className="flex-1 space-y-4">
          <div className="flex items-start justify-between gap-4">
            <div>
              <h1 className="text-3xl font-bold text-foreground">{book.title}</h1>
              {book.author && (
                <p className="text-base text-muted-foreground mt-1">{book.author}</p>
              )}
            </div>
            <EditBookForm book={book} />
          </div>

          {/* Series + Volume link */}
          {book.series && (
            <div className="flex items-center gap-2 text-sm">
              <Link
                href={`/libraries/${book.library_id}/series/${encodeURIComponent(book.series)}`}
                className="text-primary hover:text-primary/80 transition-colors font-medium"
              >
                {book.series}
              </Link>
              {book.volume != null && (
                <span className="px-2 py-0.5 bg-primary/10 text-primary rounded-md text-xs font-semibold">
                  Vol. {book.volume}
                </span>
              )}
            </div>
          )}

          {/* Reading status + actions */}
          <div className="flex flex-wrap items-center gap-3">
            <span className={`inline-flex items-center px-2.5 py-1 rounded-full text-xs font-semibold ${statusClassName}`}>
              {statusLabel}
              {book.reading_status === "reading" && book.reading_current_page != null && ` · p. ${book.reading_current_page}`}
            </span>
            {book.reading_last_read_at && (
              <span className="text-xs text-muted-foreground">
                {new Date(book.reading_last_read_at).toLocaleDateString(locale)}
              </span>
            )}
            <MarkBookReadButton bookId={book.id} currentStatus={book.reading_status} />
            {book.file_format === "cbr" && <ConvertButton bookId={book.id} />}
          </div>

          {/* Metadata pills */}
          <div className="flex flex-wrap items-center gap-2">
            <span className={`inline-flex px-2.5 py-1 rounded-full text-xs font-semibold border ${formatColor}`}>
              {formatBadge}
            </span>
            {book.page_count && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.page_count} {t("dashboard.pages").toLowerCase()}
              </span>
            )}
            {book.language && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.language.toUpperCase()}
              </span>
            )}
            {book.isbn && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-mono font-medium bg-muted/50 text-muted-foreground border border-border">
                ISBN {book.isbn}
              </span>
            )}
            {book.publish_date && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.publish_date}
              </span>
            )}
          </div>

          {/* Description */}
          {book.summary && (
            <SafeHtml html={book.summary} className="text-sm text-muted-foreground leading-relaxed" />
          )}
        </div>
      </div>

      {/* Technical info (collapsible) */}
      <details className="group">
        <summary className="cursor-pointer text-xs text-muted-foreground hover:text-foreground transition-colors select-none flex items-center gap-1.5">
          <svg className="w-3.5 h-3.5 transition-transform group-open:rotate-90" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 5l7 7-7 7" />
          </svg>
          {t("bookDetail.technicalInfo")}
        </summary>
        <div className="mt-3 p-4 rounded-lg bg-muted/30 border border-border/50 space-y-2 text-xs">
          {book.file_path && (
            <div className="flex flex-col gap-0.5">
              <span className="text-muted-foreground">{t("bookDetail.file")}</span>
              <code className="font-mono text-foreground break-all">{book.file_path}</code>
            </div>
          )}
          {book.file_format && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.fileFormat")}</span>
              <span className="text-foreground">{book.file_format.toUpperCase()}</span>
            </div>
          )}
          {book.file_parse_status && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.parsing")}</span>
              <span className={`inline-flex px-2 py-0.5 rounded-full text-xs font-medium ${
                book.file_parse_status === "success" ? "bg-success/10 text-success" :
                book.file_parse_status === "failed" ? "bg-destructive/10 text-destructive" :
                "bg-muted/50 text-muted-foreground"
              }`}>
                {book.file_parse_status}
              </span>
            </div>
          )}
          <div className="flex items-center justify-between">
            <span className="text-muted-foreground">Book ID</span>
            <code className="font-mono text-foreground">{book.id}</code>
          </div>
          <div className="flex items-center justify-between">
            <span className="text-muted-foreground">Library ID</span>
            <code className="font-mono text-foreground">{book.library_id}</code>
          </div>
          {book.updated_at && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.updatedAt")}</span>
              <span className="text-foreground">{new Date(book.updated_at).toLocaleString(locale)}</span>
            </div>
          )}
        </div>
      </details>

      {/* Book Preview */}
      {book.page_count && book.page_count > 0 && (
        <BookPreview bookId={book.id} pageCount={book.page_count} />
      )}
    </div>
  );
}
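One detail worth noting in the file above: `next/dynamic` is imported under the alias `nextDynamic` because the module already exports the Next.js route-segment config named `dynamic`. A minimal sketch restating the collision, taken directly from the declarations above:

```ts
// Both of these want the name `dynamic` in the same module scope,
// hence the aliased import in the page above.
import nextDynamic from "next/dynamic"; // lazy client-component loader
export const dynamic = "force-dynamic"; // Next.js route-segment config

const EditBookForm = nextDynamic(
  () => import("@/app/components/EditBookForm").then(m => m.EditBookForm)
);
```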
@@ -1,8 +1,10 @@
-import { fetchBooks, searchBooks, fetchLibraries, BookDto, LibraryDto, SeriesHitDto, getBookCoverUrl } from "../../lib/api";
+import { fetchBooks, searchBooks, fetchLibraries, BookDto, LibraryDto, SeriesHitDto, getBookCoverUrl } from "@/lib/api";
-import { BooksGrid, EmptyState } from "../components/BookCard";
+import { BooksGrid, EmptyState } from "@/app/components/BookCard";
-import { Card, CardContent, Button, FormField, FormInput, FormSelect, FormRow, OffsetPagination } from "../components/ui";
+import { LiveSearchForm } from "@/app/components/LiveSearchForm";
+import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
 import Link from "next/link";
 import Image from "next/image";
+import { getServerTranslations } from "@/lib/i18n/server";
 
 export const dynamic = "force-dynamic";
 
@@ -11,9 +13,14 @@ export default async function BooksPage({
 }: {
   searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
 }) {
+  const { t } = await getServerTranslations();
   const searchParamsAwaited = await searchParams;
   const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
   const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
+  const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
+  const format = typeof searchParamsAwaited.format === "string" ? searchParamsAwaited.format : undefined;
+  const metadataProvider = typeof searchParamsAwaited.metadata === "string" ? searchParamsAwaited.metadata : undefined;
+  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
   const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
   const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
 
@@ -36,11 +43,13 @@ export default async function BooksPage({
       library_id: hit.library_id,
       kind: hit.kind,
       title: hit.title,
-      author: hit.author,
+      author: hit.authors?.[0] ?? null,
+      authors: hit.authors ?? [],
       series: hit.series,
       volume: hit.volume,
       language: hit.language,
       page_count: null,
+      format: null,
       file_path: null,
       file_format: null,
       file_parse_status: null,
@@ -48,11 +57,14 @@ export default async function BooksPage({
       reading_status: "unread" as const,
       reading_current_page: null,
       reading_last_read_at: null,
+      summary: null,
+      isbn: null,
+      publish_date: null,
     }));
     totalHits = searchResponse.estimated_total_hits;
   }
 } else {
-  const booksPage = await fetchBooks(libraryId, undefined, page, limit).catch(() => ({
+  const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort, undefined, format, metadataProvider).catch(() => ({
     items: [] as BookDto[],
     total: 0,
     page: 1,
@@ -69,6 +81,39 @@ export default async function BooksPage({
 
   const totalPages = Math.ceil(total / limit);
 
+  const libraryOptions = [
+    { value: "", label: t("books.allLibraries") },
+    ...libraries.map((lib) => ({ value: lib.id, label: lib.name })),
+  ];
+
+  const statusOptions = [
+    { value: "", label: t("common.all") },
+    { value: "unread", label: t("status.unread") },
+    { value: "reading", label: t("status.reading") },
+    { value: "read", label: t("status.read") },
+  ];
+
+  const formatOptions = [
+    { value: "", label: t("books.allFormats") },
+    { value: "cbz", label: "CBZ" },
+    { value: "cbr", label: "CBR" },
+    { value: "pdf", label: "PDF" },
+    { value: "epub", label: "EPUB" },
+  ];
+
+  const metadataOptions = [
+    { value: "", label: t("series.metadataAll") },
+    { value: "linked", label: t("series.metadataLinked") },
+    { value: "unlinked", label: t("series.metadataUnlinked") },
+  ];
+
+  const sortOptions = [
+    { value: "", label: t("books.sortTitle") },
+    { value: "latest", label: t("books.sortLatest") },
+  ];
+
+  const hasFilters = searchQuery || libraryId || readingStatus || format || metadataProvider || sort;
+
   return (
     <>
       <div className="mb-6">
@@ -76,99 +121,64 @@ export default async function BooksPage({
           <svg className="w-8 h-8 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
             <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
           </svg>
-          Books
+          {t("books.title")}
         </h1>
       </div>
 
-      {/* Search Bar - Style compact et propre */}
       <Card className="mb-6">
         <CardContent className="pt-6">
-          <form className="flex flex-col sm:flex-row gap-3 items-start sm:items-end">
+          <LiveSearchForm
-            <FormField className="flex-1 w-full">
+            basePath="/books"
-              <label className="block text-sm font-medium text-foreground mb-1.5">Search</label>
+            fields={[
-              <FormInput
+              { name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder") },
-                name="q"
+              { name: "library", type: "select", label: t("books.library"), options: libraryOptions },
-                placeholder="Search by title, author, series..."
+              { name: "status", type: "select", label: t("books.status"), options: statusOptions },
-                defaultValue={searchQuery}
+              { name: "format", type: "select", label: t("books.format"), options: formatOptions },
-                className="w-full"
+              { name: "metadata", type: "select", label: t("series.metadata"), options: metadataOptions },
+              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
+            ]}
           />
-            </FormField>
-            <FormField className="w-full sm:w-48">
-              <label className="block text-sm font-medium text-foreground mb-1.5">Library</label>
-              <FormSelect name="library" defaultValue={libraryId || ""}>
-                <option value="">All libraries</option>
-                {libraries.map((lib) => (
-                  <option key={lib.id} value={lib.id}>
-                    {lib.name}
-                  </option>
-                ))}
-              </FormSelect>
-            </FormField>
-            <div className="flex gap-2 w-full sm:w-auto">
-              <Button type="submit" className="flex-1 sm:flex-none">
-                <svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
-                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
-                </svg>
-                Search
-              </Button>
-              {searchQuery && (
-                <Link
-                  href="/books"
-                  className="
-                    inline-flex items-center justify-center
-                    h-10 px-4
-                    border border-input
-                    text-sm font-medium
-                    text-muted-foreground
-                    bg-background
-                    rounded-md
-                    hover:bg-accent hover:text-accent-foreground
-                    transition-colors duration-200
-                    flex-1 sm:flex-none
-                  "
-                >
-                  Clear
-                </Link>
-              )}
-            </div>
-          </form>
         </CardContent>
       </Card>
 
       {/* Résultats */}
-      {searchQuery && totalHits !== null && (
+      {searchQuery && totalHits !== null ? (
         <p className="text-sm text-muted-foreground mb-4">
-          Found {totalHits} result{totalHits !== 1 ? 's' : ''} for "{searchQuery}"
+          {t("books.resultCountFor", { count: String(totalHits), plural: totalHits !== 1 ? "s" : "", query: searchQuery })}
+        </p>
+      ) : !searchQuery && (
+        <p className="text-sm text-muted-foreground mb-4">
+          {t("books.resultCount", { count: String(total), plural: total !== 1 ? "s" : "" })}
         </p>
       )}
 
       {/* Séries matchantes */}
       {seriesHits.length > 0 && (
         <div className="mb-8">
-          <h2 className="text-lg font-semibold text-foreground mb-3">Series</h2>
+          <h2 className="text-lg font-semibold text-foreground mb-3">{t("books.seriesHeading")}</h2>
           <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-6 gap-4">
             {seriesHits.map((s) => (
               <Link
                 key={`${s.library_id}-${s.name}`}
-                href={`/libraries/${s.library_id}/books?series=${encodeURIComponent(s.name)}`}
+                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                 className="group"
               >
                 <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200">
                   <div className="aspect-[2/3] relative bg-muted/50">
                     <Image
                       src={getBookCoverUrl(s.first_book_id)}
-                      alt={`Cover of ${s.name}`}
+                      alt={t("books.coverOf", { name: s.name })}
                       fill
                       className="object-cover"
-                      unoptimized
+                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                     />
                   </div>
                   <div className="p-2">
                     <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
-                      {s.name === "unclassified" ? "Unclassified" : s.name}
+                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                     </h3>
                     <p className="text-xs text-muted-foreground mt-0.5">
-                      {s.book_count} book{s.book_count !== 1 ? 's' : ''}
+                      {t("books.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                     </p>
                   </div>
                 </div>
@@ -181,7 +191,7 @@ export default async function BooksPage({
       {/* Grille de livres */}
       {displayBooks.length > 0 ? (
         <>
-          {searchQuery && <h2 className="text-lg font-semibold text-foreground mb-3">Books</h2>}
+          {searchQuery && <h2 className="text-lg font-semibold text-foreground mb-3">{t("books.title")}</h2>}
           <BooksGrid books={displayBooks} />
 
           {!searchQuery && (
@@ -194,7 +204,7 @@ export default async function BooksPage({
           )}
         </>
       ) : (
-        <EmptyState message={searchQuery ? `No books found for "${searchQuery}"` : "No books available"} />
+        <EmptyState message={searchQuery ? t("books.noResults", { query: searchQuery }) : t("books.noBooks")} />
       )}
     </>
   );
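The diff above repeats the `typeof value === "string" ? value : fallback` dance for every query parameter. A small helper could consolidate it; this is a sketch of a possible refactor under that assumption, not code from the changeset:

```ts
type SearchParams = { [key: string]: string | string[] | undefined };

// Hypothetical helpers (not in the diff): normalize a possibly-repeated
// query parameter to a single string, or to an integer with a fallback.
function param(sp: SearchParams, key: string): string | undefined {
  const v = sp[key];
  return typeof v === "string" ? v : undefined;
}

function intParam(sp: SearchParams, key: string, fallback: number): number {
  const v = param(sp, key);
  const n = v !== undefined ? parseInt(v, 10) : NaN;
  return Number.isNaN(n) ? fallback : n;
}

// Usage, mirroring the page above:
//   const page = intParam(searchParamsAwaited, "page", 1);
//   const sort = param(searchParamsAwaited, "sort");
```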
apps/backoffice/app/(app)/jobs/[id]/page.tsx (new file, 809 lines)
@@ -0,0 +1,809 @@
export const dynamic = "force-dynamic";

import { notFound } from "next/navigation";
import Link from "next/link";
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto } from "@/lib/api";
import {
  Card, CardHeader, CardTitle, CardDescription, CardContent,
  StatusBadge, JobTypeBadge, StatBox, ProgressBar
} from "@/app/components/ui";
import { JobDetailLive } from "@/app/components/JobDetailLive";
import { getServerTranslations } from "@/lib/i18n/server";

interface JobDetailPageProps {
  params: Promise<{ id: string }>;
}

interface JobDetails {
  id: string;
  library_id: string | null;
  book_id: string | null;
  type: string;
  status: string;
  created_at: string;
  started_at: string | null;
  finished_at: string | null;
  phase2_started_at: string | null;
  generating_thumbnails_started_at: string | null;
  current_file: string | null;
  progress_percent: number | null;
  processed_files: number | null;
  total_files: number | null;
  stats_json: {
    scanned_files: number;
    indexed_files: number;
    removed_files: number;
    errors: number;
    warnings: number;
  } | null;
  error_opt: string | null;
}

interface JobError {
  id: string;
  file_path: string;
  error_message: string;
  created_at: string;
}

async function getJobDetails(jobId: string): Promise<JobDetails | null> {
  try {
    return await apiFetch<JobDetails>(`/index/jobs/${jobId}`);
  } catch {
    return null;
  }
}

async function getJobErrors(jobId: string): Promise<JobError[]> {
  try {
    return await apiFetch<JobError[]>(`/index/jobs/${jobId}/errors`);
  } catch {
    return [];
  }
}

function formatDuration(start: string, end: string | null): string {
  const startDate = new Date(start);
  const endDate = end ? new Date(end) : new Date();
  const diff = endDate.getTime() - startDate.getTime();

  if (diff < 60000) return `${Math.floor(diff / 1000)}s`;
  if (diff < 3600000) return `${Math.floor(diff / 60000)}m ${Math.floor((diff % 60000) / 1000)}s`;
  return `${Math.floor(diff / 3600000)}h ${Math.floor((diff % 3600000) / 60000)}m`;
}

function formatSpeed(count: number, durationMs: number): string {
  if (durationMs === 0 || count === 0) return "-";
  return `${(count / (durationMs / 1000)).toFixed(1)}/s`;
}

export default async function JobDetailPage({ params }: JobDetailPageProps) {
  const { id } = await params;
  const [job, errors] = await Promise.all([
    getJobDetails(id),
    getJobErrors(id),
  ]);

  if (!job) {
    notFound();
  }

  const { t, locale } = await getServerTranslations();

  const JOB_TYPE_INFO: Record<string, { label: string; description: string; isThumbnailOnly: boolean }> = {
    rebuild: {
      label: t("jobType.rebuildLabel"),
      description: t("jobType.rebuildDesc"),
      isThumbnailOnly: false,
    },
    full_rebuild: {
      label: t("jobType.full_rebuildLabel"),
      description: t("jobType.full_rebuildDesc"),
      isThumbnailOnly: false,
    },
    rescan: {
      label: t("jobType.rescanLabel"),
      description: t("jobType.rescanDesc"),
      isThumbnailOnly: false,
    },
    thumbnail_rebuild: {
      label: t("jobType.thumbnail_rebuildLabel"),
      description: t("jobType.thumbnail_rebuildDesc"),
      isThumbnailOnly: true,
    },
    thumbnail_regenerate: {
      label: t("jobType.thumbnail_regenerateLabel"),
      description: t("jobType.thumbnail_regenerateDesc"),
      isThumbnailOnly: true,
    },
    cbr_to_cbz: {
      label: t("jobType.cbr_to_cbzLabel"),
      description: t("jobType.cbr_to_cbzDesc"),
      isThumbnailOnly: false,
    },
    metadata_batch: {
      label: t("jobType.metadata_batchLabel"),
      description: t("jobType.metadata_batchDesc"),
      isThumbnailOnly: false,
    },
    metadata_refresh: {
      label: t("jobType.metadata_refreshLabel"),
      description: t("jobType.metadata_refreshDesc"),
      isThumbnailOnly: false,
    },
  };

  const isMetadataBatch = job.type === "metadata_batch";
  const isMetadataRefresh = job.type === "metadata_refresh";

  // Fetch batch report & results for metadata_batch jobs
  let batchReport: MetadataBatchReportDto | null = null;
  let batchResults: MetadataBatchResultDto[] = [];
  if (isMetadataBatch) {
    [batchReport, batchResults] = await Promise.all([
      getMetadataBatchReport(id).catch(() => null),
      getMetadataBatchResults(id).catch(() => []),
    ]);
  }

  // Fetch refresh report for metadata_refresh jobs
  let refreshReport: MetadataRefreshReportDto | null = null;
  if (isMetadataRefresh) {
    refreshReport = await getMetadataRefreshReport(id).catch(() => null);
  }

  const typeInfo = JOB_TYPE_INFO[job.type] ?? {
    label: job.type,
    description: null,
    isThumbnailOnly: false,
  };

  const durationMs = job.started_at
    ? new Date(job.finished_at || new Date()).getTime() - new Date(job.started_at).getTime()
    : 0;

  const isCompleted = job.status === "success";
  const isFailed = job.status === "failed";
  const isCancelled = job.status === "cancelled";
  const isTerminal = isCompleted || isFailed || isCancelled;
  const isExtractingPages = job.status === "extracting_pages";
  const isThumbnailPhase = job.status === "generating_thumbnails";
  const isPhase2 = isExtractingPages || isThumbnailPhase;
  const { isThumbnailOnly } = typeInfo;

  // Which label to use for the progress card
  const progressTitle = isMetadataBatch
    ? t("jobDetail.metadataSearch")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefresh")
    : isThumbnailOnly
    ? t("jobType.thumbnail_rebuild")
    : isExtractingPages
    ? t("jobDetail.phase2a")
    : isThumbnailPhase
    ? t("jobDetail.phase2b")
    : t("jobDetail.phase1");

  const progressDescription = isMetadataBatch
    ? t("jobDetail.metadataSearchDesc")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefreshDesc")
    : isThumbnailOnly
    ? undefined
    : isExtractingPages
    ? t("jobDetail.phase2aDesc")
    : isThumbnailPhase
    ? t("jobDetail.phase2bDesc")
    : t("jobDetail.phase1Desc");

  // Speed metric: thumbnail count for thumbnail jobs, scanned files for index jobs
  const speedCount = isThumbnailOnly
    ? (job.processed_files ?? 0)
    : (job.stats_json?.scanned_files ?? 0);

  const showProgressCard =
    (isCompleted || isFailed || job.status === "running" || isPhase2) &&
    (job.total_files != null || !!job.current_file);

  return (
    <>
      <JobDetailLive jobId={id} isTerminal={isTerminal} />
      <div className="mb-6">
        <Link
          href="/jobs"
          className="inline-flex items-center text-sm text-muted-foreground hover:text-primary transition-colors duration-200"
        >
          <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 19l-7-7 7-7" />
          </svg>
          {t("jobDetail.backToJobs")}
        </Link>
        <h1 className="text-3xl font-bold text-foreground mt-2">{t("jobDetail.title")}</h1>
      </div>

      {/* Summary banner — completed */}
      {isCompleted && job.started_at && (
        <div className="mb-6 p-4 rounded-xl bg-success/10 border border-success/30 flex items-start gap-3">
          <svg className="w-5 h-5 text-success mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div className="text-sm text-success">
            <span className="font-semibold">{t("jobDetail.completedIn", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            {isMetadataBatch && batchReport && (
              <span className="ml-2 text-success/80">
                — {batchReport.auto_matched} {t("jobDetail.autoMatched").toLowerCase()}, {batchReport.already_linked} {t("jobDetail.alreadyLinked").toLowerCase()}, {batchReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {batchReport.errors} {t("jobDetail.errors").toLowerCase()}
              </span>
            )}
            {isMetadataRefresh && refreshReport && (
              <span className="ml-2 text-success/80">
                — {refreshReport.refreshed} {t("jobDetail.refreshed").toLowerCase()}, {refreshReport.unchanged} {t("jobDetail.unchanged").toLowerCase()}, {refreshReport.errors} {t("jobDetail.errors").toLowerCase()}
              </span>
            )}
            {!isMetadataBatch && !isMetadataRefresh && job.stats_json && (
              <span className="ml-2 text-success/80">
                — {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
                {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
                {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
                {job.stats_json.errors > 0 && `, ${job.stats_json.errors} ${t("jobDetail.errors").toLowerCase()}`}
                {job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
              </span>
            )}
            {!isMetadataBatch && !isMetadataRefresh && !job.stats_json && isThumbnailOnly && job.total_files != null && (
              <span className="ml-2 text-success/80">
                — {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
              </span>
            )}
          </div>
        </div>
      )}

      {/* Summary banner — failed */}
      {isFailed && (
        <div className="mb-6 p-4 rounded-xl bg-destructive/10 border border-destructive/30 flex items-start gap-3">
          <svg className="w-5 h-5 text-destructive mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div className="text-sm text-destructive">
            <span className="font-semibold">{t("jobDetail.jobFailed")}</span>
            {job.started_at && (
              <span className="ml-2 text-destructive/80">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            )}
            {job.error_opt && (
              <p className="mt-1 text-destructive/70 font-mono text-xs break-all">{job.error_opt}</p>
            )}
          </div>
        </div>
      )}

      {/* Summary banner — cancelled */}
      {isCancelled && (
        <div className="mb-6 p-4 rounded-xl bg-muted border border-border flex items-start gap-3">
          <svg className="w-5 h-5 text-muted-foreground mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M18.364 18.364A9 9 0 005.636 5.636m12.728 12.728A9 9 0 015.636 5.636m12.728 12.728L5.636 5.636" />
          </svg>
          <span className="text-sm text-muted-foreground">
            <span className="font-semibold">{t("jobDetail.cancelled")}</span>
            {job.started_at && (
              <span className="ml-2">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            )}
          </span>
        </div>
      )}

      <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
        {/* Overview Card */}
        <Card>
          <CardHeader>
            <CardTitle>{t("jobDetail.overview")}</CardTitle>
            {typeInfo.description && (
              <CardDescription>{typeInfo.description}</CardDescription>
            )}
          </CardHeader>
          <CardContent className="space-y-3">
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">ID</span>
              <code className="px-2 py-1 bg-muted rounded font-mono text-sm text-foreground">{job.id}</code>
            </div>
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">{t("jobsList.type")}</span>
              <div className="flex items-center gap-2">
                <JobTypeBadge type={job.type} />
                <span className="text-sm text-muted-foreground">{typeInfo.label}</span>
              </div>
            </div>
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">{t("jobsList.status")}</span>
              <StatusBadge status={job.status} />
            </div>
            <div className={`flex items-center justify-between py-2 ${(job.book_id || job.started_at) ? "border-b border-border/60" : ""}`}>
              <span className="text-sm text-muted-foreground">{t("jobDetail.library")}</span>
              <span className="text-sm text-foreground">{job.library_id || t("jobDetail.allLibraries")}</span>
            </div>
            {job.book_id && (
              <div className={`flex items-center justify-between py-2 ${job.started_at ? "border-b border-border/60" : ""}`}>
                <span className="text-sm text-muted-foreground">{t("jobDetail.book")}</span>
                <Link
                  href={`/books/${job.book_id}`}
                  className="text-sm text-primary hover:text-primary/80 font-mono hover:underline"
                >
                  {job.book_id.slice(0, 8)}…
                </Link>
              </div>
            )}
            {job.started_at && (
              <div className="flex items-center justify-between py-2">
                <span className="text-sm text-muted-foreground">{t("jobsList.duration")}</span>
                <span className="text-sm font-semibold text-foreground">
                  {formatDuration(job.started_at, job.finished_at)}
                </span>
              </div>
            )}
          </CardContent>
        </Card>

        {/* Timeline Card */}
        <Card>
          <CardHeader>
            <CardTitle>{t("jobDetail.timeline")}</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="relative">
              {/* Vertical line */}
              <div className="absolute left-[7px] top-2 bottom-2 w-px bg-border" />

              <div className="space-y-5">
                {/* Created */}
                <div className="flex items-start gap-4">
                  <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-muted border-2 border-border shrink-0 z-10" />
                  <div className="flex-1 min-w-0">
                    <span className="text-sm font-medium text-foreground">{t("jobDetail.created")}</span>
                    <p className="text-xs text-muted-foreground">{new Date(job.created_at).toLocaleString(locale)}</p>
                  </div>
                </div>

                {/* Phase 1 start — for index jobs that have two phases */}
                {job.started_at && job.phase2_started_at && (
                  <div className="flex items-start gap-4">
                    <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-primary shrink-0 z-10" />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.phase1")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                      <p className="text-xs text-primary/80 font-medium mt-0.5">
                        {t("jobDetail.duration", { duration: formatDuration(job.started_at, job.phase2_started_at) })}
                        {job.stats_json && (
                          <span className="text-muted-foreground font-normal ml-1">
                            · {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
                            {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
                            {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
                          </span>
                        )}
                      </p>
                    </div>
                  </div>
                )}

                {/* Phase 2a — Extracting pages (index jobs with phase2) */}
                {job.phase2_started_at && !isThumbnailOnly && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.generating_thumbnails_started_at || job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.phase2a")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.phase2_started_at).toLocaleString(locale)}</p>
                      <p className="text-xs text-primary/80 font-medium mt-0.5">
                        {t("jobDetail.duration", { duration: formatDuration(job.phase2_started_at, job.generating_thumbnails_started_at ?? job.finished_at ?? null) })}
                        {!job.generating_thumbnails_started_at && !job.finished_at && isExtractingPages && (
                          <span className="text-muted-foreground font-normal ml-1">· {t("jobDetail.inProgress")}</span>
                        )}
                      </p>
                    </div>
                  </div>
                )}

                {/* Phase 2b — Generating thumbnails */}
                {(job.generating_thumbnails_started_at || (job.phase2_started_at && isThumbnailOnly)) && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">
                        {isThumbnailOnly ? t("jobType.thumbnail_rebuild") : t("jobDetail.phase2b")}
                      </span>
                      <p className="text-xs text-muted-foreground">
                        {(job.generating_thumbnails_started_at ? new Date(job.generating_thumbnails_started_at) : job.phase2_started_at ? new Date(job.phase2_started_at) : null)?.toLocaleString(locale)}
                      </p>
                      {(job.generating_thumbnails_started_at || job.finished_at) && (
                        <p className="text-xs text-primary/80 font-medium mt-0.5">
                          {t("jobDetail.duration", { duration: formatDuration(
                            job.generating_thumbnails_started_at ?? job.phase2_started_at!,
                            job.finished_at ?? null
                          ) })}
                          {job.total_files != null && job.total_files > 0 && (
                            <span className="text-muted-foreground font-normal ml-1">
                              · {job.processed_files ?? job.total_files} {t("jobType.thumbnail_rebuild").toLowerCase()}
                            </span>
                          )}
                        </p>
                      )}
                      {!job.finished_at && isThumbnailPhase && (
                        <span className="text-xs text-muted-foreground">{t("jobDetail.inProgress")}</span>
                      )}
                    </div>
                  </div>
                )}

                {/* Started — for jobs without phase2 (cbr_to_cbz, or no phase yet) */}
                {job.started_at && !job.phase2_started_at && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.started")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                    </div>
                  </div>
                )}

                {/* Pending — not started yet */}
                {!job.started_at && (
                  <div className="flex items-start gap-4">
                    <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-warning shrink-0 z-10" />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.pendingStart")}</span>
                    </div>
                  </div>
                )}

                {/* Finished */}
                {job.finished_at && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      isCompleted ? "bg-success" : isFailed ? "bg-destructive" : "bg-muted"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">
                        {isCompleted ? t("jobDetail.finished") : isFailed ? t("jobDetail.failed") : t("jobDetail.cancelled")}
                      </span>
                      <p className="text-xs text-muted-foreground">{new Date(job.finished_at).toLocaleString(locale)}</p>
                    </div>
                  </div>
                )}
              </div>
            </div>
          </CardContent>
        </Card>

        {/* Progress Card */}
        {showProgressCard && (
          <Card>
            <CardHeader>
              <CardTitle>{progressTitle}</CardTitle>
              {progressDescription && <CardDescription>{progressDescription}</CardDescription>}
            </CardHeader>
            <CardContent>
              {job.total_files != null && job.total_files > 0 && (
                <>
                  <ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
                  <div className="grid grid-cols-3 gap-4">
                    <StatBox
                      value={job.processed_files ?? 0}
                      label={isThumbnailOnly || isPhase2 ? t("jobDetail.generated") : t("jobDetail.processed")}
                      variant="primary"
                    />
                    <StatBox value={job.total_files} label={t("jobDetail.total")} />
                    <StatBox
                      value={Math.max(0, job.total_files - (job.processed_files ?? 0))}
                      label={t("jobDetail.remaining")}
                      variant={isCompleted ? "default" : "warning"}
                    />
                  </div>
                </>
              )}
              {job.current_file && (
                <div className="mt-4 p-3 bg-muted/50 rounded-lg">
                  <span className="text-xs text-muted-foreground uppercase tracking-wide">{t("jobDetail.currentFile")}</span>
                  <code className="block mt-1 text-xs font-mono text-foreground break-all">{job.current_file}</code>
                </div>
              )}
            </CardContent>
          </Card>
        )}

        {/* Index Statistics — index jobs only */}
        {job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.indexStats")}</CardTitle>
              {job.started_at && (
                <CardDescription>
                  {formatDuration(job.started_at, job.finished_at)}
                  {speedCount > 0 && ` · ${formatSpeed(speedCount, durationMs)} scan rate`}
                </CardDescription>
              )}
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 sm:grid-cols-5 gap-4">
                <StatBox value={job.stats_json.scanned_files} label={t("jobDetail.scanned")} variant="success" />
                <StatBox value={job.stats_json.indexed_files} label={t("jobDetail.indexed")} variant="primary" />
                <StatBox value={job.stats_json.removed_files} label={t("jobDetail.removed")} variant="warning" />
                <StatBox value={job.stats_json.warnings ?? 0} label={t("jobDetail.warnings")} variant={(job.stats_json.warnings ?? 0) > 0 ? "warning" : "default"} />
                <StatBox value={job.stats_json.errors} label={t("jobDetail.errors")} variant={job.stats_json.errors > 0 ? "error" : "default"} />
              </div>
            </CardContent>
          </Card>
        )}

        {/* Thumbnail statistics — thumbnail-only jobs, completed */}
        {isThumbnailOnly && isCompleted && job.total_files != null && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.thumbnailStats")}</CardTitle>
              {job.started_at && (
                <CardDescription>
                  {formatDuration(job.started_at, job.finished_at)}
                  {speedCount > 0 && ` · ${formatSpeed(speedCount, durationMs)} thumbnails/s`}
                </CardDescription>
              )}
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 gap-4">
                <StatBox value={job.processed_files ?? job.total_files} label={t("jobDetail.generated")} variant="success" />
|
||||||
|
<StatBox value={job.total_files} label={t("jobDetail.total")} />
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Metadata batch report */}
|
||||||
|
{isMetadataBatch && batchReport && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.batchReport")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(batchReport.total_series) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
|
||||||
|
<StatBox value={batchReport.auto_matched} label={t("jobDetail.autoMatched")} variant="success" />
|
||||||
|
<StatBox value={batchReport.already_linked} label={t("jobDetail.alreadyLinked")} variant="primary" />
|
||||||
|
<StatBox value={batchReport.no_results} label={t("jobDetail.noResults")} />
|
||||||
|
<StatBox value={batchReport.too_many_results} label={t("jobDetail.tooManyResults")} variant="warning" />
|
||||||
|
<StatBox value={batchReport.low_confidence} label={t("jobDetail.lowConfidence")} variant="warning" />
|
||||||
|
<StatBox value={batchReport.errors} label={t("jobDetail.errors")} variant={batchReport.errors > 0 ? "error" : "default"} />
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Metadata refresh report */}
|
||||||
|
{isMetadataRefresh && refreshReport && (
|
||||||
|
<Card>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.refreshReport")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.refreshReportDesc", { count: String(refreshReport.total_links) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4">
|
||||||
|
<StatBox
|
||||||
|
value={refreshReport.refreshed}
|
||||||
|
label={t("jobDetail.refreshed")}
|
||||||
|
variant="success"
|
||||||
|
icon={
|
||||||
|
<svg className="w-6 h-6 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||||
|
</svg>
|
||||||
|
}
|
||||||
|
/>
|
||||||
|
<StatBox value={refreshReport.unchanged} label={t("jobDetail.unchanged")} />
|
||||||
|
<StatBox value={refreshReport.errors} label={t("jobDetail.errors")} variant={refreshReport.errors > 0 ? "error" : "default"} />
|
||||||
|
<StatBox value={refreshReport.total_links} label={t("jobDetail.total")} />
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Metadata refresh changes detail */}
|
||||||
|
{isMetadataRefresh && refreshReport && refreshReport.changes.length > 0 && (
|
||||||
|
<Card className="lg:col-span-2">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.refreshChanges")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.refreshChangesDesc", { count: String(refreshReport.changes.length) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-3 max-h-[600px] overflow-y-auto">
|
||||||
|
{refreshReport.changes.map((r, idx) => (
|
||||||
|
<div
|
||||||
|
key={idx}
|
||||||
|
className={`p-3 rounded-lg border ${
|
||||||
|
r.status === "updated" ? "bg-success/10 border-success/20" :
|
||||||
|
r.status === "error" ? "bg-destructive/10 border-destructive/20" :
|
||||||
|
"bg-muted/50 border-border/60"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between gap-2">
|
||||||
|
{job.library_id ? (
|
||||||
|
<Link
|
||||||
|
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||||
|
className="font-medium text-sm text-primary hover:underline truncate"
|
||||||
|
>
|
||||||
|
{r.series_name}
|
||||||
|
</Link>
|
||||||
|
) : (
|
||||||
|
<span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
|
||||||
|
)}
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<span className="text-[10px] text-muted-foreground">{r.provider}</span>
|
||||||
|
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
|
||||||
|
r.status === "updated" ? "bg-success/20 text-success" :
|
||||||
|
r.status === "error" ? "bg-destructive/20 text-destructive" :
|
||||||
|
"bg-muted text-muted-foreground"
|
||||||
|
}`}>
|
||||||
|
{r.status === "updated" ? t("jobDetail.refreshed") :
|
||||||
|
r.status === "error" ? t("common.error") :
|
||||||
|
t("jobDetail.unchanged")}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{r.error && (
|
||||||
|
<p className="text-xs text-destructive/80 mt-1">{r.error}</p>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Series field changes */}
|
||||||
|
{r.series_changes.length > 0 && (
|
||||||
|
<div className="mt-2">
|
||||||
|
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">{t("metadata.seriesLabel")}</span>
|
||||||
|
<div className="mt-1 space-y-1">
|
||||||
|
{r.series_changes.map((c, ci) => (
|
||||||
|
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||||
|
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||||
|
<span className="text-muted-foreground line-through truncate max-w-[200px]" title={String(c.old ?? "—")}>
|
||||||
|
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old)) : "—"}
|
||||||
|
</span>
|
||||||
|
<span className="text-success shrink-0">→</span>
|
||||||
|
<span className="text-success truncate max-w-[200px]" title={String(c.new ?? "—")}>
|
||||||
|
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new)) : "—"}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Book field changes */}
|
||||||
|
{r.book_changes.length > 0 && (
|
||||||
|
<div className="mt-2">
|
||||||
|
<span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">
|
||||||
|
{t("metadata.booksLabel")} ({r.book_changes.length})
|
||||||
|
</span>
|
||||||
|
<div className="mt-1 space-y-2">
|
||||||
|
{r.book_changes.map((b, bi) => (
|
||||||
|
<div key={bi} className="pl-2 border-l-2 border-border/60">
|
||||||
|
<Link
|
||||||
|
href={`/books/${b.book_id}`}
|
||||||
|
className="text-xs text-primary hover:underline font-medium"
|
||||||
|
>
|
||||||
|
{b.volume != null && <span className="text-muted-foreground mr-1">T.{b.volume}</span>}
|
||||||
|
{b.title}
|
||||||
|
</Link>
|
||||||
|
<div className="mt-0.5 space-y-0.5">
|
||||||
|
{b.changes.map((c, ci) => (
|
||||||
|
<div key={ci} className="flex items-start gap-2 text-xs">
|
||||||
|
<span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
|
||||||
|
<span className="text-muted-foreground line-through truncate max-w-[150px]" title={String(c.old ?? "—")}>
|
||||||
|
{c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old).substring(0, 60)) : "—"}
|
||||||
|
</span>
|
||||||
|
<span className="text-success shrink-0">→</span>
|
||||||
|
<span className="text-success truncate max-w-[150px]" title={String(c.new ?? "—")}>
|
||||||
|
{c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new).substring(0, 60)) : "—"}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Metadata batch results */}
|
||||||
|
{isMetadataBatch && batchResults.length > 0 && (
|
||||||
|
<Card className="lg:col-span-2">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.seriesProcessed", { count: String(batchResults.length) })}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
|
||||||
|
{batchResults.map((r) => (
|
||||||
|
<div
|
||||||
|
key={r.id}
|
||||||
|
className={`p-3 rounded-lg border ${
|
||||||
|
r.status === "auto_matched" ? "bg-success/10 border-success/20" :
|
||||||
|
r.status === "already_linked" ? "bg-primary/10 border-primary/20" :
|
||||||
|
r.status === "error" ? "bg-destructive/10 border-destructive/20" :
|
||||||
|
"bg-muted/50 border-border/60"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
<div className="flex items-center justify-between gap-2">
|
||||||
|
{job.library_id ? (
|
||||||
|
<Link
|
||||||
|
href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
|
||||||
|
className="font-medium text-sm text-primary hover:underline truncate"
|
||||||
|
>
|
||||||
|
{r.series_name}
|
||||||
|
</Link>
|
||||||
|
) : (
|
||||||
|
<span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
|
||||||
|
)}
|
||||||
|
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
|
||||||
|
r.status === "auto_matched" ? "bg-success/20 text-success" :
|
||||||
|
r.status === "already_linked" ? "bg-primary/20 text-primary" :
|
||||||
|
r.status === "no_results" ? "bg-muted text-muted-foreground" :
|
||||||
|
r.status === "too_many_results" ? "bg-amber-500/15 text-amber-600" :
|
||||||
|
r.status === "low_confidence" ? "bg-amber-500/15 text-amber-600" :
|
||||||
|
r.status === "error" ? "bg-destructive/20 text-destructive" :
|
||||||
|
"bg-muted text-muted-foreground"
|
||||||
|
}`}>
|
||||||
|
{r.status === "auto_matched" ? t("jobDetail.autoMatched") :
|
||||||
|
r.status === "already_linked" ? t("jobDetail.alreadyLinked") :
|
||||||
|
r.status === "no_results" ? t("jobDetail.noResults") :
|
||||||
|
r.status === "too_many_results" ? t("jobDetail.tooManyResults") :
|
||||||
|
r.status === "low_confidence" ? t("jobDetail.lowConfidence") :
|
||||||
|
r.status === "error" ? t("common.error") :
|
||||||
|
r.status}
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
<div className="flex items-center gap-3 mt-1 text-xs text-muted-foreground">
|
||||||
|
{r.provider_used && (
|
||||||
|
<span>{r.provider_used}{r.fallback_used ? ` ${t("metadata.fallbackUsed")}` : ""}</span>
|
||||||
|
)}
|
||||||
|
{r.candidates_count > 0 && (
|
||||||
|
<span>{r.candidates_count} {t("jobDetail.candidates", { plural: r.candidates_count > 1 ? "s" : "" })}</span>
|
||||||
|
)}
|
||||||
|
{r.best_confidence != null && (
|
||||||
|
<span>{Math.round(r.best_confidence * 100)}% {t("jobDetail.confidence")}</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
{r.best_candidate_json && (
|
||||||
|
<p className="text-xs text-muted-foreground mt-1">
|
||||||
|
{t("jobDetail.match", { title: (r.best_candidate_json as { title?: string }).title || r.best_candidate_json.toString() })}
|
||||||
|
</p>
|
||||||
|
)}
|
||||||
|
{r.error_message && (
|
||||||
|
<p className="text-xs text-destructive/80 mt-1">{r.error_message}</p>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* File errors */}
|
||||||
|
{errors.length > 0 && (
|
||||||
|
<Card className="lg:col-span-2">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobDetail.fileErrors", { count: String(errors.length) })}</CardTitle>
|
||||||
|
<CardDescription>{t("jobDetail.fileErrorsDesc")}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent className="space-y-2 max-h-80 overflow-y-auto">
|
||||||
|
{errors.map((error) => (
|
||||||
|
<div key={error.id} className="p-3 bg-destructive/10 rounded-lg border border-destructive/20">
|
||||||
|
<code className="block text-sm font-mono text-destructive mb-1">{error.file_path}</code>
|
||||||
|
<p className="text-sm text-destructive/80">{error.error_message}</p>
|
||||||
|
<span className="text-xs text-muted-foreground">{new Date(error.created_at).toLocaleString(locale)}</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
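The duration and speed strings on this page come from two helpers, formatDuration and formatSpeed, whose definitions sit outside the hunks shown in this compare view. A minimal sketch consistent with the call sites above (signatures inferred; not the repository's actual implementation):

// Sketch only — inferred from call sites such as formatDuration(job.started_at, job.finished_at)
// and formatSpeed(speedCount, durationMs); the real helpers in the repo may differ.
function formatDuration(startIso: string, endIso: string | null | undefined): string {
  const end = endIso ? new Date(endIso).getTime() : Date.now();
  const totalSec = Math.max(0, Math.floor((end - new Date(startIso).getTime()) / 1000));
  const h = Math.floor(totalSec / 3600);
  const m = Math.floor((totalSec % 3600) / 60);
  const s = totalSec % 60;
  return h > 0 ? `${h}h ${m}m ${s}s` : m > 0 ? `${m}m ${s}s` : `${s}s`;
}

function formatSpeed(count: number, durationMs: number): string {
  if (durationMs <= 0) return "0";
  return (count / (durationMs / 1000)).toFixed(1); // items per second
}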
269
apps/backoffice/app/(app)/jobs/page.tsx
Normal file
@@ -0,0 +1,269 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, IndexJobDto, LibraryDto } from "@/lib/api";
import { JobsList } from "@/app/components/JobsList";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
  const { highlight } = await searchParams;
  const { t } = await getServerTranslations();
  const [jobs, libraries] = await Promise.all([
    listJobs().catch(() => [] as IndexJobDto[]),
    fetchLibraries().catch(() => [] as LibraryDto[])
  ]);

  const libraryMap = new Map(libraries.map(l => [l.id, l.name]));

  async function triggerRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerFullRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined, true);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerRescan(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined, false, true);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerThumbnailsRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildThumbnails(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerThumbnailsRegenerate(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await regenerateThumbnails(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerMetadataBatch(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    if (libraryId) {
      let result;
      try {
        result = await startMetadataBatch(libraryId);
      } catch {
        // Library may have metadata disabled — ignore silently
        return;
      }
      revalidatePath("/jobs");
      redirect(`/jobs?highlight=${result.id}`);
    } else {
      // All libraries — skip those with metadata disabled
      const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
      let lastId: string | undefined;
      for (const lib of allLibraries) {
        if (lib.metadata_provider === "none") continue;
        try {
          const result = await startMetadataBatch(lib.id);
          if (result.status !== "already_running") lastId = result.id;
        } catch {
          // Library may have metadata disabled or other issue — skip
        }
      }
      revalidatePath("/jobs");
      redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
    }
  }

  async function triggerMetadataRefresh(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    if (libraryId) {
      let result;
      try {
        result = await startMetadataRefresh(libraryId);
      } catch {
        return;
      }
      revalidatePath("/jobs");
      redirect(`/jobs?highlight=${result.id}`);
    } else {
      // All libraries — skip those with metadata disabled
      const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
      let lastId: string | undefined;
      for (const lib of allLibraries) {
        if (lib.metadata_provider === "none") continue;
        try {
          const result = await startMetadataRefresh(lib.id);
          if (result.status !== "already_running") lastId = result.id;
        } catch {
          // Library may have metadata disabled or no approved links — skip
        }
      }
      revalidatePath("/jobs");
      redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
    }
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" />
          </svg>
          {t("jobs.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("jobs.startJob")}</CardTitle>
          <CardDescription>{t("jobs.startJobDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form>
            <div className="mb-6">
              <FormField className="max-w-xs">
                <FormSelect name="library_id" defaultValue="">
                  <option value="">{t("jobs.allLibraries")}</option>
                  {libraries.map((lib) => (
                    <option key={lib.id} value={lib.id}>{lib.name}</option>
                  ))}
                </FormSelect>
              </FormField>
            </div>
            <div className="grid grid-cols-1 lg:grid-cols-3 gap-6">

              {/* Indexation group */}
              <div className="space-y-3">
                <div className="flex items-center gap-2 text-sm font-semibold text-foreground">
                  <svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                    <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
                  </svg>
                  {t("jobs.groupIndexation")}
                </div>
                <div className="space-y-2">
                  <button type="submit" formAction={triggerRebuild}
                    className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
                      </svg>
                      <span className="font-medium text-sm text-foreground">{t("jobs.rebuild")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rebuildShort")}</p>
                  </button>
                  <button type="submit" formAction={triggerRescan}
                    className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
                      </svg>
                      <span className="font-medium text-sm text-foreground">{t("jobs.rescan")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rescanShort")}</p>
                  </button>
                  <button type="submit" formAction={triggerFullRebuild}
                    className="w-full text-left rounded-lg border border-destructive/30 bg-destructive/5 p-3 hover:bg-destructive/10 transition-colors group cursor-pointer">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-destructive shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
                      </svg>
                      <span className="font-medium text-sm text-destructive">{t("jobs.fullRebuild")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.fullRebuildShort")}</p>
                  </button>
                </div>
              </div>

              {/* Thumbnails group */}
              <div className="space-y-3">
                <div className="flex items-center gap-2 text-sm font-semibold text-foreground">
                  <svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                    <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
                  </svg>
                  {t("jobs.groupThumbnails")}
                </div>
                <div className="space-y-2">
                  <button type="submit" formAction={triggerThumbnailsRebuild}
                    className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6v6m0 0v6m0-6h6m-6 0H6" />
                      </svg>
                      <span className="font-medium text-sm text-foreground">{t("jobs.generateThumbnails")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.generateThumbnailsShort")}</p>
                  </button>
                  <button type="submit" formAction={triggerThumbnailsRegenerate}
                    className="w-full text-left rounded-lg border border-warning/30 bg-warning/5 p-3 hover:bg-warning/10 transition-colors group cursor-pointer">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-warning shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
                      </svg>
                      <span className="font-medium text-sm text-warning">{t("jobs.regenerateThumbnails")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.regenerateThumbnailsShort")}</p>
                  </button>
                </div>
              </div>

              {/* Metadata group */}
              <div className="space-y-3">
                <div className="flex items-center gap-2 text-sm font-semibold text-foreground">
                  <svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                    <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z" />
                  </svg>
                  {t("jobs.groupMetadata")}
                </div>
                <div className="space-y-2">
                  <button type="submit" formAction={triggerMetadataBatch}
                    className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
                      </svg>
                      <span className="font-medium text-sm text-foreground">{t("jobs.batchMetadata")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.batchMetadataShort")}</p>
                  </button>
                  <button type="submit" formAction={triggerMetadataRefresh}
                    className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
                    <div className="flex items-center gap-2">
                      <svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                        <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
                      </svg>
                      <span className="font-medium text-sm text-foreground">{t("jobs.refreshMetadata")}</span>
                    </div>
                    <p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.refreshMetadataShort")}</p>
                  </button>
                </div>
              </div>

            </div>
          </form>
        </CardContent>
      </Card>

      <JobsList
        initialJobs={jobs}
        libraries={libraryMap}
        highlightJobId={highlight}
      />
    </>
  );
}
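One detail worth flagging in the actions above: redirect() is always called after, never inside, the try/catch blocks. In the Next.js App Router, redirect() works by throwing a special control-flow error, so a bare catch around it would swallow the navigation. A minimal illustration of the failure mode (hypothetical action, not from this diff):

async function brokenAction() {
  "use server";
  try {
    redirect("/jobs"); // bug: the NEXT_REDIRECT control-flow error thrown here...
  } catch {
    // ...is caught and discarded, so the browser never navigates
  }
}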
127
apps/backoffice/app/(app)/layout.tsx
Normal file
@@ -0,0 +1,127 @@
import Image from "next/image";
import Link from "next/link";
import type { ReactNode } from "react";
import { cookies } from "next/headers";
import { revalidatePath } from "next/cache";
import { ThemeToggle } from "@/app/theme-toggle";
import { JobsIndicator } from "@/app/components/JobsIndicator";
import { NavIcon, Icon } from "@/app/components/ui";
import { LogoutButton } from "@/app/components/LogoutButton";
import { MobileNav } from "@/app/components/MobileNav";
import { UserSwitcher } from "@/app/components/UserSwitcher";
import { fetchUsers } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslationKey } from "@/lib/i18n/fr";

type NavItem = {
  href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings";
  labelKey: TranslationKey;
  icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings";
};

const navItems: NavItem[] = [
  { href: "/", labelKey: "nav.dashboard", icon: "dashboard" },
  { href: "/books", labelKey: "nav.books", icon: "books" },
  { href: "/series", labelKey: "nav.series", icon: "series" },
  { href: "/authors", labelKey: "nav.authors", icon: "authors" },
  { href: "/libraries", labelKey: "nav.libraries", icon: "libraries" },
  { href: "/jobs", labelKey: "nav.jobs", icon: "jobs" },
  { href: "/tokens", labelKey: "nav.tokens", icon: "tokens" },
];

export default async function AppLayout({ children }: { children: ReactNode }) {
  const { t } = await getServerTranslations();
  const cookieStore = await cookies();
  const activeUserId = cookieStore.get("as_user_id")?.value || null;
  const users = await fetchUsers().catch(() => []);

  async function setActiveUserAction(formData: FormData) {
    "use server";
    const userId = formData.get("user_id") as string;
    const store = await cookies();
    if (userId) {
      store.set("as_user_id", userId, { path: "/", httpOnly: false, sameSite: "lax" });
    } else {
      store.delete("as_user_id");
    }
    revalidatePath("/", "layout");
  }

  return (
    <>
      <header className="sticky top-0 z-50 w-full border-b border-border/40 bg-background/70 backdrop-blur-xl backdrop-saturate-150 supports-[backdrop-filter]:bg-background/60">
        <nav className="container mx-auto flex h-16 items-center justify-between px-4">
          <Link
            href="/"
            className="flex items-center gap-3 hover:opacity-80 transition-opacity duration-200"
          >
            <Image src="/logo.png" alt="StripStream" width={36} height={36} className="rounded-lg" />
            <div className="flex items-baseline gap-2">
              <span className="text-xl font-bold tracking-tight text-foreground">StripStream</span>
              <span className="text-sm text-muted-foreground font-medium hidden xl:inline">
                {t("common.backoffice")}
              </span>
            </div>
          </Link>

          <div className="flex items-center gap-2">
            <div className="hidden md:flex items-center gap-1">
              {navItems.map((item) => (
                <NavLink key={item.href} href={item.href} title={t(item.labelKey)}>
                  <NavIcon name={item.icon} />
                  <span className="ml-2 hidden xl:inline">{t(item.labelKey)}</span>
                </NavLink>
              ))}
            </div>

            <UserSwitcher
              users={users}
              activeUserId={activeUserId}
              setActiveUserAction={setActiveUserAction}
            />

            <div className="flex items-center gap-1 pl-4 ml-2 border-l border-border/60">
              <JobsIndicator />
              <Link
                href="/settings"
                className="hidden xl:flex p-2 rounded-lg text-muted-foreground hover:text-foreground hover:bg-accent transition-colors"
                title={t("nav.settings")}
              >
                <Icon name="settings" size="md" />
              </Link>
              <ThemeToggle />
              <LogoutButton />
              <MobileNav navItems={navItems.map(item => ({ ...item, label: t(item.labelKey) }))} />
            </div>
          </div>
        </nav>
      </header>

      <main className="container mx-auto px-4 sm:px-6 lg:px-8 py-8 pb-16">
        {children}
      </main>
    </>
  );
}

function NavLink({ href, title, children }: { href: NavItem["href"]; title?: string; children: React.ReactNode }) {
  return (
    <Link
      href={href}
      title={title}
      className="
        flex items-center
        px-2 lg:px-3 py-2
        rounded-lg
        text-sm font-medium
        text-muted-foreground
        hover:text-foreground
        hover:bg-accent
        transition-colors duration-200
        active:scale-[0.98]
      "
    >
      {children}
    </Link>
  );
}
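The user switcher persists the impersonated user in an as_user_id cookie set with httpOnly: false, which suggests client-side code is expected to read it; that reader is not part of this compare view. A hypothetical client-side lookup, shown only to illustrate what httpOnly: false enables:

// Hypothetical helper — only works because the layout sets as_user_id with httpOnly: false.
export function getActiveUserId(): string | null {
  const match = document.cookie.match(/(?:^|;\s*)as_user_id=([^;]*)/);
  return match ? decodeURIComponent(match[1]) : null;
}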
@@ -1,8 +1,9 @@
-import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "../../../../lib/api";
+import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "@/lib/api";
-import { BooksGrid, EmptyState } from "../../../components/BookCard";
+import { BooksGrid, EmptyState } from "@/app/components/BookCard";
-import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader";
+import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
-import { OffsetPagination } from "../../../components/ui";
+import { OffsetPagination } from "@/app/components/ui";
 import { notFound } from "next/navigation";
+import { getServerTranslations } from "@/lib/i18n/server";
 
 export const dynamic = "force-dynamic";
 
@@ -14,6 +15,7 @@ export default async function LibraryBooksPage({
   searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
 }) {
   const { id } = await params;
+  const { t } = await getServerTranslations();
   const searchParamsAwaited = await searchParams;
   const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
   const series = typeof searchParamsAwaited.series === "string" ? searchParamsAwaited.series : undefined;
@@ -38,14 +40,14 @@ export default async function LibraryBooksPage({
     coverUrl: getBookCoverUrl(book.id)
   }));
 
-  const seriesDisplayName = series === "unclassified" ? "Unclassified" : series;
+  const seriesDisplayName = series === "unclassified" ? t("books.unclassified") : (series ?? "");
   const totalPages = Math.ceil(booksPage.total / limit);
 
   return (
     <div className="space-y-6">
       <LibrarySubPageHeader
         library={library}
-        title={series ? `Books in "${seriesDisplayName}"` : "All Books"}
+        title={series ? t("libraryBooks.booksOfSeries", { series: seriesDisplayName }) : t("libraryBooks.allBooks")}
         icon={
           <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
             <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
@@ -53,9 +55,9 @@ export default async function LibraryBooksPage({
         }
         iconColor="text-success"
         filterInfo={series ? {
-          label: `Showing books from series "${seriesDisplayName}"`,
+          label: t("libraryBooks.filterLabel", { series: seriesDisplayName }),
           clearHref: `/libraries/${id}/books`,
-          clearLabel: "View all books"
+          clearLabel: t("libraryBooks.viewAll")
         } : undefined}
       />
 
@@ -71,7 +73,7 @@ export default async function LibraryBooksPage({
         />
       </>
     ) : (
-      <EmptyState message={series ? `No books in series "${seriesDisplayName}"` : "No books in this library yet"} />
+      <EmptyState message={series ? t("libraryBooks.noBooksInSeries", { series: seriesDisplayName }) : t("libraryBooks.noBooks")} />
     )}
   </div>
 );
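The rewrites in the hunks above are a mechanical import migration from deep relative paths to the @/ alias; both forms resolve to the same modules. The alias itself must be declared in the app's tsconfig paths mapping, which is assumed here since the tsconfig change is not part of this compare view:

// Before: import { fetchLibraries } from "../../../../lib/api";
// After:  import { fetchLibraries } from "@/lib/api";
// Assumes tsconfig.json declares something like "paths": { "@/*": ["./*"] } (not shown in this diff).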
229
apps/backoffice/app/(app)/libraries/[id]/series/[name]/page.tsx
Normal file
@@ -0,0 +1,229 @@
import { fetchLibraries, fetchBooks, fetchSeriesMetadata, getBookCoverUrl, getMetadataLink, getMissingBooks, BookDto, SeriesMetadataDto, ExternalMetadataLinkDto, MissingBooksDto } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import nextDynamic from "next/dynamic";
import { OffsetPagination } from "@/app/components/ui";
import { SafeHtml } from "@/app/components/SafeHtml";
import Image from "next/image";
import Link from "next/link";

const EditSeriesForm = nextDynamic(
  () => import("@/app/components/EditSeriesForm").then(m => m.EditSeriesForm)
);
const MetadataSearchModal = nextDynamic(
  () => import("@/app/components/MetadataSearchModal").then(m => m.MetadataSearchModal)
);
const ProwlarrSearchModal = nextDynamic(
  () => import("@/app/components/ProwlarrSearchModal").then(m => m.ProwlarrSearchModal)
);
import { notFound } from "next/navigation";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function SeriesDetailPage({
  params,
  searchParams,
}: {
  params: Promise<{ id: string; name: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { id, name } = await params;
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 50;

  const seriesName = decodeURIComponent(name);

  const [library, booksPage, seriesMeta, metadataLinks] = await Promise.all([
    fetchLibraries().then((libs) => libs.find((l) => l.id === id)),
    fetchBooks(id, seriesName, page, limit).catch(() => ({
      items: [] as BookDto[],
      total: 0,
      page: 1,
      limit,
    })),
    fetchSeriesMetadata(id, seriesName).catch(() => null as SeriesMetadataDto | null),
    getMetadataLink(id, seriesName).catch(() => [] as ExternalMetadataLinkDto[]),
  ]);

  const existingLink = metadataLinks.find((l) => l.status === "approved") ?? metadataLinks[0] ?? null;
  let missingData: MissingBooksDto | null = null;
  if (existingLink && existingLink.status === "approved") {
    missingData = await getMissingBooks(existingLink.id).catch(() => null);
  }

  if (!library) {
    notFound();
  }

  const books = booksPage.items.map((book) => ({
    ...book,
    coverUrl: getBookCoverUrl(book.id),
  }));

  const totalPages = Math.ceil(booksPage.total / limit);
  const booksReadCount = booksPage.items.filter((b) => b.reading_status === "read").length;
  const displayName = seriesName === "unclassified" ? t("books.unclassified") : seriesName;

  // Use first book cover as series cover
  const coverBookId = booksPage.items[0]?.id;

  return (
    <div className="space-y-6">
      {/* Breadcrumb */}
      <div className="flex items-center gap-2 text-sm">
        <Link
          href="/libraries"
          className="text-muted-foreground hover:text-primary transition-colors"
        >
          {t("nav.libraries")}
        </Link>
        <span className="text-muted-foreground">/</span>
        <Link
          href={`/libraries/${id}/series`}
          className="text-muted-foreground hover:text-primary transition-colors"
        >
          {library.name}
        </Link>
        <span className="text-muted-foreground">/</span>
        <span className="text-foreground font-medium">{displayName}</span>
      </div>

      {/* Series Header */}
      <div className="flex flex-col sm:flex-row gap-6">
        {coverBookId && (
          <div className="flex-shrink-0">
            <div className="w-40 aspect-[2/3] relative rounded-xl overflow-hidden shadow-card border border-border">
              <Image
                src={getBookCoverUrl(coverBookId)}
                alt={t("books.coverOf", { name: displayName })}
                fill
                className="object-cover"
                sizes="160px"
              />
            </div>
          </div>
        )}

        <div className="flex-1 space-y-4">
          <h1 className="text-3xl font-bold text-foreground">{displayName}</h1>

          <div className="flex flex-wrap items-center gap-3">
            {seriesMeta && seriesMeta.authors.length > 0 && (
              <p className="text-base text-muted-foreground">{seriesMeta.authors.join(", ")}</p>
            )}
            {seriesMeta?.status && (
              <span className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${
                seriesMeta.status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                seriesMeta.status === "ended" ? "bg-green-500/15 text-green-600" :
                seriesMeta.status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                seriesMeta.status === "cancelled" ? "bg-red-500/15 text-red-600" :
                "bg-muted text-muted-foreground"
              }`}>
                {t(`seriesStatus.${seriesMeta.status}` as any) || seriesMeta.status}
              </span>
            )}
          </div>

          {seriesMeta?.description && (
            <SafeHtml html={seriesMeta.description} className="text-sm text-muted-foreground leading-relaxed" />
          )}

          <div className="flex flex-wrap items-center gap-4 text-sm">
            {seriesMeta && seriesMeta.publishers.length > 0 && (
              <span className="text-muted-foreground">
                <span className="font-semibold text-foreground">{seriesMeta.publishers.join(", ")}</span>
              </span>
            )}
            {seriesMeta?.start_year && (
              <span className="text-muted-foreground">{seriesMeta.start_year}</span>
            )}
            {((seriesMeta && seriesMeta.publishers.length > 0) || seriesMeta?.start_year) && <span className="w-px h-4 bg-border" />}
            <span className="text-muted-foreground">
              <span className="font-semibold text-foreground">{booksPage.total}</span> {t("dashboard.books").toLowerCase()}
            </span>
            <span className="w-px h-4 bg-border" />
            <span className="text-muted-foreground">
              {t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
            </span>

            {/* Reading progress bar */}
            <div className="flex items-center gap-2 flex-1 min-w-[120px] max-w-[200px]">
              <div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
                <div
                  className="h-full bg-green-500 rounded-full transition-all"
                  style={{ width: `${booksPage.total > 0 ? (booksReadCount / booksPage.total) * 100 : 0}%` }}
                />
              </div>
            </div>

            {/* Collection progress bar (owned / expected) */}
            {missingData && missingData.total_external > 0 && (
              <>
                <span className="w-px h-4 bg-border" />
                <span className="text-muted-foreground">
                  {booksPage.total}/{missingData.total_external} — {t("series.missingCount", { count: missingData.missing_count, plural: missingData.missing_count !== 1 ? "s" : "" })}
                </span>
                <div className="w-[150px] h-2 bg-muted rounded-full overflow-hidden">
                  <div
                    className="h-full bg-amber-500 rounded-full transition-all"
                    style={{ width: `${Math.round((booksPage.total / missingData.total_external) * 100)}%` }}
                  />
                </div>
              </>
            )}
          </div>

          <div className="flex flex-wrap items-center gap-3">
            <MarkSeriesReadButton
              seriesName={seriesName}
              bookCount={booksPage.total}
              booksReadCount={booksReadCount}
            />
            <EditSeriesForm
              libraryId={id}
              seriesName={seriesName}
              currentAuthors={seriesMeta?.authors ?? []}
              currentPublishers={seriesMeta?.publishers ?? []}
              currentBookAuthor={seriesMeta?.book_author ?? booksPage.items[0]?.author ?? null}
              currentBookLanguage={seriesMeta?.book_language ?? booksPage.items[0]?.language ?? null}
              currentDescription={seriesMeta?.description ?? null}
              currentStartYear={seriesMeta?.start_year ?? null}
              currentTotalVolumes={seriesMeta?.total_volumes ?? null}
              currentStatus={seriesMeta?.status ?? null}
              currentLockedFields={seriesMeta?.locked_fields ?? {}}
            />
            <ProwlarrSearchModal
              seriesName={seriesName}
              missingBooks={missingData?.missing_books ?? null}
            />
            <MetadataSearchModal
              libraryId={id}
              seriesName={seriesName}
              existingLink={existingLink}
              initialMissing={missingData}
            />
          </div>
        </div>
      </div>

      {/* Books Grid */}
      {books.length > 0 ? (
        <>
          <BooksGrid books={books} />
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={booksPage.total}
          />
        </>
      ) : (
        <EmptyState message={t("librarySeries.noBooksInSeries")} />
      )}
    </div>
  );
}
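Note the data-loading pattern in SeriesDetailPage, repeated across these pages: every call inside Promise.all carries its own .catch returning an empty fallback, so a single failing backend call degrades its section to an empty state instead of failing the whole server render. The same idea restated as a generic helper (hypothetical, not in the repo):

// Hypothetical restatement of the per-fetch fallback pattern used above.
async function orElse<T>(promise: Promise<T>, fallback: T): Promise<T> {
  try {
    return await promise;
  } catch {
    return fallback;
  }
}
// e.g. const metadataLinks = await orElse(getMetadataLink(id, seriesName), [] as ExternalMetadataLinkDto[]);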
144
apps/backoffice/app/(app)/libraries/[id]/series/page.tsx
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
import { fetchLibraries, fetchSeries, fetchSeriesStatuses, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "@/lib/api";
|
||||||
|
import { OffsetPagination } from "@/app/components/ui";
|
||||||
|
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
|
||||||
|
import { SeriesFilters } from "@/app/components/SeriesFilters";
|
||||||
|
import Image from "next/image";
|
||||||
|
import Link from "next/link";
|
||||||
|
import { notFound } from "next/navigation";
|
||||||
|
import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
|
||||||
|
import { getServerTranslations } from "@/lib/i18n/server";
|
||||||
|
|
||||||
|
export const dynamic = "force-dynamic";
|
||||||
|
|
||||||
|
export default async function LibrarySeriesPage({
|
||||||
|
params,
|
||||||
|
searchParams
|
||||||
|
}: {
|
||||||
|
params: Promise<{ id: string }>;
|
||||||
|
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
|
||||||
|
}) {
|
||||||
|
const { id } = await params;
|
||||||
|
const { t } = await getServerTranslations();
|
||||||
|
const searchParamsAwaited = await searchParams;
|
||||||
|
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
||||||
|
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
||||||
|
const seriesStatus = typeof searchParamsAwaited.series_status === "string" ? searchParamsAwaited.series_status : undefined;
|
||||||
|
const hasMissing = searchParamsAwaited.has_missing === "true";
|
||||||
|
|
||||||
|
const [library, seriesPage, dbStatuses] = await Promise.all([
|
||||||
|
fetchLibraries().then(libs => libs.find(l => l.id === id)),
|
||||||
|
fetchSeries(id, page, limit, seriesStatus, hasMissing).catch(() => ({ items: [] as SeriesDto[], total: 0, page: 1, limit }) as SeriesPageDto),
|
||||||
|
fetchSeriesStatuses().catch(() => [] as string[]),
|
||||||
|
]);
|
  if (!library) {
    notFound();
  }

  const series = seriesPage.items;
  const totalPages = Math.ceil(seriesPage.total / limit);

  const KNOWN_STATUSES: Record<string, string> = {
    ongoing: t("seriesStatus.ongoing"),
    ended: t("seriesStatus.ended"),
    hiatus: t("seriesStatus.hiatus"),
    cancelled: t("seriesStatus.cancelled"),
    upcoming: t("seriesStatus.upcoming"),
  };
  const seriesStatusOptions = [
    { value: "", label: t("seriesStatus.allStatuses") },
    ...dbStatuses.map((s) => ({ value: s, label: KNOWN_STATUSES[s] || s })),
  ];

  return (
    <div className="space-y-6">
      <LibrarySubPageHeader
        library={library}
        title={t("series.title")}
        icon={
          <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
          </svg>
        }
        iconColor="text-primary"
      />

      <SeriesFilters
        basePath={`/libraries/${id}/series`}
        currentSeriesStatus={seriesStatus}
        currentHasMissing={hasMissing}
        seriesStatusOptions={seriesStatusOptions}
      />

      {series.length > 0 ? (
        <>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 gap-6">
            {series.map((s) => (
              <Link
                key={s.name}
                href={`/libraries/${id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200 ${s.books_read_count >= s.book_count ? "opacity-50" : ""}`}>
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={t("books.coverOf", { name: s.name })}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 20vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                    </h3>
                    <div className="flex items-center justify-between mt-1">
                      <p className="text-xs text-muted-foreground">
                        {t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                      </p>
                      <MarkSeriesReadButton
                        seriesName={s.name}
                        bookCount={s.book_count}
                        booksReadCount={s.books_read_count}
                      />
                    </div>
                    <div className="flex items-center gap-1 mt-1.5 flex-wrap">
                      {s.series_status && (
                        <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
                          s.series_status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                          s.series_status === "ended" ? "bg-green-500/15 text-green-600" :
                          s.series_status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                          s.series_status === "cancelled" ? "bg-red-500/15 text-red-600" :
                          "bg-muted text-muted-foreground"
                        }`}>
                          {KNOWN_STATUSES[s.series_status] || s.series_status}
                        </span>
                      )}
                      {s.missing_count != null && s.missing_count > 0 && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-yellow-500/15 text-yellow-600">
                          {t("series.missingCount", { count: String(s.missing_count) })}
                        </span>
                      )}
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={seriesPage.total}
          />
        </>
      ) : (
        <div className="text-center py-12 text-muted-foreground">
          <p>{t("librarySeries.noSeries")}</p>
        </div>
      )}
    </div>
  );
}
227
apps/backoffice/app/(app)/libraries/page.tsx
Normal file
@@ -0,0 +1,227 @@
import { revalidatePath } from "next/cache";
import Image from "next/image";
import Link from "next/link";
import { listFolders, createLibrary, deleteLibrary, fetchLibraries, getBookCoverUrl, LibraryDto, FolderItem } from "@/lib/api";
import type { TranslationKey } from "@/lib/i18n/fr";
import { getServerTranslations } from "@/lib/i18n/server";
import { LibraryActions } from "@/app/components/LibraryActions";
import { LibraryForm } from "@/app/components/LibraryForm";
import { ProviderIcon } from "@/app/components/ProviderIcon";
import {
  Card, CardHeader, CardTitle, CardDescription, CardContent,
  Button, Badge
} from "@/app/components/ui";

export const dynamic = "force-dynamic";

function formatNextScan(nextScanAt: string | null, imminentLabel: string): string {
  if (!nextScanAt) return "-";
  const date = new Date(nextScanAt);
  const now = new Date();
  const diff = date.getTime() - now.getTime();

  if (diff < 0) return imminentLabel;
  if (diff < 60000) return "< 1 min";
  if (diff < 3600000) return `${Math.floor(diff / 60000)}m`;
  if (diff < 86400000) return `${Math.floor(diff / 3600000)}h`;
  return `${Math.floor(diff / 86400000)}d`;
}
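formatNextScan buckets the remaining delay to its largest whole unit rather than formatting a timestamp. Illustrative expected outputs, assuming a fixed current time (sample values chosen for this note, not taken from the diff):

// Illustrative only:
// formatNextScan(null, "imminent")  -> "-"
// 30 s in the future                -> "< 1 min"
// 45 min in the future              -> "45m"
// 5 h in the future                 -> "5h"
// 3 d in the future                 -> "3d"
// any time in the past              -> "imminent" (the translated label passed in)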
export default async function LibrariesPage() {
  const { t } = await getServerTranslations();
  const [libraries, folders] = await Promise.all([
    fetchLibraries().catch(() => [] as LibraryDto[]),
    listFolders().catch(() => [] as FolderItem[])
  ]);

  const thumbnailMap = new Map(
    libraries.map(lib => [
      lib.id,
      (lib.thumbnail_book_ids || []).map(bookId => getBookCoverUrl(bookId)),
    ])
  );

  async function addLibrary(formData: FormData) {
    "use server";
    const name = formData.get("name") as string;
    const rootPath = formData.get("root_path") as string;
    if (name && rootPath) {
      await createLibrary(name, rootPath);
      revalidatePath("/libraries");
    }
  }

  async function removeLibrary(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteLibrary(id);
    revalidatePath("/libraries");
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
          </svg>
          {t("libraries.title")}
        </h1>
      </div>

      {/* Add Library Form */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("libraries.addLibrary")}</CardTitle>
          <CardDescription>{t("libraries.addLibraryDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <LibraryForm initialFolders={folders} action={addLibrary} />
        </CardContent>
      </Card>

      {/* Libraries Grid */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
        {libraries.map((lib) => {
          const thumbnails = thumbnailMap.get(lib.id) || [];
          return (
            <Card key={lib.id} className="flex flex-col overflow-hidden">
              {/* Thumbnail fan */}
              {thumbnails.length > 0 ? (
                <Link href={`/libraries/${lib.id}/series`} className="block relative h-48 overflow-hidden bg-muted/10">
                  <Image
                    src={thumbnails[0]}
                    alt=""
                    fill
                    className="object-cover blur-xl scale-110 opacity-40"
                    sizes="(max-width: 768px) 100vw, 33vw"
                    loading="lazy"
                  />
                  <div className="absolute inset-0 flex items-end justify-center">
                    {thumbnails.map((url, i) => {
                      const count = thumbnails.length;
                      const mid = (count - 1) / 2;
                      const angle = (i - mid) * 12;
                      const radius = 220;
                      const rad = ((angle - 90) * Math.PI) / 180;
                      const cx = Math.cos(rad) * radius;
                      const cy = Math.sin(rad) * radius;
                      return (
                        <Image
                          key={i}
                          src={url}
                          alt=""
                          width={96}
                          height={144}
                          className="absolute object-cover shadow-lg"
                          style={{
                            transform: `translate(${cx}px, ${cy}px) rotate(${angle}deg)`,
                            transformOrigin: 'bottom center',
                            zIndex: count - Math.abs(Math.round(i - mid)),
                            bottom: '-185px',
                          }}
                          sizes="96px"
                          loading="lazy"
                        />
                      );
                    })}
                  </div>
                </Link>
              ) : (
                <div className="h-8 bg-muted/10" />
              )}

              <CardHeader className="pb-2">
                <div className="flex items-start justify-between">
                  <div>
                    <CardTitle className="text-lg">{lib.name}</CardTitle>
                    {!lib.enabled && <Badge variant="muted" className="mt-1">{t("libraries.disabled")}</Badge>}
                  </div>
                  <div className="flex items-center gap-1">
                    <LibraryActions
                      libraryId={lib.id}
                      monitorEnabled={lib.monitor_enabled}
                      scanMode={lib.scan_mode}
                      watcherEnabled={lib.watcher_enabled}
                      metadataProvider={lib.metadata_provider}
                      fallbackMetadataProvider={lib.fallback_metadata_provider}
                      metadataRefreshMode={lib.metadata_refresh_mode}
                    />
                    <form>
                      <input type="hidden" name="id" value={lib.id} />
                      <Button type="submit" variant="ghost" size="sm" formAction={removeLibrary} className="text-muted-foreground hover:text-destructive">
                        <svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                        </svg>
                      </Button>
                    </form>
                  </div>
                </div>
                <code className="text-xs font-mono text-muted-foreground break-all">{lib.root_path}</code>
              </CardHeader>
              <CardContent className="flex-1 pt-0">
                {/* Stats */}
                <div className="grid grid-cols-2 gap-3 mb-3">
                  <Link
                    href={`/libraries/${lib.id}/books`}
                    className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
                  >
                    <span className="block text-2xl font-bold text-primary">{lib.book_count}</span>
                    <span className="text-xs text-muted-foreground">{t("libraries.books")}</span>
                  </Link>
                  <Link
                    href={`/libraries/${lib.id}/series`}
                    className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
                  >
                    <span className="block text-2xl font-bold text-foreground">{lib.series_count}</span>
                    <span className="text-xs text-muted-foreground">{t("libraries.series")}</span>
                  </Link>
                </div>

                {/* Configuration tags */}
                <div className="flex flex-wrap gap-1.5">
                  <span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
                    lib.monitor_enabled
                      ? 'bg-success/10 text-success'
                      : 'bg-muted/50 text-muted-foreground'
                  }`}>
                    <span className="text-[9px]">{lib.monitor_enabled ? '●' : '○'}</span>
                    {t("libraries.scanLabel", { mode: t(`monitoring.${lib.scan_mode}` as TranslationKey) })}
                  </span>

                  <span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
                    lib.watcher_enabled
                      ? 'bg-warning/10 text-warning'
                      : 'bg-muted/50 text-muted-foreground'
                  }`}>
                    <span>{lib.watcher_enabled ? '⚡' : '○'}</span>
                    <span>{t("libraries.watcherLabel")}</span>
                  </span>

                  {lib.metadata_provider && lib.metadata_provider !== "none" && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-primary/10 text-primary">
                      <ProviderIcon provider={lib.metadata_provider} size={11} />
                      {lib.metadata_provider.replace('_', ' ')}
                    </span>
                  )}

                  {lib.metadata_refresh_mode !== "manual" && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
                      {t("libraries.metaRefreshLabel", { mode: t(`monitoring.${lib.metadata_refresh_mode}` as TranslationKey) })}
                    </span>
                  )}

                  {lib.monitor_enabled && lib.next_scan_at && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
                      {t("libraries.nextScan", { time: formatNextScan(lib.next_scan_at, t("libraries.imminent")) })}
                    </span>
                  )}
                </div>
              </CardContent>
            </Card>
          );
        })}
      </div>
    </>
  );
}
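The thumbnail fan above places each cover on a circular arc with plain trigonometry. A minimal standalone sketch of the same math, assuming nothing beyond what the JSX shows (the helper name fanTransform is hypothetical, not part of the diff):

// Hypothetical helper: the fan's polar-coordinate math, extracted for clarity.
// Cover i of count is spread at 12° steps around a circle of radius 220,
// measured from straight up (hence the -90° shift before converting to radians).
function fanTransform(i: number, count: number, step = 12, radius = 220) {
  const mid = (count - 1) / 2;                 // center index gets angle 0
  const angle = (i - mid) * step;              // signed degrees left/right of vertical
  const rad = ((angle - 90) * Math.PI) / 180;  // 0° points up instead of right
  return {
    x: Math.cos(rad) * radius,                 // horizontal offset in px
    y: Math.sin(rad) * radius,                 // vertical offset in px (negative = up)
    rotate: angle,                             // each cover is tilted by its own angle
  };
}
// e.g. with count = 3: i = 0 -> -12°, i = 1 -> 0° (dead center), i = 2 -> +12°.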
514
apps/backoffice/app/(app)/page.tsx
Normal file
@@ -0,0 +1,514 @@
import React from "react";
import { fetchStats, fetchUsers, StatsResponse, UserDto } from "@/lib/api";
import { Card, CardContent, CardHeader, CardTitle } from "@/app/components/ui";
import { RcDonutChart, RcBarChart, RcAreaChart, RcStackedBar, RcHorizontalBar, RcMultiLineChart } from "@/app/components/DashboardCharts";
import { PeriodToggle } from "@/app/components/PeriodToggle";
import { CurrentlyReadingList, RecentlyReadList } from "@/app/components/ReadingUserFilter";
import Link from "next/link";
import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";

export const dynamic = "force-dynamic";

function formatBytes(bytes: number): string {
  if (bytes === 0) return "0 B";
  const k = 1024;
  const sizes = ["B", "KB", "MB", "GB", "TB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return `${(bytes / Math.pow(k, i)).toFixed(1)} ${sizes[i]}`;
}
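For reference, the 1024-based bucketing in formatBytes yields, for example:

// Illustrative outputs (values chosen for this note):
// formatBytes(0)          -> "0 B"
// formatBytes(1536)       -> "1.5 KB"   (1536 / 1024 = 1.5)
// formatBytes(1048576)    -> "1.0 MB"
// formatBytes(5368709120) -> "5.0 GB"   (5 * 1024^3)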
function formatNumber(n: number, locale: string): string {
  return n.toLocaleString(locale === "fr" ? "fr-FR" : "en-US");
}

function formatChartLabel(raw: string, period: "day" | "week" | "month", locale: string): string {
  const loc = locale === "fr" ? "fr-FR" : "en-US";
  if (period === "month") {
    // raw = "YYYY-MM"
    const [y, m] = raw.split("-");
    const d = new Date(Number(y), Number(m) - 1, 1);
    return d.toLocaleDateString(loc, { month: "short" });
  }
  if (period === "week") {
    // raw = "YYYY-MM-DD" (Monday of the week)
    const d = new Date(raw + "T00:00:00");
    return d.toLocaleDateString(loc, { day: "numeric", month: "short" });
  }
  // day: raw = "YYYY-MM-DD"
  const d = new Date(raw + "T00:00:00");
  return d.toLocaleDateString(loc, { weekday: "short", day: "numeric" });
}
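A rough illustration of formatChartLabel for the three periods (en locale; the exact strings depend on the runtime's Intl data):

// formatChartLabel("2024-05", "month", "en")   -> "May"
// formatChartLabel("2024-05-06", "week", "en") -> "May 6"
// formatChartLabel("2024-05-06", "day", "en")  -> "Mon 6" (2024-05-06 was a Monday)
// Appending "T00:00:00" makes the string parse in local time rather than UTC,
// which keeps the label on the intended calendar day across timezones.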
// Horizontal progress bar for metadata quality (stays server-rendered, no recharts needed)
function HorizontalBar({ label, value, max, subLabel, color = "var(--color-primary)" }: { label: string; value: number; max: number; subLabel?: string; color?: string }) {
  const pct = max > 0 ? (value / max) * 100 : 0;
  return (
    <div className="space-y-1">
      <div className="flex justify-between text-sm">
        <span className="font-medium text-foreground truncate">{label}</span>
        <span className="text-muted-foreground shrink-0 ml-2">{subLabel || value}</span>
      </div>
      <div className="h-2 bg-muted rounded-full overflow-hidden">
        <div
          className="h-full rounded-full transition-all duration-500"
          style={{ width: `${pct}%`, backgroundColor: color }}
        />
      </div>
    </div>
  );
}

export default async function DashboardPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const searchParamsAwaited = await searchParams;
  const rawPeriod = searchParamsAwaited.period;
  const period = rawPeriod === "day" ? "day" as const : rawPeriod === "week" ? "week" as const : "month" as const;
  const { t, locale } = await getServerTranslations();

  let stats: StatsResponse | null = null;
  let users: UserDto[] = [];
  try {
    [stats, users] = await Promise.all([
      fetchStats(period),
      fetchUsers().catch(() => []),
    ]);
  } catch (e) {
    console.error("Failed to fetch stats:", e);
  }

  if (!stats) {
    return (
      <div className="max-w-5xl mx-auto">
        <div className="text-center mb-12">
          <h1 className="text-4xl font-bold tracking-tight mb-4 text-foreground">StripStream Backoffice</h1>
          <p className="text-lg text-muted-foreground">{t("dashboard.loadError")}</p>
        </div>
        <QuickLinks t={t} />
      </div>
    );
  }

  const {
    overview,
    reading_status,
    currently_reading = [],
    recently_read = [],
    reading_over_time = [],
    users_reading_over_time = [],
    by_format,
    by_library,
    top_series,
    additions_over_time,
    jobs_over_time = [],
    metadata = { total_series: 0, series_linked: 0, series_unlinked: 0, books_with_summary: 0, books_with_isbn: 0, by_provider: [] },
  } = stats;

  const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
  const formatColors = [
    "hsl(198 78% 37%)", "hsl(142 60% 45%)", "hsl(45 93% 47%)",
    "hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)",
    "hsl(170 60% 45%)", "hsl(220 60% 50%)",
  ];

  const noDataLabel = t("common.noData");

  return (
    <div className="max-w-7xl mx-auto space-y-6">
      {/* Header */}
      <div className="mb-2">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z" />
          </svg>
          {t("dashboard.title")}
        </h1>
        <p className="text-muted-foreground mt-2 max-w-2xl">
          {t("dashboard.subtitle")}
        </p>
      </div>

      {/* Overview stat cards */}
      <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-6 gap-4">
        <StatCard icon="book" label={t("dashboard.books")} value={formatNumber(overview.total_books, locale)} color="success" />
        <StatCard icon="series" label={t("dashboard.series")} value={formatNumber(overview.total_series, locale)} color="primary" />
        <StatCard icon="library" label={t("dashboard.libraries")} value={formatNumber(overview.total_libraries, locale)} color="warning" />
        <StatCard icon="pages" label={t("dashboard.pages")} value={formatNumber(overview.total_pages, locale)} color="primary" />
        <StatCard icon="author" label={t("dashboard.authors")} value={formatNumber(overview.total_authors, locale)} color="success" />
        <StatCard icon="size" label={t("dashboard.totalSize")} value={formatBytes(overview.total_size_bytes)} color="warning" />
      </div>

      {/* Currently reading + Recently read */}
      {(currently_reading.length > 0 || recently_read.length > 0) && (
        <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
          {/* Currently reading */}
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.currentlyReading")}</CardTitle>
            </CardHeader>
            <CardContent>
              <CurrentlyReadingList
                items={currently_reading}
                allLabel={t("dashboard.allUsers")}
                emptyLabel={t("dashboard.noCurrentlyReading")}
                pageProgressTemplate={t("dashboard.pageProgress")}
              />
            </CardContent>
          </Card>

          {/* Recently read */}
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.recentlyRead")}</CardTitle>
            </CardHeader>
            <CardContent>
              <RecentlyReadList
                items={recently_read}
                allLabel={t("dashboard.allUsers")}
                emptyLabel={t("dashboard.noRecentlyRead")}
              />
            </CardContent>
          </Card>
        </div>
      )}

      {/* Reading activity line chart */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.readingActivity")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          {(() => {
            const userColors = [
              "hsl(142 60% 45%)", "hsl(198 78% 37%)", "hsl(45 93% 47%)",
              "hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)",
            ];
            const usernames = [...new Set(users_reading_over_time.map(r => r.username))];
            if (usernames.length === 0) {
              return (
                <RcAreaChart
                  noDataLabel={noDataLabel}
                  data={reading_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_read }))}
                  color="hsl(142 60% 45%)"
                />
              );
            }
            // Pivot: { label, username1: n, username2: n, ... }
            const byMonth = new Map<string, Record<string, unknown>>();
            for (const row of users_reading_over_time) {
              const label = formatChartLabel(row.month, period, locale);
              if (!byMonth.has(row.month)) byMonth.set(row.month, { label });
              byMonth.get(row.month)![row.username] = row.books_read;
            }
            const chartData = [...byMonth.values()];
            const lines = usernames.map((u, i) => ({
              key: u,
              label: u,
              color: userColors[i % userColors.length],
            }));
            return <RcMultiLineChart data={chartData} lines={lines} noDataLabel={noDataLabel} />;
          })()}
        </CardContent>
      </Card>
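The IIFE above pivots one row per (period, user) into one record per period keyed by username, the shape RcMultiLineChart consumes. A standalone sketch of the same transform, with the row type inferred from usage (it is not an exported type in this diff):

// Assumed row shape, inferred from how users_reading_over_time is used above.
type UserReadRow = { month: string; username: string; books_read: number };

// Pivot rows into one record per period: { label, [username]: count, ... }.
function pivotByPeriod(rows: UserReadRow[], toLabel: (raw: string) => string) {
  const byMonth = new Map<string, Record<string, unknown>>();
  for (const row of rows) {
    if (!byMonth.has(row.month)) byMonth.set(row.month, { label: toLabel(row.month) });
    byMonth.get(row.month)![row.username] = row.books_read;
  }
  return [...byMonth.values()];
}
// e.g. [{ month: "2024-05", username: "ana", books_read: 3 },
//       { month: "2024-05", username: "bob", books_read: 1 }]
//   -> [{ label: "May", ana: 3, bob: 1 }]

Note that a user with no activity in a period is simply absent from that period's record, so the chart component has to treat a missing key as zero (or render a gap).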
      {/* Charts row */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
        {/* Reading status per reader */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.readingStatus")}</CardTitle>
          </CardHeader>
          <CardContent>
            {users.length === 0 ? (
              <RcDonutChart
                noDataLabel={noDataLabel}
                data={[
                  { name: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
                  { name: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
                  { name: t("status.read"), value: reading_status.read, color: readingColors[2] },
                ]}
              />
            ) : (
              <div className="space-y-3">
                {users.map((user) => {
                  const total = overview.total_books;
                  const read = user.books_read;
                  const reading = user.books_reading;
                  const unread = Math.max(0, total - read - reading);
                  const readPct = total > 0 ? (read / total) * 100 : 0;
                  const readingPct = total > 0 ? (reading / total) * 100 : 0;
                  return (
                    <div key={user.id} className="space-y-1">
                      <div className="flex items-center justify-between text-sm">
                        <span className="font-medium text-foreground truncate">{user.username}</span>
                        <span className="text-xs text-muted-foreground shrink-0 ml-2">
                          <span className="text-success font-medium">{read}</span>
                          {reading > 0 && <span className="text-amber-500 font-medium"> · {reading}</span>}
                          <span className="text-muted-foreground/60"> / {total}</span>
                        </span>
                      </div>
                      <div className="h-2 bg-muted rounded-full overflow-hidden flex">
                        <div className="h-full bg-success transition-all duration-500" style={{ width: `${readPct}%` }} />
                        <div className="h-full bg-amber-500 transition-all duration-500" style={{ width: `${readingPct}%` }} />
                      </div>
                    </div>
                  );
                })}
              </div>
            )}
          </CardContent>
        </Card>

        {/* By format donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byFormat")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={by_format.slice(0, 6).map((f, i) => ({
                name: (f.format || t("dashboard.unknown")).toUpperCase(),
                value: f.count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>

        {/* By library donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byLibrary")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={by_library.slice(0, 6).map((l, i) => ({
                name: l.library_name,
                value: l.book_count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>
      </div>

      {/* Metadata row */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
        {/* Series metadata coverage donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={[
                { name: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
                { name: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
              ]}
            />
          </CardContent>
        </Card>

        {/* By provider donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={metadata.by_provider.map((p, i) => ({
                name: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
                value: p.count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>

        {/* Book metadata quality */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.bookMetadata")}</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="space-y-4">
              <HorizontalBar
                label={t("dashboard.withSummary")}
                value={metadata.books_with_summary}
                max={overview.total_books}
                subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_summary / overview.total_books) * 100)}%` : "0%"}
                color="hsl(198 78% 37%)"
              />
              <HorizontalBar
                label={t("dashboard.withIsbn")}
                value={metadata.books_with_isbn}
                max={overview.total_books}
                subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_isbn / overview.total_books) * 100)}%` : "0%"}
                color="hsl(280 60% 50%)"
              />
            </div>
          </CardContent>
        </Card>
      </div>

      {/* Libraries breakdown + Top series */}
      <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
        {by_library.length > 0 && (
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
            </CardHeader>
            <CardContent>
              <RcStackedBar
                data={by_library.map((lib) => ({
                  name: lib.library_name,
                  read: lib.read_count,
                  reading: lib.reading_count,
                  unread: lib.unread_count,
                  sizeLabel: formatBytes(lib.size_bytes),
                }))}
                labels={{
                  read: t("status.read"),
                  reading: t("status.reading"),
                  unread: t("status.unread"),
                  books: t("dashboard.books"),
                }}
              />
            </CardContent>
          </Card>
        )}

        {/* Top series */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.popularSeries")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcHorizontalBar
              noDataLabel={t("dashboard.noSeries")}
              data={top_series.slice(0, 8).map((s) => ({
                name: s.series,
                value: s.book_count,
                subLabel: t("dashboard.readCount", { read: s.read_count, total: s.book_count }),
              }))}
              color="hsl(142 60% 45%)"
            />
          </CardContent>
        </Card>
      </div>

      {/* Additions line chart – full width */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          <RcAreaChart
            noDataLabel={noDataLabel}
            data={additions_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_added }))}
            color="hsl(198 78% 37%)"
          />
        </CardContent>
      </Card>

      {/* Jobs over time – multi-line chart */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.jobsOverTime")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          <RcMultiLineChart
            noDataLabel={noDataLabel}
            data={jobs_over_time.map((j) => ({
              label: formatChartLabel(j.label, period, locale),
              scan: j.scan,
              rebuild: j.rebuild,
              thumbnail: j.thumbnail,
              other: j.other,
            }))}
            lines={[
              { key: "scan", label: t("dashboard.jobScan"), color: "hsl(198 78% 37%)" },
              { key: "rebuild", label: t("dashboard.jobRebuild"), color: "hsl(142 60% 45%)" },
              { key: "thumbnail", label: t("dashboard.jobThumbnail"), color: "hsl(45 93% 47%)" },
              { key: "other", label: t("dashboard.jobOther"), color: "hsl(280 60% 50%)" },
            ]}
          />
        </CardContent>
      </Card>

      {/* Quick links */}
      <QuickLinks t={t} />
    </div>
  );
}

function StatCard({ icon, label, value, color }: { icon: string; label: string; value: string; color: string }) {
  const icons: Record<string, React.ReactNode> = {
    book: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />,
    series: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />,
    library: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />,
    pages: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" />,
    author: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z" />,
    size: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4" />,
  };

  const colorClasses: Record<string, string> = {
    primary: "bg-primary/10 text-primary",
    success: "bg-success/10 text-success",
    warning: "bg-warning/10 text-warning",
  };

  return (
    <Card hover={false} className="p-4">
      <div className="flex items-center gap-3">
        <div className={`w-10 h-10 rounded-lg flex items-center justify-center shrink-0 ${colorClasses[color]}`}>
          <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            {icons[icon]}
          </svg>
        </div>
        <div className="min-w-0">
          <p className="text-xl font-bold text-foreground leading-tight">{value}</p>
          <p className="text-xs text-muted-foreground">{label}</p>
        </div>
      </div>
    </Card>
  );
}

function QuickLinks({ t }: { t: TranslateFunction }) {
  const links = [
    { href: "/libraries", label: t("nav.libraries"), bg: "bg-primary/10", text: "text-primary", hoverBg: "group-hover:bg-primary", hoverText: "group-hover:text-primary-foreground", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" /> },
    { href: "/books", label: t("nav.books"), bg: "bg-success/10", text: "text-success", hoverBg: "group-hover:bg-success", hoverText: "group-hover:text-white", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" /> },
    { href: "/series", label: t("nav.series"), bg: "bg-warning/10", text: "text-warning", hoverBg: "group-hover:bg-warning", hoverText: "group-hover:text-white", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" /> },
    { href: "/jobs", label: t("nav.jobs"), bg: "bg-destructive/10", text: "text-destructive", hoverBg: "group-hover:bg-destructive", hoverText: "group-hover:text-destructive-foreground", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" /> },
  ];

  return (
    <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
      {links.map((l) => (
        <Link
          key={l.href}
          href={l.href as any}
          className="group p-4 bg-card/80 backdrop-blur-sm rounded-xl border border-border/50 shadow-sm hover:shadow-md hover:-translate-y-0.5 transition-all duration-200 flex items-center gap-3"
        >
          <div className={`w-9 h-9 rounded-lg flex items-center justify-center transition-colors duration-200 ${l.bg} ${l.hoverBg}`}>
            <svg className={`w-5 h-5 ${l.text} ${l.hoverText}`} fill="none" stroke="currentColor" viewBox="0 0 24 24">
              {l.icon}
            </svg>
          </div>
          <span className="font-medium text-foreground text-sm">{l.label}</span>
        </Link>
      ))}
    </div>
  );
}
208
apps/backoffice/app/(app)/series/page.tsx
Normal file
@@ -0,0 +1,208 @@
import { fetchAllSeries, fetchLibraries, fetchSeriesStatuses, LibraryDto, SeriesDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";
import { ProviderIcon } from "@/app/components/ProviderIcon";

export const dynamic = "force-dynamic";

export default async function SeriesPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  const seriesStatus = typeof searchParamsAwaited.series_status === "string" ? searchParamsAwaited.series_status : undefined;
  const hasMissing = searchParamsAwaited.has_missing === "true";
  const metadataProvider = typeof searchParamsAwaited.metadata_provider === "string" ? searchParamsAwaited.metadata_provider : undefined;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;

  const [libraries, seriesPage, dbStatuses] = await Promise.all([
    fetchLibraries().catch(() => [] as LibraryDto[]),
    fetchAllSeries(libraryId, searchQuery || undefined, readingStatus, page, limit, sort, seriesStatus, hasMissing, metadataProvider).catch(
      () => ({ items: [] as SeriesDto[], total: 0, page: 1, limit }) as SeriesPageDto
    ),
    fetchSeriesStatuses().catch(() => [] as string[]),
  ]);

  const series = seriesPage.items;
  const totalPages = Math.ceil(seriesPage.total / limit);
  const sortOptions = [
    { value: "", label: t("books.sortTitle") },
    { value: "latest", label: t("books.sortLatest") },
  ];

  const hasFilters = searchQuery || libraryId || readingStatus || sort || seriesStatus || hasMissing || metadataProvider;

  const libraryOptions = [
    { value: "", label: t("books.allLibraries") },
    ...libraries.map((lib) => ({ value: lib.id, label: lib.name })),
  ];

  const statusOptions = [
    { value: "", label: t("common.all") },
    { value: "unread", label: t("status.unread") },
    { value: "reading", label: t("status.reading") },
    { value: "read", label: t("status.read") },
  ];

  const KNOWN_STATUSES: Record<string, string> = {
    ongoing: t("seriesStatus.ongoing"),
    ended: t("seriesStatus.ended"),
    hiatus: t("seriesStatus.hiatus"),
    cancelled: t("seriesStatus.cancelled"),
    upcoming: t("seriesStatus.upcoming"),
  };
  const seriesStatusOptions = [
    { value: "", label: t("seriesStatus.allStatuses") },
    ...dbStatuses.map((s) => ({ value: s, label: KNOWN_STATUSES[s] || s })),
  ];

  const missingOptions = [
    { value: "", label: t("common.all") },
    { value: "true", label: t("series.missingBooks") },
  ];

  const metadataOptions = [
    { value: "", label: t("series.metadataAll") },
    { value: "linked", label: t("series.metadataLinked") },
    { value: "unlinked", label: t("series.metadataUnlinked") },
    { value: "google_books", label: "Google Books" },
    { value: "open_library", label: "Open Library" },
    { value: "comicvine", label: "ComicVine" },
    { value: "anilist", label: "AniList" },
    { value: "bedetheque", label: "Bédéthèque" },
  ];

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
          </svg>
          {t("series.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/series"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder") },
              { name: "library", type: "select", label: t("books.library"), options: libraryOptions },
              { name: "status", type: "select", label: t("series.reading"), options: statusOptions },
              { name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions },
              { name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions },
              { name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>

      {/* Results count */}
      <p className="text-sm text-muted-foreground mb-4">
        {seriesPage.total} {t("series.title").toLowerCase()}
        {searchQuery && <> {t("series.matchingQuery")} "{searchQuery}"</>}
      </p>

      {/* Series Grid */}
      {series.length > 0 ? (
        <>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
            {series.map((s) => (
              <Link
                key={s.name}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div
                  className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 ${
                    s.books_read_count >= s.book_count ? "opacity-50" : ""
                  }`}
                >
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={t("books.coverOf", { name: s.name })}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                    </h3>
                    <div className="flex items-center justify-between mt-1">
                      <p className="text-xs text-muted-foreground">
                        {t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                      </p>
                      <MarkSeriesReadButton
                        seriesName={s.name}
                        bookCount={s.book_count}
                        booksReadCount={s.books_read_count}
                      />
                    </div>
                    <div className="flex items-center gap-1 mt-1.5 flex-wrap">
                      {s.series_status && (
                        <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
                          s.series_status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                          s.series_status === "ended" ? "bg-green-500/15 text-green-600" :
                          s.series_status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                          s.series_status === "cancelled" ? "bg-red-500/15 text-red-600" :
                          "bg-muted text-muted-foreground"
                        }`}>
                          {KNOWN_STATUSES[s.series_status] || s.series_status}
                        </span>
                      )}
                      {s.missing_count != null && s.missing_count > 0 && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-yellow-500/15 text-yellow-600">
                          {t("series.missingCount", { count: String(s.missing_count), plural: s.missing_count > 1 ? "s" : "" })}
                        </span>
                      )}
                      {s.metadata_provider && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-purple-500/15 text-purple-600 inline-flex items-center gap-0.5">
                          <ProviderIcon provider={s.metadata_provider} size={10} />
                        </span>
                      )}
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={seriesPage.total}
          />
        </>
      ) : (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <div className="w-16 h-16 mb-4 text-muted-foreground/30">
            <svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
            </svg>
          </div>
          <p className="text-muted-foreground text-lg">
            {hasFilters ? t("series.noResults") : t("series.noSeries")}
          </p>
        </div>
      )}
    </>
  );
}
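Both this page and the per-library series page derive their pager the same way: offset pagination with totalPages = Math.ceil(total / limit). Illustrative arithmetic, assuming the default limit of 20:

// total = 0  -> totalPages = 0 (the empty-state branch renders instead of the grid)
// total = 20 -> totalPages = 1
// total = 21 -> totalPages = 2 (Math.ceil rounds the partial page up)
// and the corresponding backend offset for a given page would be (page - 1) * limit.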
1755
apps/backoffice/app/(app)/settings/SettingsPage.tsx
Normal file
File diff suppressed because it is too large

@@ -1,4 +1,4 @@
-import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
+import { getSettings, getCacheStats, getThumbnailStats, fetchUsers } from "@/lib/api";
 import SettingsPage from "./SettingsPage";

 export const dynamic = "force-dynamic";
@@ -23,5 +23,7 @@ export default async function SettingsPageWrapper() {
     directory: "/data/thumbnails"
   }));

-  return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
+  const users = await fetchUsers().catch(() => []);
+
+  return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} users={users} />;
 }
316
apps/backoffice/app/(app)/tokens/page.tsx
Normal file
@@ -0,0 +1,316 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listTokens, createToken, revokeToken, deleteToken, updateToken, fetchUsers, createUser, deleteUser, updateUser, TokenDto, UserDto } from "@/lib/api";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, Badge, FormField, FormInput, FormSelect, FormRow } from "@/app/components/ui";
import { TokenUserSelect } from "@/app/components/TokenUserSelect";
import { UsernameEdit } from "@/app/components/UsernameEdit";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function TokensPage({
  searchParams
}: {
  searchParams: Promise<{ created?: string }>;
}) {
  const { t } = await getServerTranslations();
  const params = await searchParams;
  const tokens = await listTokens().catch(() => [] as TokenDto[]);
  const users = await fetchUsers().catch(() => [] as UserDto[]);

  async function createTokenAction(formData: FormData) {
    "use server";
    const name = formData.get("name") as string;
    const scope = formData.get("scope") as string;
    const userId = (formData.get("user_id") as string) || undefined;
    if (name) {
      const result = await createToken(name, scope, userId);
      revalidatePath("/tokens");
      redirect(`/tokens?created=${encodeURIComponent(result.token)}`);
    }
  }

  async function revokeTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await revokeToken(id);
    revalidatePath("/tokens");
  }

  async function deleteTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteToken(id);
    revalidatePath("/tokens");
  }

  async function createUserAction(formData: FormData) {
    "use server";
    const username = formData.get("username") as string;
    if (username) {
      await createUser(username);
      revalidatePath("/tokens");
    }
  }

  async function deleteUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteUser(id);
    revalidatePath("/tokens");
  }

  async function renameUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const username = formData.get("username") as string;
    if (username?.trim()) {
      await updateUser(id, username.trim());
      revalidatePath("/tokens");
    }
  }

  async function reassignTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const userId = (formData.get("user_id") as string) || null;
    await updateToken(id, userId);
    revalidatePath("/tokens");
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-destructive" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z" />
          </svg>
          {t("tokens.title")}
        </h1>
      </div>

      {/* ── Readers ──────────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("users.title")}</h2>
      </div>

      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("users.createNew")}</CardTitle>
          <CardDescription>{t("users.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createUserAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="username" placeholder={t("users.username")} required autoComplete="off" />
              </FormField>
              <Button type="submit">{t("users.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>

      <Card className="overflow-hidden mb-10">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.tokenCount")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.read")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.reading")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.createdAt")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.actions")}</th>
              </tr>
            </thead>
            <tbody className="divide-y divide-border/60">
              {/* Synthetic admin row */}
              <tr className="hover:bg-accent/50 transition-colors bg-destructive/5">
                <td className="px-4 py-3 text-sm font-medium text-foreground flex items-center gap-2">
                  {process.env.ADMIN_USERNAME ?? "admin"}
                  <Badge variant="destructive">{t("tokens.scopeAdmin")}</Badge>
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground">
                  {tokens.filter(tok => tok.scope === "admin" && !tok.revoked_at).length}
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
              </tr>
              {/* Row for unassigned read tokens */}
              {(() => {
                const unassigned = tokens.filter(tok => tok.scope === "read" && !tok.user_id && !tok.revoked_at);
                if (unassigned.length === 0) return null;
                return (
                  <tr className="hover:bg-accent/50 transition-colors bg-warning/5">
                    <td className="px-4 py-3 text-sm font-medium text-muted-foreground italic">
                      {t("tokens.noUser")}
                    </td>
                    <td className="px-4 py-3 text-sm text-warning font-medium">{unassigned.length}</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                  </tr>
                );
              })()}
              {users.map((user) => (
                <tr key={user.id} className="hover:bg-accent/50 transition-colors">
                  <td className="px-4 py-3">
                    <UsernameEdit userId={user.id} currentUsername={user.username} action={renameUserAction} />
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">{user.token_count}</td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_read > 0
                      ? <span className="font-medium text-success">{user.books_read}</span>
                      : <span className="text-muted-foreground/50">—</span>}
                  </td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_reading > 0
                      ? <span className="font-medium text-amber-500">{user.books_reading}</span>
                      : <span className="text-muted-foreground/50">—</span>}
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">
                    {new Date(user.created_at).toLocaleDateString()}
                  </td>
                  <td className="px-4 py-3">
                    <form action={deleteUserAction}>
                      <input type="hidden" name="id" value={user.id} />
                      <Button type="submit" variant="destructive" size="sm">
                        <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                        </svg>
                        {t("common.delete")}
                      </Button>
                    </form>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </Card>

      {/* ── API tokens ───────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("tokens.apiTokens")}</h2>
      </div>

      {params.created ? (
        <Card className="mb-6 border-success/50 bg-success/5">
          <CardHeader>
            <CardTitle className="text-success">{t("tokens.created")}</CardTitle>
            <CardDescription>{t("tokens.createdDescription")}</CardDescription>
          </CardHeader>
          <CardContent>
            <pre className="p-4 bg-background rounded-lg text-sm font-mono text-foreground overflow-x-auto border">{params.created}</pre>
          </CardContent>
        </Card>
      ) : null}

      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("tokens.createNew")}</CardTitle>
          <CardDescription>{t("tokens.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createTokenAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="name" placeholder={t("tokens.tokenName")} required autoComplete="off" />
              </FormField>
              <FormField className="w-32">
                <FormSelect name="scope" defaultValue="read">
                  <option value="read">{t("tokens.scopeRead")}</option>
                  <option value="admin">{t("tokens.scopeAdmin")}</option>
                </FormSelect>
              </FormField>
              <FormField className="w-48">
                <FormSelect name="user_id" defaultValue="">
                  <option value="">{t("tokens.noUser")}</option>
                  {users.map((user) => (
                    <option key={user.id} value={user.id}>{user.username}</option>
                  ))}
                </FormSelect>
              </FormField>
              <Button type="submit">{t("tokens.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>

      <Card className="overflow-hidden">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.user")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.scope")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.prefix")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.status")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.actions")}</th>
              </tr>
|
</thead>
|
||||||
|
<tbody className="divide-y divide-border/60">
|
||||||
|
{tokens.map((token) => (
|
||||||
|
<tr key={token.id} className="hover:bg-accent/50 transition-colors">
|
||||||
|
<td className="px-4 py-3 text-sm text-foreground">{token.name}</td>
|
||||||
|
<td className="px-4 py-3 text-sm">
|
||||||
|
<TokenUserSelect
|
||||||
|
tokenId={token.id}
|
||||||
|
currentUserId={token.user_id}
|
||||||
|
users={users}
|
||||||
|
action={reassignTokenAction}
|
||||||
|
noUserLabel={t("tokens.noUser")}
|
||||||
|
/>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3 text-sm">
|
||||||
|
<Badge variant={token.scope === "admin" ? "destructive" : "secondary"}>
|
||||||
|
{token.scope}
|
||||||
|
</Badge>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3 text-sm">
|
||||||
|
<code className="px-2 py-1 bg-muted rounded font-mono text-foreground">{token.prefix}</code>
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3 text-sm">
|
||||||
|
{token.revoked_at ? (
|
||||||
|
<Badge variant="error">{t("tokens.revoked")}</Badge>
|
||||||
|
) : (
|
||||||
|
<Badge variant="success">{t("tokens.active")}</Badge>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
<td className="px-4 py-3">
|
||||||
|
{!token.revoked_at ? (
|
||||||
|
<form action={revokeTokenAction}>
|
||||||
|
<input type="hidden" name="id" value={token.id} />
|
||||||
|
<Button type="submit" variant="destructive" size="sm">
|
||||||
|
<svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
|
||||||
|
</svg>
|
||||||
|
{t("tokens.revoke")}
|
||||||
|
</Button>
|
||||||
|
</form>
|
||||||
|
) : (
|
||||||
|
<form action={deleteTokenAction}>
|
||||||
|
<input type="hidden" name="id" value={token.id} />
|
||||||
|
<Button type="submit" variant="destructive" size="sm">
|
||||||
|
<svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
|
||||||
|
</svg>
|
||||||
|
{t("common.delete")}
|
||||||
|
</Button>
|
||||||
|
</form>
|
||||||
|
)}
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
))}
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</div>
|
||||||
|
</Card>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
apps/backoffice/app/api/auth/login/route.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
import { NextRequest, NextResponse } from "next/server";
import { createSessionToken, SESSION_COOKIE } from "@/lib/session";

export async function POST(req: NextRequest) {
  const body = await req.json().catch(() => null);
  if (!body || typeof body.username !== "string" || typeof body.password !== "string") {
    return NextResponse.json({ error: "Invalid request" }, { status: 400 });
  }

  const expectedUsername = process.env.ADMIN_USERNAME || "admin";
  const expectedPassword = process.env.ADMIN_PASSWORD;

  if (!expectedPassword) {
    return NextResponse.json({ error: "Server misconfiguration" }, { status: 500 });
  }

  if (body.username !== expectedUsername || body.password !== expectedPassword) {
    return NextResponse.json({ error: "Invalid credentials" }, { status: 401 });
  }

  const token = await createSessionToken();
  const response = NextResponse.json({ success: true });
  response.cookies.set(SESSION_COOKIE, token, {
    httpOnly: true,
    secure: process.env.NODE_ENV === "production",
    sameSite: "lax",
    maxAge: 7 * 24 * 60 * 60,
    path: "/",
  });
  return response;
}
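For context, the login endpoint above is meant to be called from the backoffice login form. A minimal client-side sketch (illustrative only; the function name and error handling are not part of the diff, but the path, payload shape, and status codes come from the route above):

// Minimal sketch of a client calling the login route (not part of the diff).
async function login(username: string, password: string): Promise<boolean> {
  const res = await fetch("/api/auth/login", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ username, password }),
  });
  // 400: malformed body; 401: wrong credentials; 500: ADMIN_PASSWORD unset on the server.
  // On success the httpOnly session cookie is set by the response, so nothing to store here.
  return res.ok;
}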
apps/backoffice/app/api/auth/logout/route.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import { NextResponse } from "next/server";
import { SESSION_COOKIE } from "@/lib/session";

export async function POST() {
  const response = NextResponse.json({ success: true });
  response.cookies.delete(SESSION_COOKIE);
  return response;
}
@@ -28,12 +28,9 @@ export async function GET(
       });
     }
 
-    // Get the content-type and the data
     const contentType = response.headers.get("content-type") || "image/webp";
-    const imageBuffer = await response.arrayBuffer();
 
-    // Return the image with the correct content-type
-    return new NextResponse(imageBuffer, {
+    return new NextResponse(response.body, {
       headers: {
         "Content-Type": contentType,
         "Cache-Control": "public, max-age=300",
apps/backoffice/app/api/books/[bookId]/progress/route.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { updateReadingProgress } from "@/lib/api";

export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  const { bookId } = await params;
  try {
    const body = await request.json();
    const data = await updateReadingProgress(bookId, body.status, body.current_page ?? undefined);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to update reading progress";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/books/[bookId]/route.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { updateBook } from "@/lib/api";

export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  const { bookId } = await params;
  try {
    const body = await request.json();
    const data = await updateBook(bookId, body);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to update book";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
@@ -9,10 +9,25 @@ export async function GET(
 
   try {
     const { baseUrl, token } = config();
+    const ifNoneMatch = request.headers.get("if-none-match");
+
+    const fetchHeaders: Record<string, string> = {
+      Authorization: `Bearer ${token}`,
+    };
+    if (ifNoneMatch) {
+      fetchHeaders["If-None-Match"] = ifNoneMatch;
+    }
 
     const response = await fetch(`${baseUrl}/books/${bookId}/thumbnail`, {
-      headers: { Authorization: `Bearer ${token}` },
+      headers: fetchHeaders,
+      next: { revalidate: 86400 },
     });
 
+    // Forward 304 Not Modified as-is
+    if (response.status === 304) {
+      return new NextResponse(null, { status: 304 });
+    }
+
     if (!response.ok) {
       return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
         status: response.status
@@ -20,14 +35,17 @@ export async function GET(
     }
 
     const contentType = response.headers.get("content-type") || "image/webp";
-    const imageBuffer = await response.arrayBuffer();
+    const etag = response.headers.get("etag");
 
-    return new NextResponse(imageBuffer, {
-      headers: {
-        "Content-Type": contentType,
-        "Cache-Control": "public, max-age=31536000, immutable",
-      },
-    });
+    const headers: Record<string, string> = {
+      "Content-Type": contentType,
+      "Cache-Control": "public, max-age=31536000, immutable",
+    };
+    if (etag) {
+      headers["ETag"] = etag;
+    }
+
+    return new NextResponse(response.body, { headers });
   } catch (error) {
     console.error("Error fetching thumbnail:", error);
     return new NextResponse("Failed to fetch thumbnail", { status: 500 });
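Taken together, the two hunks above switch the thumbnail proxy from buffering to streaming and make it a conditional-request passthrough: the browser's If-None-Match header is forwarded to the backend, a backend 304 is relayed untouched, and the backend's ETag is echoed on 200 responses. A rough sketch of the resulting round trip (the book id and ETag value are made up):

// First request: full download, server replies 200 with an ETag such as "abc".
// Revalidation: the ETag is sent back and an empty 304 comes out of the proxy.
const res = await fetch("/api/books/123/thumbnail", {
  headers: { "If-None-Match": '"abc"' },
});
if (res.status === 304) {
  // Cached copy is still valid; no image bytes were transferred.
}

In practice the browser drives this automatically via the Cache-Control and ETag headers; the sketch only makes the mechanism explicit.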
@@ -15,6 +15,7 @@ export async function GET(
 
   let lastData: string | null = null;
   let isActive = true;
+  let consecutiveErrors = 0;
 
   const fetchJob = async () => {
     if (!isActive) return;
@@ -25,6 +26,7 @@ export async function GET(
       });
 
       if (response.ok && isActive) {
+        consecutiveErrors = 0;
         const data = await response.json();
         const dataStr = JSON.stringify(data);
 
@@ -54,7 +56,11 @@ export async function GET(
       }
     } catch (error) {
       if (isActive) {
-        console.error("SSE fetch error:", error);
+        consecutiveErrors++;
+        // Only log first failure and every 60th to avoid spam
+        if (consecutiveErrors === 1 || consecutiveErrors % 60 === 0) {
+          console.warn(`SSE fetch error (${consecutiveErrors} consecutive):`, error);
+        }
       }
     }
   };
@@ -10,6 +10,8 @@ export async function GET(request: NextRequest) {
 
   let lastData: string | null = null;
   let isActive = true;
+  let consecutiveErrors = 0;
+  let intervalId: ReturnType<typeof setInterval> | null = null;
 
   const fetchJobs = async () => {
     if (!isActive) return;
@@ -20,45 +22,51 @@ export async function GET(request: NextRequest) {
       });
 
       if (response.ok && isActive) {
+        consecutiveErrors = 0;
         const data = await response.json();
         const dataStr = JSON.stringify(data);
 
-        // Send if data changed
+        // Send only if data changed
        if (dataStr !== lastData && isActive) {
           lastData = dataStr;
           try {
             controller.enqueue(
               new TextEncoder().encode(`data: ${dataStr}\n\n`)
             );
-          } catch (err) {
-            // Controller closed, ignore
+          } catch {
             isActive = false;
           }
         }
+
+        // Adapt interval: 2s when active jobs exist, 15s when idle
+        const hasActiveJobs = data.some((j: { status: string }) =>
+          j.status === "running" || j.status === "pending" || j.status === "extracting_pages" || j.status === "generating_thumbnails"
+        );
+        const nextInterval = hasActiveJobs ? 2000 : 15000;
+        restartInterval(nextInterval);
       }
     } catch (error) {
       if (isActive) {
-        console.error("SSE fetch error:", error);
+        consecutiveErrors++;
+        if (consecutiveErrors === 1 || consecutiveErrors % 30 === 0) {
+          console.warn(`SSE fetch error (${consecutiveErrors} consecutive):`, error);
+        }
       }
     }
   };
 
-  // Initial fetch
-  await fetchJobs();
+  const restartInterval = (ms: number) => {
+    if (intervalId !== null) clearInterval(intervalId);
+    intervalId = setInterval(fetchJobs, ms);
+  };
 
-  // Poll every 2 seconds
-  const interval = setInterval(async () => {
-    if (!isActive) {
-      clearInterval(interval);
-      return;
-    }
-    await fetchJobs();
-  }, 2000);
+  // Initial fetch + start polling
+  await fetchJobs();
 
   // Cleanup
   request.signal.addEventListener("abort", () => {
     isActive = false;
-    clearInterval(interval);
+    if (intervalId !== null) clearInterval(intervalId);
     controller.close();
   });
 },
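On the consuming side this route is presumably read with an EventSource; the sketch below is illustrative (the endpoint path and job shape are assumptions, not shown in the diff). It also shows why the adaptive interval matters: the server now polls the backend every 2 s only while jobs are active and backs off to 15 s when idle, while the SSE connection itself stays open either way.

// Illustrative consumer for the jobs SSE route; the URL is assumed.
const source = new EventSource("/api/jobs/stream");
source.onmessage = (event) => {
  // The server only emits when the payload changed, so every message is a real update.
  const jobs = JSON.parse(event.data) as { id: string; status: string }[];
  console.log("jobs updated:", jobs);
};
// EventSource reconnects on its own after errors; close it explicitly when leaving the page.
window.addEventListener("beforeunload", () => source.close());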
apps/backoffice/app/api/komga/reports/[id]/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { getKomgaReport } from "@/lib/api";

export async function GET(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> },
) {
  try {
    const { id } = await params;
    const data = await getKomgaReport(id);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch report";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/komga/reports/route.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { listKomgaReports } from "@/lib/api";

export async function GET() {
  try {
    const data = await listKomgaReports();
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch reports";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/komga/sync/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch("/komga/sync", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to sync with Komga";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
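From here on, most of the new route handlers are thin proxies over an apiFetch helper from @/lib/api. Its real implementation is not part of this diff; as a mental model it presumably looks something like the sketch below, where the environment variable names and error handling are assumptions:

// Hypothetical shape of the apiFetch helper the proxy routes rely on (not the actual code).
async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
  const res = await fetch(`${process.env.API_BASE_URL}${path}`, {
    ...init,
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${process.env.API_TOKEN}`,
      ...init?.headers,
    },
  });
  if (!res.ok) throw new Error(`API request failed: ${res.status}`);
  return res.json() as Promise<T>;
}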
@@ -0,0 +1,20 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, LibraryDto } from "@/lib/api";

export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  try {
    const body = await request.json();
    const data = await apiFetch<LibraryDto>(`/libraries/${id}/metadata-provider`, {
      method: "PATCH",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to update metadata provider";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
@@ -7,8 +7,8 @@ export async function PATCH(
 ) {
   const { id } = await params;
   try {
-    const { monitor_enabled, scan_mode, watcher_enabled } = await request.json();
-    const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled);
+    const { monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode } = await request.json();
+    const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode);
     return NextResponse.json(data);
   } catch (error) {
     const message = error instanceof Error ? error.message : "Failed to update monitoring settings";
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { fetchSeriesMetadata } from "@/lib/api";

export async function GET(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string; name: string }> }
) {
  const { id, name } = await params;
  try {
    const data = await fetchSeriesMetadata(id, name);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch series metadata";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { updateSeries } from "@/lib/api";

export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; name: string }> }
) {
  const { id, name } = await params;
  try {
    const body = await request.json();
    const data = await updateSeries(id, name, body);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to update series";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/approve/route.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const { id, ...rest } = body;
    const data = await apiFetch<{ status: string; books_synced: number }>(`/metadata/approve/${id}`, {
      method: "POST",
      body: JSON.stringify(rest),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to approve metadata";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/batch/report/route.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, MetadataBatchReportDto } from "@/lib/api";

export async function GET(request: NextRequest) {
  try {
    const { searchParams } = new URL(request.url);
    const id = searchParams.get("id");
    if (!id) {
      return NextResponse.json({ error: "id is required" }, { status: 400 });
    }
    const data = await apiFetch<MetadataBatchReportDto>(`/metadata/batch/${id}/report`);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch report";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/batch/results/route.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, MetadataBatchResultDto } from "@/lib/api";

export async function GET(request: NextRequest) {
  try {
    const { searchParams } = new URL(request.url);
    const id = searchParams.get("id");
    if (!id) {
      return NextResponse.json({ error: "id is required" }, { status: 400 });
    }
    const status = searchParams.get("status") || "";
    const params = status ? `?status=${status}` : "";
    const data = await apiFetch<MetadataBatchResultDto[]>(`/metadata/batch/${id}/results${params}`);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch results";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/batch/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<{ id: string; status: string }>("/metadata/batch", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to start batch";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/links/route.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, ExternalMetadataLinkDto } from "@/lib/api";

export async function GET(request: NextRequest) {
  try {
    const { searchParams } = new URL(request.url);
    const libraryId = searchParams.get("library_id") || "";
    const seriesName = searchParams.get("series_name") || "";
    const params = new URLSearchParams();
    if (libraryId) params.set("library_id", libraryId);
    if (seriesName) params.set("series_name", seriesName);
    const data = await apiFetch<ExternalMetadataLinkDto[]>(`/metadata/links?${params.toString()}`);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch metadata links";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}

export async function DELETE(request: NextRequest) {
  try {
    const { searchParams } = new URL(request.url);
    const id = searchParams.get("id");
    if (!id) {
      return NextResponse.json({ error: "id is required" }, { status: 400 });
    }
    const data = await apiFetch<{ deleted: boolean }>(`/metadata/links/${id}`, {
      method: "DELETE",
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to delete metadata link";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/match/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, ExternalMetadataLinkDto } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<ExternalMetadataLinkDto>("/metadata/match", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to create metadata match";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/missing/route.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, MissingBooksDto } from "@/lib/api";

export async function GET(request: NextRequest) {
  try {
    const { searchParams } = new URL(request.url);
    const id = searchParams.get("id");
    if (!id) {
      return NextResponse.json({ error: "id is required" }, { status: 400 });
    }
    const data = await apiFetch<MissingBooksDto>(`/metadata/missing/${id}`);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to fetch missing books";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/refresh/report/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET(request: NextRequest) {
  try {
    const jobId = request.nextUrl.searchParams.get("job_id");
    if (!jobId) {
      return NextResponse.json({ error: "job_id required" }, { status: 400 });
    }
    const data = await apiFetch(`/metadata/refresh/${jobId}/report`);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to get report";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/refresh/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<{ id: string; status: string }>("/metadata/refresh", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to start refresh";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/reject/route.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<{ status: string }>(`/metadata/reject/${body.id}`, {
      method: "POST",
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to reject metadata";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/metadata/search/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, SeriesCandidateDto } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<SeriesCandidateDto[]>("/metadata/search", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to search metadata";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/prowlarr/search/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch("/prowlarr/search", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to search Prowlarr";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/prowlarr/test/route.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch("/prowlarr/test");
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to test Prowlarr connection";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/qbittorrent/add/route.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch("/qbittorrent/add", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to add torrent";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/qbittorrent/test/route.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch("/qbittorrent/test");
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to test qBittorrent";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/series/mark-read/route.ts (new file, 13 lines)
@@ -0,0 +1,13 @@
import { NextRequest, NextResponse } from "next/server";
import { markSeriesRead } from "@/lib/api";

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await markSeriesRead(body.series, body.status ?? "read");
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to mark series";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
apps/backoffice/app/api/series/provider-statuses/route.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch<string[]>("/series/provider-statuses");
    return NextResponse.json(data);
  } catch {
    return NextResponse.json([], { status: 200 });
  }
}
apps/backoffice/app/api/series/statuses/route.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch<string[]>("/series/statuses");
    return NextResponse.json(data);
  } catch {
    return NextResponse.json([], { status: 200 });
  }
}
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function DELETE(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  try {
    const data = await apiFetch<unknown>(`/settings/status-mappings/${id}`, {
      method: "DELETE",
    });
    return NextResponse.json(data);
  } catch {
    return NextResponse.json({ error: "Failed to delete status mapping" }, { status: 500 });
  }
}
apps/backoffice/app/api/settings/status-mappings/route.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch<unknown>("/settings/status-mappings");
    return NextResponse.json(data);
  } catch {
    return NextResponse.json({ error: "Failed to fetch status mappings" }, { status: 500 });
  }
}

export async function POST(request: NextRequest) {
  try {
    const body = await request.json();
    const data = await apiFetch<unknown>("/settings/status-mappings", {
      method: "POST",
      body: JSON.stringify(body),
    });
    return NextResponse.json(data);
  } catch {
    return NextResponse.json({ error: "Failed to save status mapping" }, { status: 500 });
  }
}
apps/backoffice/app/api/telegram/test/route.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";

export async function GET() {
  try {
    const data = await apiFetch("/telegram/test");
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to test Telegram connection";
    return NextResponse.json({ error: message }, { status: 500 });
  }
}
@@ -1,215 +0,0 @@
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "../../../lib/api";
import { BookPreview } from "../../components/BookPreview";
import { ConvertButton } from "../../components/ConvertButton";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";

export const dynamic = "force-dynamic";

const readingStatusConfig: Record<ReadingStatus, { label: string; className: string }> = {
  unread: { label: "Non lu", className: "bg-muted/60 text-muted-foreground border border-border" },
  reading: { label: "En cours", className: "bg-amber-500/15 text-amber-600 dark:text-amber-400 border border-amber-500/30" },
  read: { label: "Lu", className: "bg-green-500/15 text-green-600 dark:text-green-400 border border-green-500/30" },
};

function ReadingStatusBadge({
  status,
  currentPage,
  lastReadAt,
}: {
  status: ReadingStatus;
  currentPage: number | null;
  lastReadAt: string | null;
}) {
  const { label, className } = readingStatusConfig[status];
  return (
    <div className="flex items-center gap-2">
      <span className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-semibold ${className}`}>
        {label}
        {status === "reading" && currentPage != null && ` · p. ${currentPage}`}
      </span>
      {lastReadAt && (
        <span className="text-xs text-muted-foreground">
          {new Date(lastReadAt).toLocaleDateString()}
        </span>
      )}
    </div>
  );
}

async function fetchBook(bookId: string): Promise<BookDto | null> {
  try {
    return await apiFetch<BookDto>(`/books/${bookId}`);
  } catch {
    return null;
  }
}

export default async function BookDetailPage({
  params
}: {
  params: Promise<{ id: string }>;
}) {
  const { id } = await params;
  const [book, libraries] = await Promise.all([
    fetchBook(id),
    fetchLibraries().catch(() => [] as { id: string; name: string }[])
  ]);

  if (!book) {
    notFound();
  }

  const library = libraries.find(l => l.id === book.library_id);

  return (
    <>
      <div className="mb-6">
        <Link href="/books" className="inline-flex items-center text-sm text-muted-foreground hover:text-primary transition-colors">
          ← Back to books
        </Link>
      </div>

      <div className="flex flex-col lg:flex-row gap-8">
        <div className="flex-shrink-0">
          <div className="bg-card rounded-xl shadow-card border border-border p-4 inline-block">
            <Image
              src={getBookCoverUrl(book.id)}
              alt={`Cover of ${book.title}`}
              width={300}
              height={440}
              className="w-auto h-auto max-w-[300px] rounded-lg"
              unoptimized
              loading="lazy"
            />
          </div>
        </div>

        <div className="flex-1">
          <div className="bg-card rounded-xl shadow-sm border border-border p-6">
            <h1 className="text-3xl font-bold text-foreground mb-2">{book.title}</h1>

            {book.author && (
              <p className="text-lg text-muted-foreground mb-4">by {book.author}</p>
            )}

            {book.series && (
              <p className="text-sm text-muted-foreground mb-6">
                {book.series}
                {book.volume && <span className="ml-2 px-2 py-1 bg-primary/10 text-primary rounded text-xs">Volume {book.volume}</span>}
              </p>
            )}

            <div className="space-y-3">
              {book.reading_status && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Lecture :</span>
                  <ReadingStatusBadge
                    status={book.reading_status}
                    currentPage={book.reading_current_page ?? null}
                    lastReadAt={book.reading_last_read_at ?? null}
                  />
                </div>
              )}

              <div className="flex items-center justify-between py-2 border-b border-border">
                <span className="text-sm text-muted-foreground">Format:</span>
                <span className={`inline-flex px-2.5 py-1 rounded-full text-xs font-semibold ${
                  book.kind === 'epub' ? 'bg-primary/10 text-primary' : 'bg-muted/50 text-muted-foreground'
                }`}>
                  {book.kind.toUpperCase()}
                </span>
              </div>

              {book.volume && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Volume:</span>
                  <span className="text-sm text-foreground">{book.volume}</span>
                </div>
              )}

              {book.language && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Language:</span>
                  <span className="text-sm text-foreground">{book.language.toUpperCase()}</span>
                </div>
              )}

              {book.page_count && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Pages:</span>
                  <span className="text-sm text-foreground">{book.page_count}</span>
                </div>
              )}

              <div className="flex items-center justify-between py-2 border-b border-border">
                <span className="text-sm text-muted-foreground">Library:</span>
                <span className="text-sm text-foreground">{library?.name || book.library_id}</span>
              </div>

              {book.series && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Series:</span>
                  <span className="text-sm text-foreground">{book.series}</span>
                </div>
              )}

              {book.file_format && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">File Format:</span>
                  <div className="flex items-center gap-3">
                    <span className="text-sm text-foreground">{book.file_format.toUpperCase()}</span>
                    {book.file_format === "cbr" && <ConvertButton bookId={book.id} />}
                  </div>
                </div>
              )}

              {book.file_parse_status && (
                <div className="flex items-center justify-between py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground">Parse Status:</span>
                  <span className={`inline-flex px-2.5 py-1 rounded-full text-xs font-semibold ${
                    book.file_parse_status === 'success' ? 'bg-success/10 text-success' :
                    book.file_parse_status === 'failed' ? 'bg-destructive/10 text-error' : 'bg-muted/50 text-muted-foreground'
                  }`}>
                    {book.file_parse_status}
                  </span>
                </div>
              )}

              {book.file_path && (
                <div className="flex flex-col py-2 border-b border-border">
                  <span className="text-sm text-muted-foreground mb-1">File Path:</span>
                  <code className="text-xs font-mono text-foreground break-all">{book.file_path}</code>
                </div>
              )}

              <div className="flex flex-col py-2 border-b border-border">
                <span className="text-sm text-muted-foreground mb-1">Book ID:</span>
                <code className="text-xs font-mono text-foreground break-all">{book.id}</code>
              </div>

              <div className="flex flex-col py-2 border-b border-border">
                <span className="text-sm text-muted-foreground mb-1">Library ID:</span>
                <code className="text-xs font-mono text-foreground break-all">{book.library_id}</code>
              </div>

              {book.updated_at && (
                <div className="flex items-center justify-between py-2">
                  <span className="text-sm text-muted-foreground">Updated:</span>
                  <span className="text-sm text-foreground">{new Date(book.updated_at).toLocaleString()}</span>
                </div>
              )}
            </div>
          </div>
        </div>
      </div>

      {book.page_count && book.page_count > 0 && (
        <div className="mt-8">
          <BookPreview bookId={book.id} pageCount={book.page_count} />
        </div>
      )}
    </>
  );
}
@@ -1,14 +1,15 @@
 "use client";
 
-import { useState } from "react";
+import { memo, useState } from "react";
 import Image from "next/image";
 import Link from "next/link";
 import { BookDto, ReadingStatus } from "../../lib/api";
+import { useTranslation } from "../../lib/i18n/context";
 
-const readingStatusOverlay: Record<ReadingStatus, { label: string; className: string } | null> = {
+const readingStatusOverlayClasses: Record<ReadingStatus, string | null> = {
   unread: null,
-  reading: { label: "En cours", className: "bg-amber-500/90 text-white" },
-  read: { label: "Lu", className: "bg-green-600/90 text-white" },
+  reading: "bg-amber-500/90 text-white",
+  read: "bg-green-600/90 text-white",
 };
 
 interface BookCardProps {
@@ -16,7 +17,7 @@ interface BookCardProps {
   readingStatus?: ReadingStatus;
 }
 
-function BookImage({ src, alt }: { src: string; alt: string }) {
+const BookImage = memo(function BookImage({ src, alt }: { src: string; alt: string }) {
   const [isLoaded, setIsLoaded] = useState(false);
   const [hasError, setHasError] = useState(false);
 
@@ -50,30 +51,37 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
         sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
         onLoad={() => setIsLoaded(true)}
         onError={() => setHasError(true)}
-        unoptimized
       />
     </div>
   );
-}
+});
 
-export function BookCard({ book, readingStatus }: BookCardProps) {
+export const BookCard = memo(function BookCard({ book, readingStatus }: BookCardProps) {
+  const { t } = useTranslation();
   const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
   const status = readingStatus ?? book.reading_status;
-  const overlay = status ? readingStatusOverlay[status] : null;
+  const overlayClass = status ? readingStatusOverlayClasses[status] : null;
+  const statusLabels: Record<ReadingStatus, string> = {
+    unread: t("status.unread"),
+    reading: t("status.reading"),
+    read: t("status.read"),
+  };
+
+  const isRead = status === "read";
 
   return (
     <Link
       href={`/books/${book.id}`}
-      className="group block bg-card rounded-xl border border-border/60 shadow-sm hover:shadow-md hover:-translate-y-1 transition-all duration-200 overflow-hidden"
+      className={`group block bg-card rounded-xl border border-border/60 shadow-sm hover:shadow-md hover:-translate-y-1 transition-all duration-200 overflow-hidden ${isRead ? "opacity-50" : ""}`}
     >
       <div className="relative">
         <BookImage
           src={coverUrl}
-          alt={`Cover of ${book.title}`}
+          alt={t("books.coverOf", { name: book.title })}
         />
-        {overlay && (
-          <span className={`absolute bottom-2 left-2 px-2 py-0.5 rounded-full text-[10px] font-bold tracking-wide ${overlay.className}`}>
-            {overlay.label}
+        {overlayClass && status && (
+          <span className={`absolute bottom-2 left-2 px-2 py-0.5 rounded-full text-[10px] font-bold tracking-wide ${overlayClass}`}>
+            {statusLabels[status]}
           </span>
         )}
       </div>
@@ -100,14 +108,17 @@ export function BookCard({ book, readingStatus }: BookCardProps) {
 
       {/* Meta Tags */}
       <div className="flex items-center gap-2 mt-2">
+        {(book.format ?? book.kind) && (
         <span className={`
           px-2 py-0.5 text-[10px] font-bold uppercase tracking-wider rounded-full
-          ${book.kind === 'cbz' ? 'bg-success/10 text-success' : ''}
-          ${book.kind === 'cbr' ? 'bg-warning/10 text-warning' : ''}
-          ${book.kind === 'pdf' ? 'bg-destructive/10 text-destructive' : ''}
+          ${(book.format ?? book.kind) === 'cbz' ? 'bg-success/10 text-success' : ''}
+          ${(book.format ?? book.kind) === 'cbr' ? 'bg-warning/10 text-warning' : ''}
+          ${(book.format ?? book.kind) === 'pdf' ? 'bg-destructive/10 text-destructive' : ''}
+          ${(book.format ?? book.kind) === 'epub' ? 'bg-info/10 text-info' : ''}
         `}>
-          {book.kind}
+          {book.format ?? book.kind}
         </span>
+        )}
         {book.language && (
           <span className="px-2 py-0.5 text-[10px] font-medium uppercase tracking-wider rounded-full bg-primary/10 text-primary">
             {book.language}
@@ -117,7 +128,7 @@ export function BookCard({ book, readingStatus }: BookCardProps) {
       </div>
     </Link>
   );
-}
+});
 
 interface BooksGridProps {
   books: (BookDto & { coverUrl?: string })[];
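The memo() wrappers introduced above only pay off when props are shallow-equal between renders; memo compares each prop with Object.is, so object props must keep a stable identity. A hedged illustration of the intended win in a grid (the surrounding mapping code is hypothetical):

// Each `book` object keeps its identity across parent re-renders (same fetched array),
// so the memoized BookCard bails out unless its own entry actually changed.
const cards = books.map((book) => (
  <BookCard key={book.id} book={book} />
));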
@@ -2,10 +2,12 @@
 
 import { useState } from "react";
 import Image from "next/image";
+import { useTranslation } from "../../lib/i18n/context";
 
 const PAGE_SIZE = 5;
 
 export function BookPreview({ bookId, pageCount }: { bookId: string; pageCount: number }) {
+  const { t } = useTranslation();
   const [offset, setOffset] = useState(0);
 
   const pages = Array.from({ length: PAGE_SIZE }, (_, i) => offset + i + 1).filter(
@@ -16,9 +18,9 @@ export function BookPreview({ bookId, pageCount }: { bookId: string; pageCount:
     <div className="bg-card rounded-xl border border-border p-6">
       <div className="flex items-center justify-between mb-4">
         <h2 className="text-lg font-semibold text-foreground">
-          Preview
+          {t("bookPreview.preview")}
           <span className="ml-2 text-sm font-normal text-muted-foreground">
-            pages {offset + 1}–{Math.min(offset + PAGE_SIZE, pageCount)} / {pageCount}
+            {t("bookPreview.pages", { start: offset + 1, end: Math.min(offset + PAGE_SIZE, pageCount), total: pageCount })}
           </span>
         </h2>
         <div className="flex gap-2">
@@ -27,14 +29,14 @@ export function BookPreview({ bookId, pageCount }: { bookId: string; pageCount:
             disabled={offset === 0}
             className="px-3 py-1.5 text-sm rounded-lg border border-border bg-muted/50 text-foreground hover:bg-muted disabled:opacity-40 disabled:cursor-not-allowed transition-colors"
           >
-            ← Prev
+            {t("bookPreview.prev")}
           </button>
           <button
             onClick={() => setOffset((o) => Math.min(o + PAGE_SIZE, pageCount - 1))}
             disabled={offset + PAGE_SIZE >= pageCount}
             className="px-3 py-1.5 text-sm rounded-lg border border-border bg-muted/50 text-foreground hover:bg-muted disabled:opacity-40 disabled:cursor-not-allowed transition-colors"
           >
-            Next →
+            {t("bookPreview.next")}
           </button>
         </div>
       </div>
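Every hard-coded string in BookPreview becomes a `t()` call, and the pages counter now passes interpolation values instead of concatenating JSX. The `lib/i18n/context` module is not part of this diff; here is a self-contained sketch of the placeholder substitution these calls appear to rely on, where the `{name}` syntax is an assumption:

```ts
// Sketch of t()-style interpolation, assuming "{placeholder}" catalog syntax.
// The real lib/i18n/context implementation is not shown in this diff.
function interpolate(template: string, vars: Record<string, string | number> = {}): string {
  return template.replace(/\{(\w+)\}/g, (match, key) =>
    key in vars ? String(vars[key]) : match,
  );
}

// Assuming an en catalog entry "bookPreview.pages": "pages {start}–{end} / {total}":
interpolate("pages {start}–{end} / {total}", { start: 1, end: 5, total: 42 });
// => "pages 1–5 / 42"
```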
@@ -3,6 +3,7 @@
 import { useState } from "react";
 import Link from "next/link";
 import { Button } from "./ui";
+import { useTranslation } from "../../lib/i18n/context";
 
 interface ConvertButtonProps {
   bookId: string;
@@ -15,6 +16,7 @@ type ConvertState =
   | { type: "error"; message: string };
 
 export function ConvertButton({ bookId }: ConvertButtonProps) {
+  const { t } = useTranslation();
   const [state, setState] = useState<ConvertState>({ type: "idle" });
 
   const handleConvert = async () => {
@@ -23,22 +25,22 @@ export function ConvertButton({ bookId }: ConvertButtonProps) {
       const res = await fetch(`/api/books/${bookId}/convert`, { method: "POST" });
       if (!res.ok) {
         const body = await res.json().catch(() => ({ error: res.statusText }));
-        setState({ type: "error", message: body.error || "Conversion failed" });
+        setState({ type: "error", message: body.error || t("convert.failed") });
         return;
       }
       const job = await res.json();
       setState({ type: "success", jobId: job.id });
     } catch (err) {
-      setState({ type: "error", message: err instanceof Error ? err.message : "Unknown error" });
+      setState({ type: "error", message: err instanceof Error ? err.message : t("convert.unknownError") });
     }
   };
 
   if (state.type === "success") {
     return (
       <div className="flex items-center gap-2 text-sm text-success">
-        <span>Conversion started.</span>
+        <span>{t("convert.started")}</span>
         <Link href={`/jobs/${state.jobId}`} className="text-primary hover:underline font-medium">
-          View job →
+          {t("convert.viewJob")}
         </Link>
       </div>
     );
@@ -52,7 +54,7 @@ export function ConvertButton({ bookId }: ConvertButtonProps) {
         className="text-xs text-muted-foreground hover:underline text-left"
         onClick={() => setState({ type: "idle" })}
       >
-        Dismiss
+        {t("common.close")}
       </button>
     </div>
   );
@@ -65,7 +67,7 @@ export function ConvertButton({ bookId }: ConvertButtonProps) {
       onClick={handleConvert}
       disabled={state.type === "loading"}
     >
-      {state.type === "loading" ? t("convert.converting") : t("convert.convertToCbz")}
+      {state.type === "loading" ? t("convert.converting") : t("convert.convertToCbz")}
     </Button>
   );
 }
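Only the last variant of `ConvertState` is visible in the hunks above, but the four states the component actually sets make the full union straightforward to reconstruct. This is an inference from usage, not copied from the source:

```ts
// Reconstructed from the setState calls above, not from the source file.
type ConvertState =
  | { type: "idle" }
  | { type: "loading" }
  | { type: "success"; jobId: string }
  | { type: "error"; message: string };
```

Because the union is discriminated on `type`, `state.jobId` only typechecks after narrowing on `state.type === "success"`, so impossible combinations (a jobId alongside an error message, say) cannot be represented.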
231 apps/backoffice/app/components/DashboardCharts.tsx Normal file
@@ -0,0 +1,231 @@
+"use client";
+
+import {
+  PieChart, Pie, Cell, ResponsiveContainer, Tooltip,
+  BarChart, Bar, XAxis, YAxis, CartesianGrid,
+  AreaChart, Area, Line, LineChart,
+  Legend,
+} from "recharts";
+
+// ---------------------------------------------------------------------------
+// Donut
+// ---------------------------------------------------------------------------
+
+export function RcDonutChart({
+  data,
+  noDataLabel,
+}: {
+  data: { name: string; value: number; color: string }[];
+  noDataLabel?: string;
+}) {
+  const total = data.reduce((s, d) => s + d.value, 0);
+  if (total === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
+
+  return (
+    <div className="flex items-center gap-4">
+      <ResponsiveContainer width={130} height={130}>
+        <PieChart>
+          <Pie
+            data={data}
+            cx="50%"
+            cy="50%"
+            innerRadius={32}
+            outerRadius={55}
+            dataKey="value"
+            strokeWidth={0}
+          >
+            {data.map((d, i) => (
+              <Cell key={i} fill={d.color} />
+            ))}
+          </Pie>
+          <Tooltip
+            formatter={(value) => value}
+            contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+          />
+        </PieChart>
+      </ResponsiveContainer>
+      <div className="flex flex-col gap-1.5 min-w-0">
+        {data.map((d, i) => (
+          <div key={i} className="flex items-center gap-2 text-sm">
+            <span className="w-3 h-3 rounded-full shrink-0" style={{ backgroundColor: d.color }} />
+            <span className="text-muted-foreground truncate">{d.name}</span>
+            <span className="font-medium text-foreground ml-auto">{d.value}</span>
+          </div>
+        ))}
+      </div>
+    </div>
+  );
+}
+
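A usage sketch for the donut; the names, counts, and colors below are invented for illustration. Note that the `total === 0` guard means an all-zero dataset renders the `noDataLabel` fallback rather than an empty pie:

```tsx
// Invented example data: values and colors are placeholders.
<RcDonutChart
  data={[
    { name: "Read", value: 12, color: "hsl(142 60% 45%)" },
    { name: "Reading", value: 3, color: "hsl(45 93% 47%)" },
    { name: "Unread", value: 27, color: "hsl(220 13% 70%)" },
  ]}
  noDataLabel="No books yet"
/>
```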
+// ---------------------------------------------------------------------------
+// Bar chart
+// ---------------------------------------------------------------------------
+
+export function RcBarChart({
+  data,
+  color = "hsl(198 78% 37%)",
+  noDataLabel,
+}: {
+  data: { label: string; value: number }[];
+  color?: string;
+  noDataLabel?: string;
+}) {
+  if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
+
+  return (
+    <ResponsiveContainer width="100%" height={180}>
+      <BarChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
+        <CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
+        <XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
+        <YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
+        <Tooltip
+          contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+        />
+        <Bar dataKey="value" fill={color} radius={[4, 4, 0, 0]} />
+      </BarChart>
+    </ResponsiveContainer>
+  );
+}
+
+// ---------------------------------------------------------------------------
+// Area / Line chart
+// ---------------------------------------------------------------------------
+
+export function RcAreaChart({
+  data,
+  color = "hsl(142 60% 45%)",
+  noDataLabel,
+}: {
+  data: { label: string; value: number }[];
+  color?: string;
+  noDataLabel?: string;
+}) {
+  if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
+
+  return (
+    <ResponsiveContainer width="100%" height={180}>
+      <AreaChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
+        <defs>
+          <linearGradient id="areaGradient" x1="0" y1="0" x2="0" y2="1">
+            <stop offset="0%" stopColor={color} stopOpacity={0.3} />
+            <stop offset="100%" stopColor={color} stopOpacity={0} />
+          </linearGradient>
+        </defs>
+        <CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
+        <XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
+        <YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
+        <Tooltip
+          contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+        />
+        <Area type="monotone" dataKey="value" stroke={color} strokeWidth={2} fill="url(#areaGradient)" dot={{ r: 3, fill: color }} />
+      </AreaChart>
+    </ResponsiveContainer>
+  );
+}
+
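`RcBarChart` and `RcAreaChart` consume the same `{ label, value }` row shape, so one dataset serves both; the `left: -20` margin offsets the YAxis gutter so the plot hugs the card edge. A usage sketch with invented values, where `UploadsPanel` is a hypothetical wrapper:

```tsx
// Illustrative rows: both charts accept the same shape.
const uploadsPerMonth = [
  { label: "Jan", value: 4 },
  { label: "Feb", value: 9 },
  { label: "Mar", value: 2 },
];

// Hypothetical wrapper component, shown only to demonstrate the props.
export function UploadsPanel() {
  return (
    <>
      <RcBarChart data={uploadsPerMonth} noDataLabel="No data" />
      <RcAreaChart data={uploadsPerMonth} noDataLabel="No data" />
    </>
  );
}
```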
+// ---------------------------------------------------------------------------
+// Horizontal stacked bar (libraries breakdown)
+// ---------------------------------------------------------------------------
+
+export function RcStackedBar({
+  data,
+  labels,
+}: {
+  data: { name: string; read: number; reading: number; unread: number; sizeLabel: string }[];
+  labels: { read: string; reading: string; unread: string; books: string };
+}) {
+  if (data.length === 0) return null;
+
+  return (
+    <ResponsiveContainer width="100%" height={data.length * 60 + 30}>
+      <BarChart data={data} layout="vertical" margin={{ top: 0, right: 5, bottom: 0, left: 5 }}>
+        <CartesianGrid strokeDasharray="3 3" horizontal={false} stroke="var(--color-border)" opacity={0.3} />
+        <XAxis type="number" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
+        <YAxis type="category" dataKey="name" tick={{ fontSize: 12, fill: "var(--color-foreground)" }} axisLine={false} tickLine={false} width={120} />
+        <Tooltip
+          contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+        />
+        <Legend
+          wrapperStyle={{ fontSize: 11 }}
+          formatter={(value: string) => <span className="text-muted-foreground">{value}</span>}
+        />
+        <Bar dataKey="read" stackId="a" fill="hsl(142 60% 45%)" name={labels.read} radius={[0, 0, 0, 0]} />
+        <Bar dataKey="reading" stackId="a" fill="hsl(45 93% 47%)" name={labels.reading} />
+        <Bar dataKey="unread" stackId="a" fill="hsl(220 13% 70%)" name={labels.unread} radius={[0, 4, 4, 0]} />
+      </BarChart>
+    </ResponsiveContainer>
+  );
+}
+
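Usage sketch with invented rows. `labels` takes pre-translated legend strings so the chart itself stays locale-agnostic, and the shared `stackId="a"` is what stacks the three series into one bar per library:

```tsx
// Invented example: library names, counts, and sizes are placeholders.
<RcStackedBar
  data={[
    { name: "Manga", read: 10, reading: 2, unread: 5, sizeLabel: "1.2 GB" },
    { name: "Comics", read: 4, reading: 1, unread: 9, sizeLabel: "800 MB" },
  ]}
  labels={{ read: "Read", reading: "Reading", unread: "Unread", books: "books" }}
/>
```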
+// ---------------------------------------------------------------------------
+// Horizontal bar chart (top series)
+// ---------------------------------------------------------------------------
+
+export function RcHorizontalBar({
+  data,
+  color = "hsl(142 60% 45%)",
+  noDataLabel,
+}: {
+  data: { name: string; value: number; subLabel: string }[];
+  color?: string;
+  noDataLabel?: string;
+}) {
+  if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-4">{noDataLabel}</p>;
+
+  return (
+    <ResponsiveContainer width="100%" height={data.length * 40 + 10}>
+      <BarChart data={data} layout="vertical" margin={{ top: 0, right: 5, bottom: 0, left: 5 }}>
+        <CartesianGrid strokeDasharray="3 3" horizontal={false} stroke="var(--color-border)" opacity={0.3} />
+        <XAxis type="number" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
+        <YAxis type="category" dataKey="name" tick={{ fontSize: 11, fill: "var(--color-foreground)" }} axisLine={false} tickLine={false} width={120} />
+        <Tooltip
+          contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+        />
+        <Bar dataKey="value" fill={color} radius={[0, 4, 4, 0]} />
+      </BarChart>
+    </ResponsiveContainer>
+  );
+}
+
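Note that `subLabel` is part of the row type but never referenced in the JSX, so it appears reserved for a custom tooltip or later use. Usage sketch with invented data:

```tsx
// Invented example: `subLabel` is typed but currently unused by the chart.
<RcHorizontalBar
  data={[
    { name: "One Piece", value: 104, subLabel: "104 volumes" },
    { name: "Berserk", value: 41, subLabel: "41 volumes" },
  ]}
  noDataLabel="No series yet"
/>
```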
+// ---------------------------------------------------------------------------
+// Multi-line chart (jobs over time)
+// ---------------------------------------------------------------------------
+
+export function RcMultiLineChart({
+  data,
+  lines,
+  noDataLabel,
+}: {
+  data: Record<string, unknown>[];
+  lines: { key: string; label: string; color: string }[];
+  noDataLabel?: string;
+}) {
+  const hasData = data.some((d) => lines.some((l) => (d[l.key] as number) > 0));
+  if (data.length === 0 || !hasData)
+    return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
+
+  return (
+    <ResponsiveContainer width="100%" height={180}>
+      <LineChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
+        <CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
+        <XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
+        <YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
+        <Tooltip
+          contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
+        />
+        <Legend wrapperStyle={{ fontSize: 11 }} />
+        {lines.map((l) => (
+          <Line
+            key={l.key}
+            type="monotone"
+            dataKey={l.key}
+            name={l.label}
+            stroke={l.color}
+            strokeWidth={2}
+            dot={{ r: 3, fill: l.color }}
+          />
+        ))}
+      </LineChart>
+    </ResponsiveContainer>
+  );
+}
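Each entry in `lines` must name a numeric key present on every data row, and the `hasData` guard treats an all-zero grid as empty. A usage sketch with invented rows and colors:

```tsx
// Invented example: keys listed in `lines` must exist on every row of `data`.
<RcMultiLineChart
  data={[
    { label: "Mon", completed: 3, failed: 1 },
    { label: "Tue", completed: 5, failed: 0 },
  ]}
  lines={[
    { key: "completed", label: "Completed", color: "hsl(142 60% 45%)" },
    { key: "failed", label: "Failed", color: "hsl(0 72% 51%)" },
  ]}
  noDataLabel="No jobs yet"
/>
```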
Some files were not shown because too many files have changed in this diff.