Compare commits

..

6 Commits

Author SHA1 Message Date
e64848a216 feat: implement thumbnail generation and management
- Remove unused image dependencies from Cargo.lock.
- Update API to handle thumbnail generation and checkup processes.
- Introduce new routes for rebuilding and regenerating thumbnails.
- Enhance job tracking with progress indicators for thumbnail jobs.
- Update front-end components to display thumbnail job status and progress.
- Add backend logic for managing thumbnail jobs and integrating with the API.
- Refactor existing code to accommodate new thumbnail functionalities.
2026-03-08 20:55:12 +01:00
c93a7d5d29 feat: thumbnails : part1 2026-03-08 17:54:47 +01:00
360d6e85de feat: review cbr and unraring for image on api 2026-03-07 15:47:46 +01:00
162b4712e7 fix: improve CBR extraction with fallback and increase timeout
- Try multiple entries in CBR archive until finding valid image
- Increase timeout from 12s to 30s for large files
- Better error messages for debugging
2026-03-07 15:02:55 +01:00
217919fa77 perf: increase concurrent renders limit from 4 to 8 2026-03-07 12:16:23 +01:00
ee0235b824 fix: improve CBR extraction logging and remove dead code
- Add magic bytes validation for extracted CBR images
- Add hex dump for debugging invalid images
- Show first entries when listing CBR archive
- Remove unused structs and functions from settings.rs
- Add -y flag to unrar for auto-confirm
2026-03-07 12:13:55 +01:00
38 changed files with 1739 additions and 269 deletions

View File

@@ -48,6 +48,10 @@ LIBRARIES_ROOT_PATH=/libraries
# You can change this to an absolute path on your machine # You can change this to an absolute path on your machine
LIBRARIES_HOST_PATH=../libraries LIBRARIES_HOST_PATH=../libraries
# Path to thumbnails directory on host machine (for Docker volume mount)
# Default: ../data/thumbnails (relative to infra/docker-compose.yml)
THUMBNAILS_HOST_PATH=../data/thumbnails
# ============================================================================= # =============================================================================
# Port Configuration # Port Configuration
# ============================================================================= # =============================================================================

1
.gitignore vendored
View File

@@ -5,3 +5,4 @@ tmp/
libraries/ libraries/
node_modules/ node_modules/
.next/ .next/
data/thumbnails

301
AGENTS.md Normal file
View File

@@ -0,0 +1,301 @@
# AGENTS.md - Agent Coding Guidelines for Stripstream Librarian
This file provides guidelines for agentic coding agents operating in this repository.
---
## 1. Build, Lint, and Test Commands
### Build Commands
```bash
# Build debug version (fastest for development)
cargo build
# Build release version (optimized)
cargo build --release
# Build specific crate
cargo build -p api
cargo build -p indexer
# Watch mode for development (requires cargo-watch)
cargo watch -x build
```
### Lint & Format Commands
```bash
# Run clippy lints
cargo clippy
# Fix auto-fixable clippy warnings
cargo clippy --fix
# Format code
cargo fmt
# Check formatting without making changes
cargo fmt -- --check
```
### Test Commands
```bash
# Run all tests
cargo test
# Run tests for specific crate
cargo test -p api
cargo test -p indexer
cargo test -p parsers
# Run a single test by name
cargo test test_name_here
# Run tests with output display
cargo test -- --nocapture
# Run doc tests
cargo test --doc
```
### Database Migrations
```bash
# Run migrations manually (via sqlx CLI)
# Ensure DATABASE_URL is set, then:
sqlx migrate run
# Create new migration
sqlx migrate add -r migration_name
```
### Docker Development
```bash
# Start infrastructure only
cd infra && docker compose up -d postgres meilisearch
# Start full stack
cd infra && docker compose up -d
# View logs
docker compose logs -f api
docker compose logs -f indexer
```
---
## 2. Code Style Guidelines
### General Principles
- **Conciseness**: Keep responses short and direct. Avoid unnecessary preamble or explanation.
- **Idiomatic Rust**: Follow Rust best practices and ecosystem conventions.
- **Error Handling**: Use `anyhow::Result<T>` for application code, `std::io::Result<T>` for simple file operations.
- **Async**: Use `tokio` for async runtime. Prefer `#[tokio::main]` over manual runtime.
### Naming Conventions
| Element | Convention | Example |
|---------|------------|---------|
| Variables | snake_case | `let book_id = ...` |
| Functions | snake_case | `fn get_book(...)` |
| Structs/Enums | PascalCase | `struct BookItem` |
| Modules | snake_case | `mod books;` |
| Constants | SCREAMING_SNAKE_CASE | `const BATCH_SIZE: usize = 100;` |
| Types | PascalCase | `type MyResult<T> = Result<T, Error>;` |
### Imports
- **Absolute imports** for workspace crates: `use parsers::{detect_format, parse_metadata};`
- **Standard library** imports: `use std::path::Path;`
- **External crates**: `use sqlx::{postgres::PgPoolOptions, Row};`
- **Group by**: std → external → workspace → local (with blank lines between)
```rust
use std::collections::HashMap;
use std::path::Path;
use anyhow::Context;
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use crate::error::ApiError;
use crate::AppState;
```
### Error Handling
- Use `anyhow` for application-level error handling with context
- Use `with_context()` for adding context to errors
- Return `Result<T, ApiError>` in API handlers
- Use `?` operator instead of manual match/unwrap where possible
```rust
// Good
fn process_book(path: &Path) -> anyhow::Result<Book> {
let file = std::fs::File::open(path)
.with_context(|| format!("cannot open file: {}", path.display()))?;
// ...
}
// Good - API error handling
async fn get_book(State(state): State<AppState>, Path(id): Path<Uuid>)
-> Result<Json<Book>, ApiError> {
let row = sqlx::query("SELECT * FROM books WHERE id = $1")
.bind(id)
.fetch_optional(&state.pool)
.await
.map_err(ApiError::internal)?;
// ...
}
```
### Database (sqlx)
- Use **raw SQL queries** with `sqlx::query()` and `sqlx::query_scalar()`
- Prefer **batch operations** using `UNNEST` for bulk inserts/updates
- Always use **parameterized queries** (`$1`, `$2`, etc.) - never string interpolation
- Follow existing patterns for transactions:
```rust
let mut tx = pool.begin().await?;
// ... queries ...
tx.commit().await?;
```
### Async/Tokio
- Use `tokio::spawn` for background tasks
- Use `spawn_blocking` for CPU-bound work (image processing, file I/O)
- Keep async handlers non-blocking
- Use `tokio::time::timeout` for operations with timeouts
```rust
let bytes = tokio::time::timeout(
Duration::from_secs(60),
tokio::task::spawn_blocking(move || {
render_page(&abs_path_clone, n)
}),
)
.await
.map_err(|_| ApiError::internal("timeout"))?
.map_err(ApiError::internal)?;
```
### Structs and Serialization
- Use `#[derive(Serialize, Deserialize, ToSchema)]` for API types
- Add `utoipa` schemas for OpenAPI documentation
- Use `Option<T>` for nullable fields
- Document public structs briefly
```rust
#[derive(Serialize, ToSchema)]
pub struct BookItem {
#[schema(value_type = String)]
pub id: Uuid,
pub title: String,
pub author: Option<String>,
// ...
}
```
### Performance Considerations
- Use **batch operations** for database inserts/updates (100 items recommended)
- Use **parallel iterators** (`rayon::par_iter()`) for CPU-intensive scans
- Implement **caching** for expensive operations (see `pages.rs` for disk/memory cache examples)
- Use **streaming** for large data where applicable
### Testing
- Currently there are no test files - consider adding unit tests for:
- Parser functions
- Thumbnail generation
- Configuration parsing
- Use `#[cfg(test)]` modules for integration tests
---
## 3. Project Structure
```
stripstream-librarian/
├── apps/
│ ├── api/ # REST API (axum)
│ │ └── src/
│ │ ├── main.rs
│ │ ├── books.rs
│ │ ├── pages.rs
│ │ └── ...
│ ├── indexer/ # Background indexing service
│ │ └── src/
│ │ └── main.rs
│ └── backoffice/ # Next.js admin UI
├── crates/
│ ├── core/ # Shared config
│ │ └── src/config.rs
│ └── parsers/ # Book parsing (CBZ, CBR, PDF)
├── infra/
│ ├── migrations/ # SQL migrations
│ └── docker-compose.yml
└── libraries/ # Book storage (mounted volume)
```
### Key Files
| File | Purpose |
|------|---------|
| `apps/api/src/books.rs` | Book CRUD endpoints |
| `apps/api/src/pages.rs` | Page rendering & caching |
| `apps/indexer/src/main.rs` | Indexing logic, batch processing |
| `crates/parsers/src/lib.rs` | Format detection, metadata parsing |
| `crates/core/src/config.rs` | Configuration from environment |
| `infra/migrations/*.sql` | Database schema |
---
## 4. Common Patterns
### Configuration from Environment
```rust
// In crates/core/src/config.rs
impl IndexerConfig {
pub fn from_env() -> Result<Self> {
Ok(Self {
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
database_url: std::env::var("DATABASE_URL")
.context("DATABASE_URL is required")?,
// ...
})
}
}
```
### Path Remapping
```rust
fn remap_libraries_path(path: &str) -> String {
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
if path.starts_with("/libraries/") {
return path.replacen("/libraries", &root, 1);
}
}
path.to_string()
}
```
---
## 5. Important Notes
- **Workspace**: This is a Cargo workspace. Always specify the package when building specific apps.
- **Dependencies**: External crates are defined in workspace `Cargo.toml`, not individual `Cargo.toml`.
- **Database**: PostgreSQL is required. Run migrations before starting services.
- **External Tools**: The indexer relies on `unar` (for CBR) and `pdftoppm` (for PDF) being installed on the system.

4
Cargo.lock generated
View File

@@ -78,6 +78,7 @@ dependencies = [
"utoipa", "utoipa",
"utoipa-swagger-ui", "utoipa-swagger-ui",
"uuid", "uuid",
"walkdir",
"webp", "webp",
"zip 2.4.2", "zip 2.4.2",
] ]
@@ -1148,6 +1149,7 @@ dependencies = [
"notify", "notify",
"parsers", "parsers",
"rand 0.8.5", "rand 0.8.5",
"rayon",
"reqwest", "reqwest",
"serde", "serde",
"serde_json", "serde_json",
@@ -1623,6 +1625,8 @@ dependencies = [
"anyhow", "anyhow",
"lopdf", "lopdf",
"regex", "regex",
"uuid",
"walkdir",
"zip 2.4.2", "zip 2.4.2",
] ]

View File

@@ -20,6 +20,7 @@ base64 = "0.22"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] } image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
lru = "0.12" lru = "0.12"
rayon = "1.10"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
rand = "0.8" rand = "0.8"
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

141
PLAN_THUMBNAILS.md Normal file
View File

@@ -0,0 +1,141 @@
# Plan: Génération des vignettes à l'index
## 1. Base de données
### Migration SQL (`0010_add_thumbnails.sql`)
- [x] Ajouter `thumbnail_path TEXT` à la table `books` (nullable)
- [x] Ajouter settings pour thumbnails dans `app_settings`:
```json
{
"thumbnail": {
"enabled": true,
"width": 300,
"height": 400,
"quality": 80,
"format": "webp"
}
}
```
---
## 2. Configuration
### `crates/core/src/config.rs`
- [x] Ajouter `ThumbnailConfig` struct
- [x] Ajouter champs dans `IndexerConfig`:
- `thumbnail_width: u32` (défaut: 300)
- `thumbnail_height: u32` (défaut: 400)
- `thumbnail_quality: u8` (défaut: 80)
- `thumbnail_dir: String` (défaut: `/data/thumbnails`)
- [x] Ajouter getter depuis env vars
---
## 3. Indexer - Extraction de la 1ère page
### Fonction à créer dans `crates/parsers/src/lib.rs`
- [x] `extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>>`
- Réutiliser logique de `pages.rs:extract_cbz_page`
- Réutiliser logique de `pages.rs:extract_cbr_page`
- Réutiliser logique de `pages.rs:render_pdf_page`
### Fonction de génération vignette dans `apps/indexer/src/main.rs`
- [x] `generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>>`
- Load image avec `image::load_from_memory`
- Resize avec `image::resize` (ratio kept)
- Encode en WebP avec `webp::Encoder`
- [x] `save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> Result<String>`
### Intégration dans `scan_library`
- [x] Après parsing metadata, extraire 1ère page
- [x] Générer vignette et sauvegarder
- [x] Stocker chemin en DB (via batch insert)
---
## 4. Indexer - WalkDir parallèle
### Remplacement de `WalkDir` séquentiel
- [x] Utiliser `rayon` pour paralléliser le scan:
```rust
let total_files: usize = library_paths.par_iter()
.map(|root_path| { ... })
.sum();
```
- [x] Ajouter `rayon = "1.10"` dans workspace dependencies
---
## 5. API - Service des vignettes
### Mise à jour models dans `apps/api/src/books.rs`
- [x] Ajouter `thumbnail_url: Option<String>` à `BookItem`
- [x] Ajouter `thumbnail_url: Option<String>` à `BookDetails`
- [x] Mise à jour des requêtes SQL pour récupérer `thumbnail_path`
### Nouvelle route dans `apps/api/src/main.rs`
- [x] Route `/books/:id/thumbnail` (GET)
- Retourne fichier statique depuis `thumbnail_path`
- Content-Type: image/webp
- Cache-Control: public, max-age=31536000
### Suppression cache 1ère page (optionnel)
- [ ] Optionnel: simplifier `pages.rs` car thumbnail pré-générée
- [ ] Garder render pour pages > 1
### Adapter backoffice
La récupération des thumbnails est faite par une route page/1.
- [x] Passer par la nouvelle route avec une route clean /thumbnail pour chaque cover.
### refacto code entre api et indexer
En fait l'indexer pourrait appeler l'API pour qu'elle fasse les vignettes : c'est l'API qui est responsable des images et des lectures ebooks. Je préfère que chaque domaine soit bien respecté. À la fin d'un build, on appelle l'API pour faire le checkup des thumbnails.
Il faudra que, côté backoffice, partout où l'on peut voir le traitement live des jobs, une phase affiche en SSE le traitement des thumbnails. Côté API, si on n'a pas de thumbnail, on passe par le code actuel de pages.
- [x] Migration `0010_index_job_thumbnails_phase.sql`: status `generating_thumbnails` dans index_jobs
- [x] API: `get_thumbnail` fallback sur page 1 si pas de thumbnail_path (via `pages::render_book_page_1`)
- [x] API: module `thumbnails.rs`, POST `/index/jobs/:id/thumbnails/checkup` (admin), lance la génération en tâche de fond et met à jour la job
- [x] Indexer: plus de génération de thumbnails; en fin de build: status = `generating_thumbnails`, puis appel API checkup; config `api_base_url` + `api_bootstrap_token` (core)
- [x] Backoffice: StatusBadge "Thumbnails" pour `generating_thumbnails`; JobProgress/JobRow/JobsIndicator/page job détail: phase thumbnails visible en SSE (X/Y thumbnails, barre de progression)
---
## 6. Settings API
### Endpoint settings existant
- [ ] Vérifier que `/settings` expose thumbnail config
- [ ] Ajouter endpoint PUT pour mettre à jour thumbnail settings
---
## 7. Taches diverses
- [x] Ajouter dependency `image` et `webp` dans indexer `Cargo.toml`
- [x] Build release vérifié
---
## Ordre d'implémentation suggéré
1. [x] Migration DB + settings
2. [x] Config + parsers (extract first page)
3. [x] Indexer thumbnail generation + save to disk
4. [x] API serve thumbnail
5. [x] Parallel walkdir
6. [ ] Tests & polish (à faire)
---
## Post-déploiement
- [ ] Appliquer migration SQL: `psql -f infra/migrations/0010_add_thumbnails.sql`
- [ ] Créer dossier thumbnails: `mkdir -p /data/thumbnails`
- [ ] Configurer env vars si besoin:
- `THUMBNAIL_ENABLED=true`
- `THUMBNAIL_WIDTH=300`
- `THUMBNAIL_HEIGHT=400`
- `THUMBNAIL_QUALITY=80`
- `THUMBNAIL_DIRECTORY=/data/thumbnails`

View File

@@ -32,3 +32,4 @@ zip = { version = "2.2", default-features = false, features = ["deflate"] }
utoipa.workspace = true utoipa.workspace = true
utoipa-swagger-ui = { workspace = true, features = ["axum"] } utoipa-swagger-ui = { workspace = true, features = ["axum"] }
webp = "0.3" webp = "0.3"
walkdir = "2"

View File

@@ -21,7 +21,10 @@ RUN --mount=type=cache,target=/sccache \
cargo build --release -p api cargo build --release -p api
FROM debian:bookworm-slim FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free poppler-utils && rm -rf /var/lib/apt/lists/* RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unar poppler-utils locales && rm -rf /var/lib/apt/lists/*
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
COPY --from=builder /app/target/release/api /usr/local/bin/api COPY --from=builder /app/target/release/api /usr/local/bin/api
EXPOSE 8080 EXPOSE 8080
CMD ["/usr/local/bin/api"] CMD ["/usr/local/bin/api"]

View File

@@ -34,6 +34,7 @@ pub struct BookItem {
pub volume: Option<i32>, pub volume: Option<i32>,
pub language: Option<String>, pub language: Option<String>,
pub page_count: Option<i32>, pub page_count: Option<i32>,
pub thumbnail_url: Option<String>,
#[schema(value_type = String)] #[schema(value_type = String)]
pub updated_at: DateTime<Utc>, pub updated_at: DateTime<Utc>,
} }
@@ -58,6 +59,7 @@ pub struct BookDetails {
pub volume: Option<i32>, pub volume: Option<i32>,
pub language: Option<String>, pub language: Option<String>,
pub page_count: Option<i32>, pub page_count: Option<i32>,
pub thumbnail_url: Option<String>,
pub file_path: Option<String>, pub file_path: Option<String>,
pub file_format: Option<String>, pub file_format: Option<String>,
pub file_parse_status: Option<String>, pub file_parse_status: Option<String>,
@@ -96,7 +98,7 @@ pub async fn list_books(
let sql = format!( let sql = format!(
r#" r#"
SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at SELECT id, library_id, kind, title, author, series, volume, language, page_count, thumbnail_path, updated_at
FROM books FROM books
WHERE ($1::uuid IS NULL OR library_id = $1) WHERE ($1::uuid IS NULL OR library_id = $1)
AND ($2::text IS NULL OR kind = $2) AND ($2::text IS NULL OR kind = $2)
@@ -135,7 +137,9 @@ pub async fn list_books(
let mut items: Vec<BookItem> = rows let mut items: Vec<BookItem> = rows
.iter() .iter()
.take(limit as usize) .take(limit as usize)
.map(|row| BookItem { .map(|row| {
let thumbnail_path: Option<String> = row.get("thumbnail_path");
BookItem {
id: row.get("id"), id: row.get("id"),
library_id: row.get("library_id"), library_id: row.get("library_id"),
kind: row.get("kind"), kind: row.get("kind"),
@@ -145,7 +149,9 @@ pub async fn list_books(
volume: row.get("volume"), volume: row.get("volume"),
language: row.get("language"), language: row.get("language"),
page_count: row.get("page_count"), page_count: row.get("page_count"),
thumbnail_url: thumbnail_path.map(|_p| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
updated_at: row.get("updated_at"), updated_at: row.get("updated_at"),
}
}) })
.collect(); .collect();
@@ -182,7 +188,7 @@ pub async fn get_book(
) -> Result<Json<BookDetails>, ApiError> { ) -> Result<Json<BookDetails>, ApiError> {
let row = sqlx::query( let row = sqlx::query(
r#" r#"
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path,
bf.abs_path, bf.format, bf.parse_status bf.abs_path, bf.format, bf.parse_status
FROM books b FROM books b
LEFT JOIN LATERAL ( LEFT JOIN LATERAL (
@@ -200,6 +206,7 @@ pub async fn get_book(
.await?; .await?;
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?; let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
let thumbnail_path: Option<String> = row.get("thumbnail_path");
Ok(Json(BookDetails { Ok(Json(BookDetails {
id: row.get("id"), id: row.get("id"),
library_id: row.get("library_id"), library_id: row.get("library_id"),
@@ -210,6 +217,7 @@ pub async fn get_book(
volume: row.get("volume"), volume: row.get("volume"),
language: row.get("language"), language: row.get("language"),
page_count: row.get("page_count"), page_count: row.get("page_count"),
thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
file_path: row.get("abs_path"), file_path: row.get("abs_path"),
file_format: row.get("format"), file_format: row.get("format"),
file_parse_status: row.get("parse_status"), file_parse_status: row.get("parse_status"),
@@ -332,3 +340,40 @@ pub async fn list_series(
next_cursor, next_cursor,
})) }))
} }
use axum::{
body::Body,
http::{header, HeaderMap, HeaderValue, StatusCode},
response::IntoResponse,
};
pub async fn get_thumbnail(
State(state): State<AppState>,
Path(book_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let row = sqlx::query("SELECT thumbnail_path FROM books WHERE id = $1")
.bind(book_id)
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
let thumbnail_path: Option<String> = row.get("thumbnail_path");
let data = if let Some(ref path) = thumbnail_path {
std::fs::read(path)
.map_err(|e| ApiError::internal(format!("cannot read thumbnail: {}", e)))?
} else {
// Fallback: render page 1 on the fly (same as pages logic)
crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
};
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("image/webp"));
headers.insert(
header::CACHE_CONTROL,
HeaderValue::from_static("public, max-age=31536000, immutable"),
);
Ok((StatusCode::OK, headers, Body::from(data)))
}

View File

@@ -1,4 +1,8 @@
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json}; use axum::{
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use serde::Serialize; use serde::Serialize;
#[derive(Debug)] #[derive(Debug)]
@@ -51,7 +55,13 @@ impl ApiError {
impl IntoResponse for ApiError { impl IntoResponse for ApiError {
fn into_response(self) -> Response { fn into_response(self) -> Response {
(self.status, Json(ErrorBody { error: &self.message })).into_response() (
self.status,
Json(ErrorBody {
error: &self.message,
}),
)
.into_response()
} }
} }
@@ -60,3 +70,9 @@ impl From<sqlx::Error> for ApiError {
Self::internal(format!("database error: {err}")) Self::internal(format!("database error: {err}"))
} }
} }
impl From<std::io::Error> for ApiError {
fn from(err: std::io::Error) -> Self {
Self::internal(format!("IO error: {err}"))
}
}

View File

@@ -34,6 +34,9 @@ pub struct IndexJobResponse {
pub error_opt: Option<String>, pub error_opt: Option<String>,
#[schema(value_type = String)] #[schema(value_type = String)]
pub created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
pub progress_percent: Option<i32>,
pub processed_files: Option<i32>,
pub total_files: Option<i32>,
} }
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema)]
@@ -142,7 +145,7 @@ pub async fn enqueue_rebuild(
)] )]
pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> { pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
let rows = sqlx::query( let rows = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs ORDER BY created_at DESC LIMIT 100", "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
) )
.fetch_all(&state.pool) .fetch_all(&state.pool)
.await?; .await?;
@@ -171,7 +174,7 @@ pub async fn cancel_job(
id: axum::extract::Path<Uuid>, id: axum::extract::Path<Uuid>,
) -> Result<Json<IndexJobResponse>, ApiError> { ) -> Result<Json<IndexJobResponse>, ApiError> {
let rows_affected = sqlx::query( let rows_affected = sqlx::query(
"UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running')", "UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'generating_thumbnails')",
) )
.bind(id.0) .bind(id.0)
.execute(&state.pool) .execute(&state.pool)
@@ -182,7 +185,7 @@ pub async fn cancel_job(
} }
let row = sqlx::query( let row = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1", "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
) )
.bind(id.0) .bind(id.0)
.fetch_one(&state.pool) .fetch_one(&state.pool)
@@ -298,6 +301,9 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
stats_json: row.get("stats_json"), stats_json: row.get("stats_json"),
error_opt: row.get("error_opt"), error_opt: row.get("error_opt"),
created_at: row.get("created_at"), created_at: row.get("created_at"),
progress_percent: row.try_get("progress_percent").ok(),
processed_files: row.try_get("processed_files").ok(),
total_files: row.try_get("total_files").ok(),
} }
} }
@@ -333,9 +339,9 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
)] )]
pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> { pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
let rows = sqlx::query( let rows = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
FROM index_jobs FROM index_jobs
WHERE status IN ('pending', 'running') WHERE status IN ('pending', 'running', 'generating_thumbnails')
ORDER BY created_at ASC" ORDER BY created_at ASC"
) )
.fetch_all(&state.pool) .fetch_all(&state.pool)

View File

@@ -7,6 +7,7 @@ mod openapi;
mod pages; mod pages;
mod search; mod search;
mod settings; mod settings;
mod thumbnails;
mod tokens; mod tokens;
use std::{ use std::{
@@ -85,7 +86,7 @@ async fn main() -> anyhow::Result<()> {
meili_url: Arc::from(config.meili_url), meili_url: Arc::from(config.meili_url),
meili_master_key: Arc::from(config.meili_master_key), meili_master_key: Arc::from(config.meili_master_key),
page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))), page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
page_render_limit: Arc::new(Semaphore::new(4)), page_render_limit: Arc::new(Semaphore::new(8)),
metrics: Arc::new(Metrics::new()), metrics: Arc::new(Metrics::new()),
read_rate_limit: Arc::new(Mutex::new(ReadRateLimit { read_rate_limit: Arc::new(Mutex::new(ReadRateLimit {
window_started_at: Instant::now(), window_started_at: Instant::now(),
@@ -99,10 +100,13 @@ async fn main() -> anyhow::Result<()> {
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library)) .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring)) .route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild)) .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
.route("/index/status", get(index_jobs::list_index_jobs)) .route("/index/status", get(index_jobs::list_index_jobs))
.route("/index/jobs/active", get(index_jobs::get_active_jobs)) .route("/index/jobs/active", get(index_jobs::get_active_jobs))
.route("/index/jobs/:id", get(index_jobs::get_job_details)) .route("/index/jobs/:id", get(index_jobs::get_job_details))
.route("/index/jobs/:id/stream", get(index_jobs::stream_job_progress)) .route("/index/jobs/:id/stream", get(index_jobs::stream_job_progress))
.route("/index/jobs/:id/thumbnails/checkup", axum::routing::post(thumbnails::start_checkup))
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors)) .route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job)) .route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
.route("/folders", get(index_jobs::list_folders)) .route("/folders", get(index_jobs::list_folders))
@@ -117,6 +121,7 @@ async fn main() -> anyhow::Result<()> {
let read_routes = Router::new() let read_routes = Router::new()
.route("/books", get(books::list_books)) .route("/books", get(books::list_books))
.route("/books/:id", get(books::get_book)) .route("/books/:id", get(books::get_book))
.route("/books/:id/thumbnail", get(books::get_thumbnail))
.route("/books/:id/pages/:n", get(pages::get_page)) .route("/books/:id/pages/:n", get(pages::get_page))
.route("/libraries/:library_id/series", get(books::list_series)) .route("/libraries/:library_id/series", get(books::list_series))
.route("/search", get(search::search_books)) .route("/search", get(search::search_books))

View File

@@ -10,6 +10,8 @@ use utoipa::OpenApi;
crate::pages::get_page, crate::pages::get_page,
crate::search::search_books, crate::search::search_books,
crate::index_jobs::enqueue_rebuild, crate::index_jobs::enqueue_rebuild,
crate::thumbnails::start_thumbnails_rebuild,
crate::thumbnails::start_thumbnails_regenerate,
crate::index_jobs::list_index_jobs, crate::index_jobs::list_index_jobs,
crate::index_jobs::get_active_jobs, crate::index_jobs::get_active_jobs,
crate::index_jobs::get_job_details, crate::index_jobs::get_job_details,
@@ -37,6 +39,7 @@ use utoipa::OpenApi;
crate::search::SearchQuery, crate::search::SearchQuery,
crate::search::SearchResponse, crate::search::SearchResponse,
crate::index_jobs::RebuildRequest, crate::index_jobs::RebuildRequest,
crate::thumbnails::ThumbnailsRebuildRequest,
crate::index_jobs::IndexJobResponse, crate::index_jobs::IndexJobResponse,
crate::index_jobs::IndexJobDetailResponse, crate::index_jobs::IndexJobDetailResponse,
crate::index_jobs::JobErrorResponse, crate::index_jobs::JobErrorResponse,

View File

@@ -18,6 +18,7 @@ use sha2::{Digest, Sha256};
use sqlx::Row; use sqlx::Row;
use tracing::{debug, error, info, instrument, warn}; use tracing::{debug, error, info, instrument, warn};
use uuid::Uuid; use uuid::Uuid;
use walkdir::WalkDir;
use crate::{error::ApiError, AppState}; use crate::{error::ApiError, AppState};
@@ -220,7 +221,7 @@ pub async fn get_page(
let start_time = std::time::Instant::now(); let start_time = std::time::Instant::now();
let bytes = tokio::time::timeout( let bytes = tokio::time::timeout(
Duration::from_secs(12), Duration::from_secs(60),
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width) render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width)
}), }),
@@ -278,6 +279,54 @@ fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&
(StatusCode::OK, headers, Body::from((*bytes).clone())).into_response() (StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
} }
/// Render page 1 of a book (for thumbnail fallback or thumbnail checkup). Uses thumbnail dimensions by default.
pub async fn render_book_page_1(
state: &AppState,
book_id: Uuid,
width: u32,
quality: u8,
) -> Result<Vec<u8>, ApiError> {
let row = sqlx::query(
r#"SELECT abs_path, format FROM book_files WHERE book_id = $1 ORDER BY updated_at DESC LIMIT 1"#,
)
.bind(book_id)
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
let abs_path: String = row.get("abs_path");
let abs_path = remap_libraries_path(&abs_path);
let input_format: String = row.get("format");
let _permit = state
.page_render_limit
.clone()
.acquire_owned()
.await
.map_err(|_| ApiError::internal("render limiter unavailable"))?;
let abs_path_clone = abs_path.clone();
let bytes = tokio::time::timeout(
Duration::from_secs(60),
tokio::task::spawn_blocking(move || {
render_page(
&abs_path_clone,
&input_format,
1,
&OutputFormat::Webp,
quality,
width,
)
}),
)
.await
.map_err(|_| ApiError::internal("page rendering timeout"))?
.map_err(|e| ApiError::internal(format!("render task failed: {e}")))?;
bytes
}
fn render_page( fn render_page(
abs_path: &str, abs_path: &str,
input_format: &str, input_format: &str,
@@ -342,53 +391,64 @@ fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiErro
} }
fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> { fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
debug!("Listing CBR archive: {}", abs_path); info!("Opening CBR archive: {}", abs_path);
let list_output = std::process::Command::new("unrar")
.arg("lb") let index = page_number as usize - 1;
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-{}", Uuid::new_v4()));
debug!("Creating temp dir for CBR extraction: {}", tmp_dir.display());
std::fs::create_dir_all(&tmp_dir).map_err(|e| {
error!("Cannot create temp dir: {}", e);
ApiError::internal(format!("temp dir error: {}", e))
})?;
// Extract directly - skip listing which fails on UTF-16 encoded filenames
let extract_output = std::process::Command::new("env")
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
.arg(&tmp_dir)
.arg(abs_path) .arg(abs_path)
.output() .output()
.map_err(|e| { .map_err(|e| {
error!("unrar list command failed for {}: {}", abs_path, e); let _ = std::fs::remove_dir_all(&tmp_dir);
ApiError::internal(format!("unrar list failed: {e}")) error!("unar extract failed: {}", e);
ApiError::internal(format!("unar extract failed: {e}"))
})?; })?;
if !list_output.status.success() {
let stderr = String::from_utf8_lossy(&list_output.stderr); if !extract_output.status.success() {
error!("unrar could not list archive {}: {}", abs_path, stderr); let _ = std::fs::remove_dir_all(&tmp_dir);
return Err(ApiError::internal("unrar could not list archive")); let stderr = String::from_utf8_lossy(&extract_output.stderr);
error!("unar extract failed {}: {}", abs_path, stderr);
return Err(ApiError::internal("unar extract failed"));
} }
let mut entries: Vec<String> = String::from_utf8_lossy(&list_output.stdout) // Find and read the requested image (recursive search for CBR files with subdirectories)
.lines() let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
.filter(|line| is_image_name(&line.to_ascii_lowercase())) .into_iter()
.map(|s| s.to_string()) .filter_map(|e| e.ok())
.filter(|e| {
let name = e.file_name().to_string_lossy().to_lowercase();
is_image_name(&name)
})
.collect(); .collect();
entries.sort();
debug!("Found {} images in CBR {}", entries.len(), abs_path);
let index = page_number as usize - 1; image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
let selected = entries.get(index).ok_or_else(|| {
error!("Page {} out of range in {} (total: {})", page_number, abs_path, entries.len()); let selected = image_files.get(index).ok_or_else(|| {
let _ = std::fs::remove_dir_all(&tmp_dir);
error!("Page {} not found (total: {})", page_number, image_files.len());
ApiError::not_found("page out of range") ApiError::not_found("page out of range")
})?; })?;
debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path); let data = std::fs::read(selected.path()).map_err(|e| {
let page_output = std::process::Command::new("unrar") let _ = std::fs::remove_dir_all(&tmp_dir);
.arg("p") error!("read failed: {}", e);
.arg("-inul") ApiError::internal(format!("read error: {}", e))
.arg(abs_path)
.arg(selected)
.output()
.map_err(|e| {
error!("unrar extract command failed for {} page {}: {}", abs_path, selected, e);
ApiError::internal(format!("unrar extract failed: {e}"))
})?; })?;
if !page_output.status.success() {
let stderr = String::from_utf8_lossy(&page_output.stderr); let _ = std::fs::remove_dir_all(&tmp_dir);
error!("unrar could not extract page {} from {}: {}", selected, abs_path, stderr);
return Err(ApiError::internal("unrar could not extract page")); info!("Successfully extracted CBR page {} ({} bytes)", page_number, data.len());
} Ok(data)
debug!("Successfully extracted {} bytes from CBR page {}", page_output.stdout.len(), page_number);
Ok(page_output.stdout)
} }
fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> { fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
@@ -499,11 +559,16 @@ fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool {
} }
fn is_image_name(name: &str) -> bool { fn is_image_name(name: &str) -> bool {
name.ends_with(".jpg") let lower = name.to_lowercase();
|| name.ends_with(".jpeg") lower.ends_with(".jpg")
|| name.ends_with(".png") || lower.ends_with(".jpeg")
|| name.ends_with(".webp") || lower.ends_with(".png")
|| name.ends_with(".avif") || lower.ends_with(".webp")
|| lower.ends_with(".avif")
|| lower.ends_with(".gif")
|| lower.ends_with(".tif")
|| lower.ends_with(".tiff")
|| lower.ends_with(".bmp")
} }
#[allow(dead_code)] #[allow(dead_code)]

View File

@@ -1,6 +1,5 @@
use axum::{ use axum::{
extract::{Query, State}, extract::State,
response::IntoResponse,
routing::{get, post}, routing::{get, post},
Json, Router, Json, Router,
}; };
@@ -10,46 +9,38 @@ use sqlx::Row;
use crate::{error::ApiError, AppState}; use crate::{error::ApiError, AppState};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImageProcessingSettings {
pub format: String,
pub quality: u8,
pub filter: String,
pub max_width: u32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheSettings {
pub enabled: bool,
pub directory: String,
pub max_size_mb: u32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LimitsSettings {
pub concurrent_renders: u8,
pub timeout_seconds: u8,
pub rate_limit_per_second: u16,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AppSettings {
pub image_processing: ImageProcessingSettings,
pub cache: CacheSettings,
pub limits: LimitsSettings,
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateSettingRequest { pub struct UpdateSettingRequest {
pub value: Value, pub value: Value,
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClearCacheResponse {
pub success: bool,
pub message: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheStats {
pub total_size_mb: f64,
pub file_count: u64,
pub directory: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailStats {
pub total_size_mb: f64,
pub file_count: u64,
pub directory: String,
}
pub fn settings_routes() -> Router<AppState> { pub fn settings_routes() -> Router<AppState> {
Router::new() Router::new()
.route("/settings", get(get_settings)) .route("/settings", get(get_settings))
.route("/settings/:key", get(get_setting).post(update_setting)) .route("/settings/:key", get(get_setting).post(update_setting))
.route("/settings/cache/clear", post(clear_cache)) .route("/settings/cache/clear", post(clear_cache))
.route("/settings/cache/stats", get(get_cache_stats)) .route("/settings/cache/stats", get(get_cache_stats))
.route("/settings/thumbnail/stats", get(get_thumbnail_stats))
} }
async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> { async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
@@ -108,12 +99,6 @@ async fn update_setting(
Ok(Json(value)) Ok(Json(value))
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClearCacheResponse {
pub success: bool,
pub message: String,
}
async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> { async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> {
let cache_dir = std::env::var("IMAGE_CACHE_DIR") let cache_dir = std::env::var("IMAGE_CACHE_DIR")
.unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
@@ -143,13 +128,6 @@ async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheRe
Ok(Json(result)) Ok(Json(result))
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheStats {
pub total_size_mb: f64,
pub file_count: u64,
pub directory: String,
}
async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> { async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> {
let cache_dir = std::env::var("IMAGE_CACHE_DIR") let cache_dir = std::env::var("IMAGE_CACHE_DIR")
.unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
@@ -202,59 +180,71 @@ async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheSta
Ok(Json(stats)) Ok(Json(stats))
} }
pub async fn get_settings_from_db( fn compute_dir_stats(path: &std::path::Path) -> (u64, u64) {
pool: &sqlx::PgPool, let mut total_size: u64 = 0;
) -> Result<AppSettings, ApiError> { let mut file_count: u64 = 0;
let settings = get_settings_from_db_raw(pool).await?;
let image_processing = settings fn visit_dirs(
.get("image_processing") dir: &std::path::Path,
.and_then(|v| serde_json::from_value(v.clone()).ok()) total_size: &mut u64,
.unwrap_or_else(|| ImageProcessingSettings { file_count: &mut u64,
format: "webp".to_string(), ) -> std::io::Result<()> {
quality: 85, if dir.is_dir() {
filter: "lanczos3".to_string(), for entry in std::fs::read_dir(dir)? {
max_width: 2160, let entry = entry?;
}); let path = entry.path();
if path.is_dir() {
let cache = settings visit_dirs(&path, total_size, file_count)?;
.get("cache") } else {
.and_then(|v| serde_json::from_value(v.clone()).ok()) *total_size += entry.metadata()?.len();
.unwrap_or_else(|| CacheSettings { *file_count += 1;
enabled: true, }
directory: "/tmp/stripstream-image-cache".to_string(), }
max_size_mb: 10000, }
}); Ok(())
let limits = settings
.get("limits")
.and_then(|v| serde_json::from_value(v.clone()).ok())
.unwrap_or_else(|| LimitsSettings {
concurrent_renders: 4,
timeout_seconds: 12,
rate_limit_per_second: 120,
});
Ok(AppSettings {
image_processing,
cache,
limits,
})
}
async fn get_settings_from_db_raw(
pool: &sqlx::PgPool,
) -> Result<std::collections::HashMap<String, Value>, ApiError> {
let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#)
.fetch_all(pool)
.await?;
let mut settings = std::collections::HashMap::new();
for row in rows {
let key: String = row.get("key");
let value: Value = row.get("value");
settings.insert(key, value);
} }
Ok(settings) let _ = visit_dirs(path, &mut total_size, &mut file_count);
(total_size, file_count)
}
async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<ThumbnailStats>, ApiError> {
let settings = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
.fetch_optional(&_state.pool)
.await?;
let directory = match settings {
Some(row) => {
let value: serde_json::Value = row.get("value");
value.get("directory")
.and_then(|v| v.as_str())
.unwrap_or("/data/thumbnails")
.to_string()
}
None => "/data/thumbnails".to_string(),
};
let directory_clone = directory.clone();
let stats = tokio::task::spawn_blocking(move || {
let path = std::path::Path::new(&directory_clone);
if !path.exists() {
return ThumbnailStats {
total_size_mb: 0.0,
file_count: 0,
directory: directory_clone,
};
}
let (total_size, file_count) = compute_dir_stats(path);
ThumbnailStats {
total_size_mb: total_size as f64 / 1024.0 / 1024.0,
file_count,
directory: directory_clone,
}
})
.await
.map_err(|e| ApiError::internal(format!("thumbnail stats failed: {}", e)))?;
Ok(Json(stats))
} }

284
apps/api/src/thumbnails.rs Normal file
View File

@@ -0,0 +1,284 @@
use std::path::Path;
use anyhow::Context;
use axum::{
extract::{Path as AxumPath, State},
http::StatusCode,
Json,
};
use image::GenericImageView;
use serde::Deserialize;
use sqlx::Row;
use tracing::{info, warn};
use uuid::Uuid;
use utoipa::ToSchema;
use crate::{error::ApiError, index_jobs, pages, AppState};
/// Runtime thumbnail settings, loaded from the `app_settings` table
/// (key = 'thumbnail') with hard-coded fallbacks — see `load_thumbnail_config`.
#[derive(Clone)]
struct ThumbnailConfig {
    // Master switch: when false, thumbnail jobs finish immediately without work.
    enabled: bool,
    // Bounding-box width in pixels; aspect ratio is preserved when resizing.
    width: u32,
    // Bounding-box height in pixels.
    height: u32,
    // WebP encoder quality (0-100 scale).
    quality: u8,
    // Directory where `<book_id>.webp` files are written.
    directory: String,
}
/// Load thumbnail settings from the `app_settings` row with key `'thumbnail'`.
///
/// Best-effort: a missing row, a failed query, or any absent/mistyped JSON
/// field falls back to the built-in defaults (enabled, 300x400, quality 80,
/// `/data/thumbnails`).
async fn load_thumbnail_config(pool: &sqlx::PgPool) -> ThumbnailConfig {
    let defaults = ThumbnailConfig {
        enabled: true,
        width: 300,
        height: 400,
        quality: 80,
        directory: "/data/thumbnails".to_string(),
    };

    let query_result = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
        .fetch_optional(pool)
        .await;

    // No row or DB error: silently use the defaults.
    let row = match query_result {
        Ok(Some(row)) => row,
        _ => return defaults,
    };

    let value: serde_json::Value = row.get("value");
    // Small helper for the two u32 fields.
    let as_u32 = |key: &str| value.get(key).and_then(|v| v.as_u64()).map(|v| v as u32);

    ThumbnailConfig {
        enabled: value
            .get("enabled")
            .and_then(|v| v.as_bool())
            .unwrap_or(defaults.enabled),
        width: as_u32("width").unwrap_or(defaults.width),
        height: as_u32("height").unwrap_or(defaults.height),
        quality: value
            .get("quality")
            .and_then(|v| v.as_u64())
            .map(|v| v as u8)
            .unwrap_or(defaults.quality),
        directory: value
            .get("directory")
            .and_then(|v| v.as_str())
            .map(str::to_string)
            .unwrap_or_else(|| defaults.directory.clone()),
    }
}
/// Decode a rendered page image and re-encode it as a WebP thumbnail that
/// fits inside the configured `width` x `height` bounding box (aspect ratio
/// preserved).
///
/// Returns the encoded WebP bytes. Errors only if the input bytes cannot be
/// decoded as an image.
fn generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<Vec<u8>> {
    let img = image::load_from_memory(image_bytes).context("failed to load image")?;
    let (orig_w, orig_h) = img.dimensions();

    // Scale factor that fits the original inside the configured bounding box.
    let ratio_w = config.width as f32 / orig_w as f32;
    let ratio_h = config.height as f32 / orig_h as f32;
    let ratio = ratio_w.min(ratio_h);
    // `.max(1)`: guard against truncation to 0 for extreme aspect ratios,
    // which would make `resize` produce an empty image.
    let new_w = ((orig_w as f32 * ratio) as u32).max(1);
    let new_h = ((orig_h as f32 * ratio) as u32).max(1);
    let resized = img.resize(new_w, new_h, image::imageops::FilterType::Lanczos3);

    // The encoder wants a packed RGB buffer; `to_rgb8` discards any alpha
    // channel (no compositing against a background).
    let rgb = resized.to_rgb8();
    let (w, h) = rgb.dimensions();

    // Respect the configured quality, clamped to the encoder's 0-100 scale.
    // The previous `f32::max(quality, 85.0)` silently forced a minimum of 85,
    // overriding any lower setting (the default is 80) — a bug.
    let quality = f32::from(config.quality.clamp(1, 100));
    let webp_data = webp::Encoder::new(rgb.as_raw(), webp::PixelLayout::Rgb, w, h).encode(quality);
    Ok(webp_data.to_vec())
}
fn save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<String> {
let dir = Path::new(&config.directory);
std::fs::create_dir_all(dir)?;
let filename = format!("{}.webp", book_id);
let path = dir.join(&filename);
std::fs::write(&path, thumbnail_bytes)?;
Ok(path.to_string_lossy().to_string())
}
/// Background worker for a thumbnail job: generates a thumbnail for every
/// book in scope that has no `thumbnail_path`, updating `index_jobs`
/// progress as it goes.
///
/// Scope is the job's `library_id` (NULL = all libraries). For jobs of type
/// `thumbnail_regenerate`, all `thumbnail_path` values in scope are cleared
/// first so every book is re-processed.
///
/// Best-effort semantics: per-book render/encode/save failures are logged
/// and skipped, and the job is marked 'success' at the end regardless.
/// NOTE(review): progress counters only advance on the fully successful
/// path, so `processed_files` can end below `total_files` even though the
/// final update forces `progress_percent` to 100 — confirm intended.
async fn run_checkup(state: AppState, job_id: Uuid) {
    let pool = &state.pool;
    // Look up the job to learn its scope (library_id) and type.
    let row = sqlx::query("SELECT library_id, type FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await;
    let (library_id, job_type) = match row {
        Ok(Some(r)) => (
            r.get::<Option<Uuid>, _>("library_id"),
            r.get::<String, _>("type"),
        ),
        // Unknown job id, or the lookup itself failed: nothing to do.
        _ => {
            warn!("thumbnails checkup: job {} not found", job_id);
            return;
        }
    };
    // Regenerate: clear existing thumbnail paths in scope so the SELECT
    // below picks every book up again. Files on disk are not deleted; they
    // are overwritten in place by save_thumbnail (keyed by book id).
    if job_type == "thumbnail_regenerate" {
        let cleared = sqlx::query(
            r#"UPDATE books SET thumbnail_path = NULL WHERE (library_id = $1 OR $1 IS NULL)"#,
        )
        .bind(library_id)
        .execute(pool)
        .await;
        if let Ok(res) = cleared {
            info!("thumbnails regenerate: cleared {} books", res.rows_affected());
        }
    }
    // Work list: books in scope with no thumbnail. A query error degrades to
    // an empty list (job finishes immediately as 'success').
    let book_ids: Vec<Uuid> = sqlx::query_scalar(
        r#"SELECT id FROM books WHERE (library_id = $1 OR $1 IS NULL) AND thumbnail_path IS NULL"#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .unwrap_or_default();
    let config = load_thumbnail_config(pool).await;
    // Disabled feature or nothing to do: close the job out right away.
    if !config.enabled || book_ids.is_empty() {
        let _ = sqlx::query(
            "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
        )
        .bind(job_id)
        .execute(pool)
        .await;
        return;
    }
    let total = book_ids.len() as i32;
    // Flip the job into the UI-visible 'generating_thumbnails' phase and
    // repurpose the total/processed counters for thumbnail progress.
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'generating_thumbnails', total_files = $2, processed_files = 0, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .bind(total)
    .execute(pool)
    .await;
    // Sequential pipeline per book: render page 1 -> encode thumbnail ->
    // save to disk -> record path -> bump progress. Any failure is logged
    // and the book is skipped.
    for (i, &book_id) in book_ids.iter().enumerate() {
        match pages::render_book_page_1(&state, book_id, config.width, config.quality).await {
            Ok(page_bytes) => {
                match generate_thumbnail(&page_bytes, &config) {
                    Ok(thumb_bytes) => {
                        if let Ok(path) = save_thumbnail(book_id, &thumb_bytes, &config) {
                            // Only advance progress once the path is durably
                            // recorded on the book row.
                            if sqlx::query("UPDATE books SET thumbnail_path = $1 WHERE id = $2")
                                .bind(&path)
                                .bind(book_id)
                                .execute(pool)
                                .await
                                .is_ok()
                            {
                                let processed = (i + 1) as i32;
                                let percent = ((i + 1) as f64 / total as f64 * 100.0) as i32;
                                let _ = sqlx::query(
                                    "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1",
                                )
                                .bind(job_id)
                                .bind(processed)
                                .bind(percent)
                                .execute(pool)
                                .await;
                            }
                        }
                    }
                    Err(e) => warn!("thumbnail generate failed for book {}: {:?}", book_id, e),
                }
            }
            Err(e) => warn!("render page 1 failed for book {}: {:?}", book_id, e),
        }
    }
    // Unconditional success: per-book errors above were logged, not counted.
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .execute(pool)
    .await;
    info!("thumbnails checkup finished for job {} ({} books)", job_id, total);
}
/// Optional request body for the thumbnail rebuild/regenerate endpoints.
#[derive(Deserialize, ToSchema)]
pub struct ThumbnailsRebuildRequest {
    // Restrict the job to a single library; None (or an absent body) means
    // all libraries.
    #[schema(value_type = Option<String>)]
    pub library_id: Option<Uuid>,
}
/// POST /index/thumbnails/rebuild — create a job and generate thumbnails for books that don't have one (optional library scope).
#[utoipa::path(
post,
path = "/index/thumbnails/rebuild",
tag = "indexing",
request_body = Option<ThumbnailsRebuildRequest>,
responses(
(status = 200, body = index_jobs::IndexJobResponse),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
),
security(("Bearer" = []))
)]
pub async fn start_thumbnails_rebuild(
State(state): State<AppState>,
payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
let job_id = Uuid::new_v4();
let row = sqlx::query(
r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
)
.bind(job_id)
.bind(library_id)
.fetch_one(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
Ok(Json(index_jobs::map_row(row)))
}
/// POST /index/thumbnails/regenerate — create a job and regenerate all thumbnails in scope (clears then regenerates).
///
/// Identical to the rebuild endpoint except for the job type; the
/// 'thumbnail_regenerate' type makes the worker clear existing
/// `thumbnail_path` values in scope before generating.
#[utoipa::path(
    post,
    path = "/index/thumbnails/regenerate",
    tag = "indexing",
    request_body = Option<ThumbnailsRebuildRequest>,
    responses(
        (status = 200, body = index_jobs::IndexJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_thumbnails_regenerate(
    State(state): State<AppState>,
    payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
    // Optional scope from the (optional) request body.
    let library_scope = match &payload {
        Some(json) => json.0.library_id,
        None => None,
    };

    let new_id = Uuid::new_v4();
    let job_row = sqlx::query(
        r#"INSERT INTO index_jobs (id, library_id, type, status)
        VALUES ($1, $2, 'thumbnail_regenerate', 'pending')
        RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
    )
    .bind(new_id)
    .bind(library_scope)
    .fetch_one(&state.pool)
    .await
    .map_err(|e| ApiError::internal(e.to_string()))?;

    Ok(Json(index_jobs::map_row(job_row)))
}
/// POST /index/jobs/:id/thumbnails/checkup — start thumbnail generation for books missing thumbnails (called by indexer at end of build).
///
/// Fire-and-forget: the work runs in a detached background task and the
/// handler returns 202 Accepted immediately. An unknown `job_id` is not an
/// error here — `run_checkup` logs a warning and exits.
pub async fn start_checkup(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<StatusCode, ApiError> {
    // `state` is already owned by this handler, so it can move straight into
    // the task; the previous `state.clone()` was a redundant clone.
    tokio::spawn(async move { run_checkup(state, job_id).await });
    Ok(StatusCode::ACCEPTED)
}

View File

@@ -0,0 +1,43 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * Proxy a book's thumbnail from the internal API, attaching the server-side
 * bootstrap token so it is never exposed to the browser.
 *
 * Upstream failures are forwarded with their status; network errors become a
 * plain 500.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  const { bookId } = await params;
  const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
  // Escape the path segment: bookId comes from the URL and must not be able
  // to alter the upstream request path.
  const apiUrl = `${apiBaseUrl}/books/${encodeURIComponent(bookId)}/thumbnail`;

  const token = process.env.API_BOOTSTRAP_TOKEN;
  if (!token) {
    return new NextResponse("API token not configured", { status: 500 });
  }

  try {
    const response = await fetch(apiUrl, {
      headers: {
        Authorization: `Bearer ${token}`,
      },
    });

    if (!response.ok) {
      return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
        status: response.status
      });
    }

    const contentType = response.headers.get("content-type") || "image/webp";
    const imageBuffer = await response.arrayBuffer();

    return new NextResponse(imageBuffer, {
      headers: {
        "Content-Type": contentType,
        // Thumbnails can be regenerated in place at this same URL, so a
        // year-long `immutable` cache would pin stale images on clients.
        // One day keeps request volume low while letting regenerated
        // thumbnails propagate.
        "Cache-Control": "public, max-age=86400",
      },
    });
  } catch (error) {
    console.error("Error fetching thumbnail:", error);
    return new NextResponse("Failed to fetch thumbnail", { status: 500 });
  }
}

View File

@@ -38,7 +38,7 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
} }
export function BookCard({ book }: BookCardProps) { export function BookCard({ book }: BookCardProps) {
const coverUrl = book.coverUrl || `/api/books/${book.id}/pages/1?format=webp&width=200`; const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
return ( return (
<Link <Link

View File

@@ -87,6 +87,8 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
const percent = progress.progress_percent ?? 0; const percent = progress.progress_percent ?? 0;
const processed = progress.processed_files ?? 0; const processed = progress.processed_files ?? 0;
const total = progress.total_files ?? 0; const total = progress.total_files ?? 0;
const isThumbnailsPhase = progress.status === "generating_thumbnails";
const unitLabel = isThumbnailsPhase ? "thumbnails" : "files";
return ( return (
<div className="p-4 bg-card rounded-lg border border-border"> <div className="p-4 bg-card rounded-lg border border-border">
@@ -100,7 +102,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
<ProgressBar value={percent} showLabel size="lg" className="mb-3" /> <ProgressBar value={percent} showLabel size="lg" className="mb-3" />
<div className="flex flex-wrap items-center gap-x-4 gap-y-1 text-sm text-muted-foreground mb-3"> <div className="flex flex-wrap items-center gap-x-4 gap-y-1 text-sm text-muted-foreground mb-3">
<span>{processed} / {total} files</span> <span>{processed} / {total} {unitLabel}</span>
{progress.current_file && ( {progress.current_file && (
<span className="truncate max-w-md" title={progress.current_file}> <span className="truncate max-w-md" title={progress.current_file}>
Current: {progress.current_file.length > 40 Current: {progress.current_file.length > 40
@@ -110,7 +112,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
)} )}
</div> </div>
{progress.stats_json && ( {progress.stats_json && !isThumbnailsPhase && (
<div className="flex flex-wrap gap-3 text-xs"> <div className="flex flex-wrap gap-3 text-xs">
<Badge variant="primary">Scanned: {progress.stats_json.scanned_files}</Badge> <Badge variant="primary">Scanned: {progress.stats_json.scanned_files}</Badge>
<Badge variant="success">Indexed: {progress.stats_json.indexed_files}</Badge> <Badge variant="success">Indexed: {progress.stats_json.indexed_files}</Badge>

View File

@@ -33,9 +33,8 @@ interface JobRowProps {
} }
export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, formatDuration }: JobRowProps) { export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, formatDuration }: JobRowProps) {
const [showProgress, setShowProgress] = useState( const isActive = job.status === "running" || job.status === "pending" || job.status === "generating_thumbnails";
highlighted || job.status === "running" || job.status === "pending" const [showProgress, setShowProgress] = useState(highlighted || isActive);
);
const handleComplete = () => { const handleComplete = () => {
setShowProgress(false); setShowProgress(false);
@@ -53,12 +52,32 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
const removed = job.stats_json?.removed_files ?? 0; const removed = job.stats_json?.removed_files ?? 0;
const errors = job.stats_json?.errors ?? 0; const errors = job.stats_json?.errors ?? 0;
// Format files display const isThumbnailPhase = job.status === "generating_thumbnails";
const filesDisplay = job.status === "running" && job.total_files const isThumbnailJob = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
? `${job.processed_files || 0}/${job.total_files}` const hasThumbnailPhase = isThumbnailPhase || isThumbnailJob;
// Files column: index-phase stats only
const filesDisplay =
job.status === "running" && !isThumbnailPhase
? job.total_files != null
? `${job.processed_files ?? 0}/${job.total_files}`
: scanned > 0 : scanned > 0
? `${scanned} scanned` ? `${scanned} scanned`
: "-"; : "-"
: job.status === "success" && (indexed > 0 || removed > 0 || errors > 0)
? null // rendered below as ✓ / / ⚠
: scanned > 0
? `${scanned} scanned`
: "—";
// Thumbnails column
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isThumbnailPhase);
const thumbDisplay =
thumbInProgress && job.total_files != null
? `${job.processed_files ?? 0}/${job.total_files}`
: job.status === "success" && job.total_files != null && hasThumbnailPhase
? `${job.total_files}`
: "—";
return ( return (
<> <>
@@ -86,7 +105,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
! !
</span> </span>
)} )}
{(job.status === "running" || job.status === "pending") && ( {isActive && (
<button <button
className="text-xs text-primary hover:text-primary/80 hover:underline" className="text-xs text-primary hover:text-primary/80 hover:underline"
onClick={() => setShowProgress(!showProgress)} onClick={() => setShowProgress(!showProgress)}
@@ -98,21 +117,26 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
</td> </td>
<td className="px-4 py-3"> <td className="px-4 py-3">
<div className="flex flex-col gap-1"> <div className="flex flex-col gap-1">
{filesDisplay !== null ? (
<span className="text-sm text-foreground">{filesDisplay}</span> <span className="text-sm text-foreground">{filesDisplay}</span>
{job.status === "running" && job.total_files && ( ) : (
<MiniProgressBar
value={job.processed_files || 0}
max={job.total_files}
className="w-24"
/>
)}
{job.status === "success" && (
<div className="flex items-center gap-2 text-xs"> <div className="flex items-center gap-2 text-xs">
<span className="text-success"> {indexed}</span> <span className="text-success"> {indexed}</span>
{removed > 0 && <span className="text-warning"> {removed}</span>} {removed > 0 && <span className="text-warning"> {removed}</span>}
{errors > 0 && <span className="text-error"> {errors}</span>} {errors > 0 && <span className="text-error"> {errors}</span>}
</div> </div>
)} )}
{job.status === "running" && !isThumbnailPhase && job.total_files != null && (
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
)}
</div>
</td>
<td className="px-4 py-3">
<div className="flex flex-col gap-1">
<span className="text-sm text-foreground">{thumbDisplay}</span>
{thumbInProgress && job.total_files != null && (
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
)}
</div> </div>
</td> </td>
<td className="px-4 py-3 text-sm text-muted-foreground"> <td className="px-4 py-3 text-sm text-muted-foreground">
@@ -129,7 +153,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
> >
View View
</Link> </Link>
{(job.status === "pending" || job.status === "running") && ( {(job.status === "pending" || job.status === "running" || job.status === "generating_thumbnails") && (
<Button <Button
variant="danger" variant="danger"
size="sm" size="sm"
@@ -141,9 +165,9 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
</div> </div>
</td> </td>
</tr> </tr>
{showProgress && (job.status === "running" || job.status === "pending") && ( {showProgress && isActive && (
<tr> <tr>
<td colSpan={8} className="px-4 py-3 bg-muted/50"> <td colSpan={9} className="px-4 py-3 bg-muted/50">
<JobProgress <JobProgress
jobId={job.id} jobId={job.id}
onComplete={handleComplete} onComplete={handleComplete}

View File

@@ -78,7 +78,7 @@ export function JobsIndicator() {
return () => document.removeEventListener("mousedown", handleClickOutside); return () => document.removeEventListener("mousedown", handleClickOutside);
}, []); }, []);
const runningJobs = activeJobs.filter(j => j.status === "running"); const runningJobs = activeJobs.filter(j => j.status === "running" || j.status === "generating_thumbnails");
const pendingJobs = activeJobs.filter(j => j.status === "pending"); const pendingJobs = activeJobs.filter(j => j.status === "pending");
const totalCount = activeJobs.length; const totalCount = activeJobs.length;
@@ -210,19 +210,19 @@ export function JobsIndicator() {
> >
<div className="flex items-start gap-3"> <div className="flex items-start gap-3">
<div className="mt-0.5"> <div className="mt-0.5">
{job.status === "running" && <span className="animate-spin inline-block"></span>} {(job.status === "running" || job.status === "generating_thumbnails") && <span className="animate-spin inline-block"></span>}
{job.status === "pending" && <span></span>} {job.status === "pending" && <span></span>}
</div> </div>
<div className="flex-1 min-w-0"> <div className="flex-1 min-w-0">
<div className="flex items-center gap-2 mb-1"> <div className="flex items-center gap-2 mb-1">
<code className="text-xs px-1.5 py-0.5 bg-muted rounded font-mono">{job.id.slice(0, 8)}</code> <code className="text-xs px-1.5 py-0.5 bg-muted rounded font-mono">{job.id.slice(0, 8)}</code>
<Badge variant={job.type === 'rebuild' ? 'primary' : 'secondary'} className="text-[10px]"> <Badge variant={job.type === 'rebuild' ? 'primary' : job.type === 'thumbnail_regenerate' ? 'warning' : 'secondary'} className="text-[10px]">
{job.type} {job.type === 'thumbnail_rebuild' ? 'Thumbnails' : job.type === 'thumbnail_regenerate' ? 'Regenerate' : job.type}
</Badge> </Badge>
</div> </div>
{job.status === "running" && job.progress_percent !== null && ( {(job.status === "running" || job.status === "generating_thumbnails") && job.progress_percent != null && (
<div className="flex items-center gap-2 mt-2"> <div className="flex items-center gap-2 mt-2">
<MiniProgressBar value={job.progress_percent} /> <MiniProgressBar value={job.progress_percent} />
<span className="text-xs font-medium text-muted-foreground">{job.progress_percent}%</span> <span className="text-xs font-medium text-muted-foreground">{job.progress_percent}%</span>

View File

@@ -111,6 +111,7 @@ export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListPro
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Type</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Type</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Status</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Status</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Files</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Files</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Thumbnails</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Duration</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Duration</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Created</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Created</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Actions</th> <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Actions</th>

View File

@@ -60,6 +60,7 @@ export function Badge({ children, variant = "default", className = "" }: BadgePr
// Status badge for jobs/tasks // Status badge for jobs/tasks
const statusVariants: Record<string, BadgeVariant> = { const statusVariants: Record<string, BadgeVariant> = {
running: "in-progress", running: "in-progress",
generating_thumbnails: "in-progress",
success: "completed", success: "completed",
completed: "completed", completed: "completed",
failed: "error", failed: "error",
@@ -68,20 +69,33 @@ const statusVariants: Record<string, BadgeVariant> = {
unread: "unread", unread: "unread",
}; };
const statusLabels: Record<string, string> = {
generating_thumbnails: "Thumbnails",
};
interface StatusBadgeProps { interface StatusBadgeProps {
status: string; status: string;
className?: string; className?: string;
} }
export function StatusBadge({ status, className = "" }: StatusBadgeProps) { export function StatusBadge({ status, className = "" }: StatusBadgeProps) {
const variant = statusVariants[status.toLowerCase()] || "default"; const key = status.toLowerCase();
return <Badge variant={variant} className={className}>{status}</Badge>; const variant = statusVariants[key] || "default";
const label = statusLabels[key] ?? status;
return <Badge variant={variant} className={className}>{label}</Badge>;
} }
// Job type badge // Job type badge
const jobTypeVariants: Record<string, BadgeVariant> = { const jobTypeVariants: Record<string, BadgeVariant> = {
rebuild: "primary", rebuild: "primary",
full_rebuild: "warning", full_rebuild: "warning",
thumbnail_rebuild: "secondary",
thumbnail_regenerate: "warning",
};
const jobTypeLabels: Record<string, string> = {
thumbnail_rebuild: "Thumbnails",
thumbnail_regenerate: "Regenerate",
}; };
interface JobTypeBadgeProps { interface JobTypeBadgeProps {
@@ -90,8 +104,10 @@ interface JobTypeBadgeProps {
} }
export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) { export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
const variant = jobTypeVariants[type.toLowerCase()] || "default"; const key = type.toLowerCase();
return <Badge variant={variant} className={className}>{type}</Badge>; const variant = jobTypeVariants[key] || "default";
const label = jobTypeLabels[key] ?? type;
return <Badge variant={variant} className={className}>{label}</Badge>;
} }
// Progress badge (shows percentage) // Progress badge (shows percentage)

View File

@@ -171,19 +171,19 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
</Card> </Card>
{/* Progress Card */} {/* Progress Card */}
{(job.status === "running" || job.status === "success" || job.status === "failed") && ( {(job.status === "running" || job.status === "generating_thumbnails" || job.status === "success" || job.status === "failed") && (
<Card> <Card>
<CardHeader> <CardHeader>
<CardTitle>Progress</CardTitle> <CardTitle>{job.status === "generating_thumbnails" ? "Thumbnails" : "Progress"}</CardTitle>
</CardHeader> </CardHeader>
<CardContent> <CardContent>
{job.total_files && job.total_files > 0 && ( {job.total_files != null && job.total_files > 0 && (
<> <>
<ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" /> <ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
<div className="grid grid-cols-3 gap-4"> <div className="grid grid-cols-3 gap-4">
<StatBox value={job.processed_files || 0} label="Processed" variant="primary" /> <StatBox value={job.processed_files ?? 0} label="Processed" variant="primary" />
<StatBox value={job.total_files} label="Total" /> <StatBox value={job.total_files} label={job.status === "generating_thumbnails" ? "Total thumbnails" : "Total"} />
<StatBox value={job.total_files - (job.processed_files || 0)} label="Remaining" variant="warning" /> <StatBox value={job.total_files - (job.processed_files ?? 0)} label="Remaining" variant="warning" />
</div> </div>
</> </>
)} )}

View File

@@ -1,6 +1,6 @@
import { revalidatePath } from "next/cache"; import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation"; import { redirect } from "next/navigation";
import { listJobs, fetchLibraries, rebuildIndex, IndexJobDto, LibraryDto } from "../../lib/api"; import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, IndexJobDto, LibraryDto } from "../../lib/api";
import { JobsList } from "../components/JobsList"; import { JobsList } from "../components/JobsList";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui"; import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui";
@@ -31,6 +31,22 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
redirect(`/jobs?highlight=${result.id}`); redirect(`/jobs?highlight=${result.id}`);
} }
async function triggerThumbnailsRebuild(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerThumbnailsRegenerate(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await regenerateThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
return ( return (
<> <>
<div className="mb-6"> <div className="mb-6">
@@ -45,7 +61,7 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
<Card className="mb-6"> <Card className="mb-6">
<CardHeader> <CardHeader>
<CardTitle>Queue New Job</CardTitle> <CardTitle>Queue New Job</CardTitle>
<CardDescription>Select a library to rebuild or perform a full rebuild</CardDescription> <CardDescription>Rebuild index, full rebuild, generate missing thumbnails, or regenerate all thumbnails</CardDescription>
</CardHeader> </CardHeader>
<CardContent className="space-y-4"> <CardContent className="space-y-4">
<form action={triggerRebuild}> <form action={triggerRebuild}>
@@ -89,6 +105,48 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
</Button> </Button>
</FormRow> </FormRow>
</form> </form>
<form action={triggerThumbnailsRebuild}>
<FormRow>
<FormField className="flex-1">
<FormSelect name="library_id" defaultValue="">
<option value="">All libraries</option>
{libraries.map((lib) => (
<option key={lib.id} value={lib.id}>
{lib.name}
</option>
))}
</FormSelect>
</FormField>
<Button type="submit" variant="secondary">
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
</svg>
Generate thumbnails
</Button>
</FormRow>
</form>
<form action={triggerThumbnailsRegenerate}>
<FormRow>
<FormField className="flex-1">
<FormSelect name="library_id" defaultValue="">
<option value="">All libraries</option>
{libraries.map((lib) => (
<option key={lib.id} value={lib.id}>
{lib.name}
</option>
))}
</FormSelect>
</FormField>
<Button type="submit" variant="warning">
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
</svg>
Regenerate thumbnails
</Button>
</FormRow>
</form>
</CardContent> </CardContent>
</Card> </Card>

View File

@@ -2,16 +2,21 @@
import { useState } from "react"; import { useState } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui"; import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui";
import { Settings, CacheStats, ClearCacheResponse } from "../../lib/api"; import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats } from "../../lib/api";
interface SettingsPageProps { interface SettingsPageProps {
initialSettings: Settings; initialSettings: Settings;
initialCacheStats: CacheStats; initialCacheStats: CacheStats;
initialThumbnailStats: ThumbnailStats;
} }
export default function SettingsPage({ initialSettings, initialCacheStats }: SettingsPageProps) { export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats }: SettingsPageProps) {
const [settings, setSettings] = useState<Settings>(initialSettings); const [settings, setSettings] = useState<Settings>({
...initialSettings,
thumbnail: initialSettings.thumbnail || { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
});
const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats); const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats);
const [thumbnailStats, setThumbnailStats] = useState<ThumbnailStats>(initialThumbnailStats);
const [isClearing, setIsClearing] = useState(false); const [isClearing, setIsClearing] = useState(false);
const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null); const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null);
const [isSaving, setIsSaving] = useState(false); const [isSaving, setIsSaving] = useState(false);
@@ -299,6 +304,131 @@ export default function SettingsPage({ initialSettings, initialCacheStats }: Set
</div> </div>
</CardContent> </CardContent>
</Card> </Card>
{/* Thumbnail Settings */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="image" size="md" />
Thumbnails
</CardTitle>
<CardDescription>Configure thumbnail generation during indexing</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Enable Thumbnails</label>
<FormSelect
value={settings.thumbnail.enabled ? "true" : "false"}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, enabled: e.target.value === "true" } };
setSettings(newSettings);
handleUpdateSetting("thumbnail", newSettings.thumbnail);
}}
>
<option value="true">Enabled</option>
<option value="false">Disabled</option>
</FormSelect>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Output Format</label>
<FormSelect
value={settings.thumbnail.format}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, format: e.target.value } };
setSettings(newSettings);
handleUpdateSetting("thumbnail", newSettings.thumbnail);
}}
>
<option value="webp">WebP (Recommended)</option>
<option value="jpeg">JPEG</option>
<option value="png">PNG</option>
</FormSelect>
</FormField>
</FormRow>
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Width (px)</label>
<FormInput
type="number"
min={50}
max={600}
value={settings.thumbnail.width}
onChange={(e) => {
const width = parseInt(e.target.value) || 300;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, width } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Height (px)</label>
<FormInput
type="number"
min={50}
max={800}
value={settings.thumbnail.height}
onChange={(e) => {
const height = parseInt(e.target.value) || 400;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, height } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Quality (1-100)</label>
<FormInput
type="number"
min={1}
max={100}
value={settings.thumbnail.quality}
onChange={(e) => {
const quality = parseInt(e.target.value) || 80;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, quality } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
</FormRow>
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">Thumbnail Directory</label>
<FormInput
value={settings.thumbnail.directory}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, directory: e.target.value } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
</FormRow>
<div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
<div>
<p className="text-sm text-muted-foreground">Total Size</p>
<p className="text-2xl font-semibold">{thumbnailStats.total_size_mb.toFixed(2)} MB</p>
</div>
<div>
<p className="text-sm text-muted-foreground">Files</p>
<p className="text-2xl font-semibold">{thumbnailStats.file_count}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">Directory</p>
<p className="text-sm font-mono truncate" title={thumbnailStats.directory}>{thumbnailStats.directory}</p>
</div>
</div>
<p className="text-sm text-muted-foreground">
Note: Thumbnail settings are used during indexing. Existing thumbnails will not be regenerated automatically.
</p>
</div>
</CardContent>
</Card>
</> </>
); );
} }

View File

@@ -1,4 +1,4 @@
import { getSettings, getCacheStats } from "../../lib/api"; import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
import SettingsPage from "./SettingsPage"; import SettingsPage from "./SettingsPage";
export const dynamic = "force-dynamic"; export const dynamic = "force-dynamic";
@@ -7,7 +7,8 @@ export default async function SettingsPageWrapper() {
const settings = await getSettings().catch(() => ({ const settings = await getSettings().catch(() => ({
image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 }, image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 }, cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 } limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 },
thumbnail: { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
})); }));
const cacheStats = await getCacheStats().catch(() => ({ const cacheStats = await getCacheStats().catch(() => ({
@@ -16,5 +17,11 @@ export default async function SettingsPageWrapper() {
directory: "/tmp/stripstream-image-cache" directory: "/tmp/stripstream-image-cache"
})); }));
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} />; const thumbnailStats = await getThumbnailStats().catch(() => ({
total_size_mb: 0,
file_count: 0,
directory: "/data/thumbnails"
}));
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
} }

View File

@@ -98,7 +98,10 @@ function config() {
return { baseUrl: baseUrl.replace(/\/$/, ""), token }; return { baseUrl: baseUrl.replace(/\/$/, ""), token };
} }
export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> { export async function apiFetch<T>(
path: string,
init?: RequestInit,
): Promise<T> {
const { baseUrl, token } = config(); const { baseUrl, token } = config();
const headers = new Headers(init?.headers || {}); const headers = new Headers(init?.headers || {});
headers.set("Authorization", `Bearer ${token}`); headers.set("Authorization", `Bearer ${token}`);
@@ -109,7 +112,7 @@ export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T>
const res = await fetch(`${baseUrl}${path}`, { const res = await fetch(`${baseUrl}${path}`, {
...init, ...init,
headers, headers,
cache: "no-store" cache: "no-store",
}); });
if (!res.ok) { if (!res.ok) {
@@ -130,7 +133,7 @@ export async function fetchLibraries() {
export async function createLibrary(name: string, rootPath: string) { export async function createLibrary(name: string, rootPath: string) {
return apiFetch<LibraryDto>("/libraries", { return apiFetch<LibraryDto>("/libraries", {
method: "POST", method: "POST",
body: JSON.stringify({ name, root_path: rootPath }) body: JSON.stringify({ name, root_path: rootPath }),
}); });
} }
@@ -143,12 +146,21 @@ export async function scanLibrary(libraryId: string, full?: boolean) {
if (full) body.full = true; if (full) body.full = true;
return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, { return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, {
method: "POST", method: "POST",
body: JSON.stringify(body) body: JSON.stringify(body),
}); });
} }
export async function updateLibraryMonitoring(libraryId: string, monitorEnabled: boolean, scanMode: string, watcherEnabled?: boolean) { export async function updateLibraryMonitoring(
const body: { monitor_enabled: boolean; scan_mode: string; watcher_enabled?: boolean } = { libraryId: string,
monitorEnabled: boolean,
scanMode: string,
watcherEnabled?: boolean,
) {
const body: {
monitor_enabled: boolean;
scan_mode: string;
watcher_enabled?: boolean;
} = {
monitor_enabled: monitorEnabled, monitor_enabled: monitorEnabled,
scan_mode: scanMode, scan_mode: scanMode,
}; };
@@ -157,7 +169,7 @@ export async function updateLibraryMonitoring(libraryId: string, monitorEnabled:
} }
return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, { return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, {
method: "PATCH", method: "PATCH",
body: JSON.stringify(body) body: JSON.stringify(body),
}); });
} }
@@ -171,7 +183,25 @@ export async function rebuildIndex(libraryId?: string, full?: boolean) {
if (full) body.full = true; if (full) body.full = true;
return apiFetch<IndexJobDto>("/index/rebuild", { return apiFetch<IndexJobDto>("/index/rebuild", {
method: "POST", method: "POST",
body: JSON.stringify(body) body: JSON.stringify(body),
});
}
export async function rebuildThumbnails(libraryId?: string) {
const body: { library_id?: string } = {};
if (libraryId) body.library_id = libraryId;
return apiFetch<IndexJobDto>("/index/thumbnails/rebuild", {
method: "POST",
body: JSON.stringify(body),
});
}
export async function regenerateThumbnails(libraryId?: string) {
const body: { library_id?: string } = {};
if (libraryId) body.library_id = libraryId;
return apiFetch<IndexJobDto>("/index/thumbnails/regenerate", {
method: "POST",
body: JSON.stringify(body),
}); });
} }
@@ -191,7 +221,7 @@ export async function listTokens() {
export async function createToken(name: string, scope: string) { export async function createToken(name: string, scope: string) {
return apiFetch<{ token: string }>("/admin/tokens", { return apiFetch<{ token: string }>("/admin/tokens", {
method: "POST", method: "POST",
body: JSON.stringify({ name, scope }) body: JSON.stringify({ name, scope }),
}); });
} }
@@ -199,7 +229,12 @@ export async function revokeToken(id: string) {
return apiFetch<void>(`/admin/tokens/${id}`, { method: "DELETE" }); return apiFetch<void>(`/admin/tokens/${id}`, { method: "DELETE" });
} }
export async function fetchBooks(libraryId?: string, series?: string, cursor?: string, limit: number = 50): Promise<BooksPageDto> { export async function fetchBooks(
libraryId?: string,
series?: string,
cursor?: string,
limit: number = 50,
): Promise<BooksPageDto> {
const params = new URLSearchParams(); const params = new URLSearchParams();
if (libraryId) params.set("library_id", libraryId); if (libraryId) params.set("library_id", libraryId);
if (series) params.set("series", series); if (series) params.set("series", series);
@@ -214,15 +249,25 @@ export type SeriesPageDto = {
next_cursor: string | null; next_cursor: string | null;
}; };
export async function fetchSeries(libraryId: string, cursor?: string, limit: number = 50): Promise<SeriesPageDto> { export async function fetchSeries(
libraryId: string,
cursor?: string,
limit: number = 50,
): Promise<SeriesPageDto> {
const params = new URLSearchParams(); const params = new URLSearchParams();
if (cursor) params.set("cursor", cursor); if (cursor) params.set("cursor", cursor);
params.set("limit", limit.toString()); params.set("limit", limit.toString());
return apiFetch<SeriesPageDto>(`/libraries/${libraryId}/series?${params.toString()}`); return apiFetch<SeriesPageDto>(
`/libraries/${libraryId}/series?${params.toString()}`,
);
} }
export async function searchBooks(query: string, libraryId?: string, limit: number = 20): Promise<SearchResponseDto> { export async function searchBooks(
query: string,
libraryId?: string,
limit: number = 20,
): Promise<SearchResponseDto> {
const params = new URLSearchParams(); const params = new URLSearchParams();
params.set("q", query); params.set("q", query);
if (libraryId) params.set("library_id", libraryId); if (libraryId) params.set("library_id", libraryId);
@@ -232,9 +277,7 @@ export async function searchBooks(query: string, libraryId?: string, limit: numb
} }
export function getBookCoverUrl(bookId: string): string { export function getBookCoverUrl(bookId: string): string {
// Utiliser une route API locale pour éviter les problèmes CORS return `/api/books/${bookId}/thumbnail`;
// Le navigateur ne peut pas accéder à http://api:8080 (hostname Docker interne)
return `/api/books/${bookId}/pages/1?format=webp&width=200`;
} }
export type Settings = { export type Settings = {
@@ -254,6 +297,14 @@ export type Settings = {
timeout_seconds: number; timeout_seconds: number;
rate_limit_per_second: number; rate_limit_per_second: number;
}; };
thumbnail: {
enabled: boolean;
width: number;
height: number;
quality: number;
format: string;
directory: string;
};
}; };
export type CacheStats = { export type CacheStats = {
@@ -267,6 +318,12 @@ export type ClearCacheResponse = {
message: string; message: string;
}; };
export type ThumbnailStats = {
total_size_mb: number;
file_count: number;
directory: string;
};
export async function getSettings() { export async function getSettings() {
return apiFetch<Settings>("/settings"); return apiFetch<Settings>("/settings");
} }
@@ -274,7 +331,7 @@ export async function getSettings() {
export async function updateSetting(key: string, value: unknown) { export async function updateSetting(key: string, value: unknown) {
return apiFetch<unknown>(`/settings/${key}`, { return apiFetch<unknown>(`/settings/${key}`, {
method: "POST", method: "POST",
body: JSON.stringify({ value }) body: JSON.stringify({ value }),
}); });
} }
@@ -283,5 +340,11 @@ export async function getCacheStats() {
} }
export async function clearCache() { export async function clearCache() {
return apiFetch<ClearCacheResponse>("/settings/cache/clear", { method: "POST" }); return apiFetch<ClearCacheResponse>("/settings/cache/clear", {
method: "POST",
});
}
export async function getThumbnailStats() {
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
} }

View File

@@ -3,9 +3,9 @@
"version": "0.1.0", "version": "0.1.0",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "next dev -p 8082", "dev": "next dev -p 7082",
"build": "next build", "build": "next build",
"start": "next start -p 8082" "start": "next start -p 7082"
}, },
"dependencies": { "dependencies": {
"next": "^16.1.6", "next": "^16.1.6",

View File

@@ -11,6 +11,7 @@ chrono.workspace = true
notify = "6.1" notify = "6.1"
parsers = { path = "../../crates/parsers" } parsers = { path = "../../crates/parsers" }
rand.workspace = true rand.workspace = true
rayon.workspace = true
reqwest.workspace = true reqwest.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true

View File

@@ -4,11 +4,12 @@ use chrono::{DateTime, Utc};
use axum::http::StatusCode; use axum::http::StatusCode;
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher}; use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher};
use parsers::{detect_format, parse_metadata, BookFormat}; use parsers::{detect_format, parse_metadata, BookFormat};
use rayon::prelude::*;
use serde::Serialize; use serde::Serialize;
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use sqlx::{postgres::PgPoolOptions, Row}; use sqlx::{postgres::PgPoolOptions, Row};
use std::{collections::HashMap, path::Path, time::Duration}; use std::{collections::HashMap, path::Path, time::Duration};
use stripstream_core::config::IndexerConfig; use stripstream_core::config::{IndexerConfig, ThumbnailConfig};
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tracing::{error, info, trace, warn}; use tracing::{error, info, trace, warn};
use uuid::Uuid; use uuid::Uuid;
@@ -37,6 +38,9 @@ struct AppState {
pool: sqlx::PgPool, pool: sqlx::PgPool,
meili_url: String, meili_url: String,
meili_master_key: String, meili_master_key: String,
thumbnail_config: ThumbnailConfig,
api_base_url: String,
api_bootstrap_token: String,
} }
#[derive(Serialize)] #[derive(Serialize)]
@@ -65,6 +69,9 @@ async fn main() -> anyhow::Result<()> {
pool, pool,
meili_url: config.meili_url.clone(), meili_url: config.meili_url.clone(),
meili_master_key: config.meili_master_key.clone(), meili_master_key: config.meili_master_key.clone(),
thumbnail_config: config.thumbnail_config.clone(),
api_base_url: config.api_base_url.clone(),
api_bootstrap_token: config.api_bootstrap_token.clone(),
}; };
tokio::spawn(run_worker(state.clone(), config.scan_interval_seconds)); tokio::spawn(run_worker(state.clone(), config.scan_interval_seconds));
@@ -186,7 +193,6 @@ async fn run_file_watcher(state: AppState) -> anyhow::Result<()> {
let pool = state.pool.clone(); let pool = state.pool.clone();
tokio::spawn(async move { tokio::spawn(async move {
let mut watcher: Option<RecommendedWatcher> = None;
let mut watched_libraries: HashMap<Uuid, String> = HashMap::new(); let mut watched_libraries: HashMap<Uuid, String> = HashMap::new();
loop { loop {
@@ -217,17 +223,12 @@ async fn run_file_watcher(state: AppState) -> anyhow::Result<()> {
if needs_restart { if needs_restart {
info!("[WATCHER] Restarting watcher for {} libraries", current_libraries.len()); info!("[WATCHER] Restarting watcher for {} libraries", current_libraries.len());
// Drop old watcher
watcher = None;
watched_libraries.clear();
if !current_libraries.is_empty() { if !current_libraries.is_empty() {
let tx_clone = tx.clone(); let tx_clone = tx.clone();
let libraries_clone = current_libraries.clone(); let libraries_clone = current_libraries.clone();
match setup_watcher(libraries_clone, tx_clone) { match setup_watcher(libraries_clone, tx_clone) {
Ok(new_watcher) => { Ok(_new_watcher) => {
watcher = Some(new_watcher);
watched_libraries = current_libraries; watched_libraries = current_libraries;
info!("[WATCHER] Watching {} libraries", watched_libraries.len()); info!("[WATCHER] Watching {} libraries", watched_libraries.len());
} }
@@ -418,17 +419,54 @@ async fn claim_next_job(pool: &sqlx::PgPool) -> anyhow::Result<Option<(Uuid, Opt
async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<Uuid>) -> anyhow::Result<()> { async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<Uuid>) -> anyhow::Result<()> {
info!("[JOB] Processing {} library={:?}", job_id, target_library_id); info!("[JOB] Processing {} library={:?}", job_id, target_library_id);
// Get job type to check if it's a full rebuild
let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1") let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1")
.bind(job_id) .bind(job_id)
.fetch_one(&state.pool) .fetch_one(&state.pool)
.await?; .await?;
// Thumbnail jobs: hand off to API and wait for completion (same queue as rebuilds)
if job_type == "thumbnail_rebuild" || job_type == "thumbnail_regenerate" {
sqlx::query(
"UPDATE index_jobs SET status = 'generating_thumbnails', started_at = NOW() WHERE id = $1",
)
.bind(job_id)
.execute(&state.pool)
.await?;
let api_base = state.api_base_url.trim_end_matches('/');
let url = format!("{}/index/jobs/{}/thumbnails/checkup", api_base, job_id);
let client = reqwest::Client::new();
let res = client
.post(&url)
.header("Authorization", format!("Bearer {}", state.api_bootstrap_token))
.send()
.await?;
if !res.status().is_success() {
anyhow::bail!("thumbnail checkup API returned {}", res.status());
}
// Poll until job is finished (API updates the same row)
let poll_interval = Duration::from_secs(1);
loop {
tokio::time::sleep(poll_interval).await;
let status: String = sqlx::query_scalar("SELECT status FROM index_jobs WHERE id = $1")
.bind(job_id)
.fetch_one(&state.pool)
.await?;
if status == "success" || status == "failed" {
info!("[JOB] Thumbnail job {} finished with status {}", job_id, status);
return Ok(());
}
}
}
let is_full_rebuild = job_type == "full_rebuild"; let is_full_rebuild = job_type == "full_rebuild";
info!("[JOB] {} type={} full_rebuild={}", job_id, job_type, is_full_rebuild); info!("[JOB] {} type={} full_rebuild={}", job_id, job_type, is_full_rebuild);
// For full rebuilds, delete existing data first // For full rebuilds, delete existing data first
if is_full_rebuild { if is_full_rebuild {
info!("[JOB] Full rebuild: deleting existing data"); info!("[JOB] Full rebuild: deleting existing data");
if let Some(library_id) = target_library_id { if let Some(library_id) = target_library_id {
// Delete books and files for specific library // Delete books and files for specific library
sqlx::query("DELETE FROM book_files WHERE book_id IN (SELECT id FROM books WHERE library_id = $1)") sqlx::query("DELETE FROM book_files WHERE book_id IN (SELECT id FROM books WHERE library_id = $1)")
@@ -459,17 +497,20 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
.await? .await?
}; };
// First pass: count total files for progress estimation // First pass: count total files for progress estimation (parallel)
let mut total_files = 0usize; let library_paths: Vec<String> = libraries.iter()
for library in &libraries { .map(|library| remap_libraries_path(&library.get::<String, _>("root_path")))
let root_path: String = library.get("root_path"); .collect();
let root_path = remap_libraries_path(&root_path);
for entry in WalkDir::new(&root_path).into_iter().filter_map(Result::ok) { let total_files: usize = library_paths.par_iter()
if entry.file_type().is_file() && detect_format(entry.path()).is_some() { .map(|root_path| {
total_files += 1; WalkDir::new(root_path)
} .into_iter()
} .filter_map(Result::ok)
} .filter(|entry| entry.file_type().is_file() && detect_format(entry.path()).is_some())
.count()
})
.sum();
info!("[JOB] Found {} libraries, {} total files to index", libraries.len(), total_files); info!("[JOB] Found {} libraries, {} total files to index", libraries.len(), total_files);
@@ -505,13 +546,34 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
sync_meili(&state.pool, &state.meili_url, &state.meili_master_key).await?; sync_meili(&state.pool, &state.meili_url, &state.meili_master_key).await?;
sqlx::query("UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, current_file = NULL, progress_percent = 100, processed_files = $3 WHERE id = $1") // Hand off to API for thumbnail checkup (API will set status = 'success' when done)
sqlx::query(
"UPDATE index_jobs SET status = 'generating_thumbnails', stats_json = $2, current_file = NULL, processed_files = $3 WHERE id = $1",
)
.bind(job_id) .bind(job_id)
.bind(serde_json::to_value(&stats)?) .bind(serde_json::to_value(&stats)?)
.bind(total_processed_count) .bind(total_processed_count)
.execute(&state.pool) .execute(&state.pool)
.await?; .await?;
let api_base = state.api_base_url.trim_end_matches('/');
let url = format!("{}/index/jobs/{}/thumbnails/checkup", api_base, job_id);
let client = reqwest::Client::new();
let res = client
.post(&url)
.header("Authorization", format!("Bearer {}", state.api_bootstrap_token))
.send()
.await;
if let Err(e) = res {
warn!("[JOB] Failed to trigger thumbnail checkup: {} — API will not generate thumbnails for this job", e);
} else if let Ok(r) = res {
if !r.status().is_success() {
warn!("[JOB] Thumbnail checkup returned {} — API may not generate thumbnails", r.status());
} else {
info!("[JOB] Thumbnail checkup started (job {}), API will complete the job", job_id);
}
}
Ok(()) Ok(())
} }
@@ -550,6 +612,7 @@ struct BookInsert {
series: Option<String>, series: Option<String>,
volume: Option<i32>, volume: Option<i32>,
page_count: Option<i32>, page_count: Option<i32>,
thumbnail_path: Option<String>,
} }
struct FileInsert { struct FileInsert {
@@ -667,12 +730,13 @@ async fn flush_all_batches(
let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect(); let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect();
let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect(); let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect();
let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect(); let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect();
let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect();
sqlx::query( sqlx::query(
r#" r#"
INSERT INTO books (id, library_id, kind, title, series, volume, page_count) INSERT INTO books (id, library_id, kind, title, series, volume, page_count, thumbnail_path)
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[]) SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[], $8::text[])
AS t(id, library_id, kind, title, series, volume, page_count) AS t(id, library_id, kind, title, series, volume, page_count, thumbnail_path)
"# "#
) )
.bind(&book_ids) .bind(&book_ids)
@@ -682,6 +746,7 @@ async fn flush_all_batches(
.bind(&series) .bind(&series)
.bind(&volumes) .bind(&volumes)
.bind(&page_counts) .bind(&page_counts)
.bind(&thumbnail_paths)
.execute(&mut *tx) .execute(&mut *tx)
.await?; .await?;
@@ -948,11 +1013,12 @@ async fn scan_library(
continue; continue;
} }
// New file // New file (thumbnails generated by API after job handoff)
info!("[PROCESS] Inserting new file: {}", file_name); info!("[PROCESS] Inserting new file: {}", file_name);
let book_id = Uuid::new_v4();
match parse_metadata(path, format, root) { match parse_metadata(path, format, root) {
Ok(parsed) => { Ok(parsed) => {
let book_id = Uuid::new_v4();
let file_id = Uuid::new_v4(); let file_id = Uuid::new_v4();
books_to_insert.push(BookInsert { books_to_insert.push(BookInsert {
@@ -963,6 +1029,7 @@ async fn scan_library(
series: parsed.series, series: parsed.series,
volume: parsed.volume, volume: parsed.volume,
page_count: parsed.page_count, page_count: parsed.page_count,
thumbnail_path: None,
}); });
files_to_insert.push(FileInsert { files_to_insert.push(FileInsert {
@@ -993,6 +1060,7 @@ async fn scan_library(
series: None, series: None,
volume: None, volume: None,
page_count: None, page_count: None,
thumbnail_path: None,
}); });
files_to_insert.push(FileInsert { files_to_insert.push(FileInsert {

View File

@@ -12,10 +12,12 @@ pub struct ApiConfig {
impl ApiConfig { impl ApiConfig {
pub fn from_env() -> Result<Self> { pub fn from_env() -> Result<Self> {
Ok(Self { Ok(Self {
listen_addr: std::env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_string()), listen_addr: std::env::var("API_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?, database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?, meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?, meili_master_key: std::env::var("MEILI_MASTER_KEY")
.context("MEILI_MASTER_KEY is required")?,
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN") api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required")?, .context("API_BOOTSTRAP_TOKEN is required")?,
}) })
@@ -29,20 +31,76 @@ pub struct IndexerConfig {
pub meili_url: String, pub meili_url: String,
pub meili_master_key: String, pub meili_master_key: String,
pub scan_interval_seconds: u64, pub scan_interval_seconds: u64,
pub thumbnail_config: ThumbnailConfig,
/// API base URL for thumbnail checkup at end of build (e.g. http://api:8080)
pub api_base_url: String,
/// Token to call API (e.g. API_BOOTSTRAP_TOKEN)
pub api_bootstrap_token: String,
}
#[derive(Debug, Clone)]
pub struct ThumbnailConfig {
    // Master switch: when false, thumbnail generation is skipped.
    pub enabled: bool,
    // Target thumbnail width in pixels (default 300).
    pub width: u32,
    // Target thumbnail height in pixels (default 400).
    pub height: u32,
    // Encoding quality — presumably 0-100 lossy quality for the chosen
    // format; NOTE(review): confirm against the encoder that consumes it.
    pub quality: u8,
    // Output image format name, e.g. "webp" (the default).
    pub format: String,
    // Directory where generated thumbnails are written, e.g. "/data/thumbnails".
    pub directory: String,
}
impl Default for ThumbnailConfig {
fn default() -> Self {
Self {
enabled: true,
width: 300,
height: 400,
quality: 80,
format: "webp".to_string(),
directory: "/data/thumbnails".to_string(),
}
}
} }
impl IndexerConfig { impl IndexerConfig {
pub fn from_env() -> Result<Self> { pub fn from_env() -> Result<Self> {
let thumbnail_config = ThumbnailConfig {
enabled: std::env::var("THUMBNAIL_ENABLED")
.ok()
.and_then(|v| v.parse::<bool>().ok())
.unwrap_or(true),
width: std::env::var("THUMBNAIL_WIDTH")
.ok()
.and_then(|v| v.parse::<u32>().ok())
.unwrap_or(300),
height: std::env::var("THUMBNAIL_HEIGHT")
.ok()
.and_then(|v| v.parse::<u32>().ok())
.unwrap_or(400),
quality: std::env::var("THUMBNAIL_QUALITY")
.ok()
.and_then(|v| v.parse::<u8>().ok())
.unwrap_or(80),
format: std::env::var("THUMBNAIL_FORMAT").unwrap_or_else(|_| "webp".to_string()),
directory: std::env::var("THUMBNAIL_DIRECTORY")
.unwrap_or_else(|_| "/data/thumbnails".to_string()),
};
Ok(Self { Ok(Self {
listen_addr: std::env::var("INDEXER_LISTEN_ADDR") listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()), .unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?, database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?, meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?, meili_master_key: std::env::var("MEILI_MASTER_KEY")
.context("MEILI_MASTER_KEY is required")?,
scan_interval_seconds: std::env::var("INDEXER_SCAN_INTERVAL_SECONDS") scan_interval_seconds: std::env::var("INDEXER_SCAN_INTERVAL_SECONDS")
.ok() .ok()
.and_then(|v| v.parse::<u64>().ok()) .and_then(|v| v.parse::<u64>().ok())
.unwrap_or(5), .unwrap_or(5),
thumbnail_config,
api_base_url: std::env::var("API_BASE_URL")
.unwrap_or_else(|_| "http://api:8080".to_string()),
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required for thumbnail checkup")?,
}) })
} }
} }
@@ -59,8 +117,10 @@ impl AdminUiConfig {
Ok(Self { Ok(Self {
listen_addr: std::env::var("ADMIN_UI_LISTEN_ADDR") listen_addr: std::env::var("ADMIN_UI_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8082".to_string()), .unwrap_or_else(|_| "0.0.0.0:8082".to_string()),
api_base_url: std::env::var("API_BASE_URL").unwrap_or_else(|_| "http://api:8080".to_string()), api_base_url: std::env::var("API_BASE_URL")
api_token: std::env::var("API_BOOTSTRAP_TOKEN").context("API_BOOTSTRAP_TOKEN is required")?, .unwrap_or_else(|_| "http://api:8080".to_string()),
api_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required")?,
}) })
} }
} }

View File

@@ -8,4 +8,6 @@ license.workspace = true
anyhow.workspace = true anyhow.workspace = true
lopdf = "0.35" lopdf = "0.35"
regex = "1" regex = "1"
uuid.workspace = true
walkdir.workspace = true
zip = { version = "2.2", default-features = false, features = ["deflate"] } zip = { version = "2.2", default-features = false, features = ["deflate"] }

View File

@@ -1,5 +1,9 @@
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use std::io::Read;
use std::path::Path; use std::path::Path;
use std::process::Command;
use uuid::Uuid;
use walkdir::WalkDir;
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BookFormat { pub enum BookFormat {
@@ -240,3 +244,105 @@ fn is_image_name(name: &str) -> bool {
|| name.ends_with(".webp") || name.ends_with(".webp")
|| name.ends_with(".avif") || name.ends_with(".avif")
} }
pub fn extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>> {
match format {
BookFormat::Cbz => extract_cbz_first_page(path),
BookFormat::Cbr => extract_cbr_first_page(path),
BookFormat::Pdf => extract_pdf_first_page(path),
}
}
/// Read the bytes of the first page image inside a CBZ (zip) archive.
///
/// Entries are ordered with a case-insensitive natural sort so unpadded page
/// numbers order correctly ("page2" before "page10"); the previous byte-wise
/// sort picked "page10" first. macOS metadata entries ("__MACOSX/…", "._…")
/// are skipped: they match image extensions but contain no image data.
fn extract_cbz_first_page(path: &Path) -> Result<Vec<u8>> {
    // Natural-sort key: digit runs compare numerically, text runs compare
    // case-insensitively; at equal positions a digit run orders before text.
    fn natural_key(name: &str) -> Vec<(bool, u128, String)> {
        let mut key = Vec::new();
        let mut chars = name.chars().peekable();
        while let Some(&c) = chars.peek() {
            if c.is_ascii_digit() {
                let mut n: u128 = 0;
                while let Some(v) = chars.peek().and_then(|d| d.to_digit(10)) {
                    n = n.saturating_mul(10).saturating_add(v as u128);
                    chars.next();
                }
                key.push((false, n, String::new()));
            } else {
                let mut text = String::new();
                while let Some(&d) = chars.peek() {
                    if d.is_ascii_digit() {
                        break;
                    }
                    text.extend(d.to_lowercase());
                    chars.next();
                }
                key.push((true, 0, text));
            }
        }
        key
    }

    let file = std::fs::File::open(path)
        .with_context(|| format!("cannot open cbz: {}", path.display()))?;
    let mut archive = zip::ZipArchive::new(file).context("invalid cbz archive")?;
    let mut image_names: Vec<String> = Vec::new();
    for i in 0..archive.len() {
        let entry = archive.by_index(i).context("cannot read cbz entry")?;
        let name = entry.name().to_ascii_lowercase();
        // Skip macOS resource forks and AppleDouble files ("._page.jpg").
        let base = name.rsplit('/').next().unwrap_or(&name);
        if is_image_name(&name) && !base.starts_with('.') && !name.starts_with("__macosx/") {
            image_names.push(entry.name().to_string());
        }
    }
    // sort_by_cached_key: compute each (allocating) key once per entry.
    image_names.sort_by_cached_key(|n| natural_key(n));
    let first_image = image_names.first().context("no images found in cbz")?;
    let mut entry = archive
        .by_name(first_image)
        .context("cannot read first image")?;
    let mut buf = Vec::new();
    entry.read_to_end(&mut buf)?;
    Ok(buf)
}
/// Extract the first page image from a CBR (rar) archive by shelling out to
/// `unar`, which unpacks the whole archive into a fresh temp directory.
///
/// The fallible work runs inside a closure so the temp directory is removed on
/// every exit path — the previous version leaked it when no image was found or
/// when reading the image failed.
fn extract_cbr_first_page(path: &Path) -> Result<Vec<u8>> {
    let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-thumb-{}", Uuid::new_v4()));
    std::fs::create_dir_all(&tmp_dir).context("cannot create temp dir")?;
    let result = (|| -> Result<Vec<u8>> {
        // Force a UTF-8 locale via `env`, same as the API does, so unar copes
        // with non-ASCII entry names. NOTE(review): assumes `unar` is on PATH.
        let output = std::process::Command::new("env")
            .args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
            .arg(&tmp_dir)
            .arg(path)
            .output()
            .context("unar failed")?;
        if !output.status.success() {
            anyhow::bail!(
                "unar extract failed: {:?}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
        // Recursive walk: CBR archives often nest pages in subdirectories.
        let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
            .into_iter()
            .filter_map(|e| e.ok())
            .filter(|e| {
                let name = e.file_name().to_string_lossy().to_lowercase();
                // Require a regular file and skip AppleDouble/hidden files
                // ("._page.jpg") that match by extension but hold no image data.
                e.file_type().is_file() && !name.starts_with('.') && is_image_name(&name)
            })
            .collect();
        image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
        let first_image = image_files.first().context("no images found in cbr")?;
        Ok(std::fs::read(first_image.path())?)
    })();
    // Unconditional cleanup; best-effort, errors ignored on purpose.
    let _ = std::fs::remove_dir_all(&tmp_dir);
    result
}
/// Render the first page of a PDF to PNG via `pdftoppm` and return its bytes.
///
/// Fixes over the previous version: paths are passed as OsStr arguments
/// (`to_str().unwrap()` panicked on non-UTF-8 paths), the temp directory is
/// removed on every exit path (it leaked when the read failed), and pdftoppm's
/// stderr is included in the error message, matching the CBR extractor.
fn extract_pdf_first_page(path: &Path) -> Result<Vec<u8>> {
    let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-thumb-{}", Uuid::new_v4()));
    std::fs::create_dir_all(&tmp_dir)?;
    let output_prefix = tmp_dir.join("page");
    let result = (|| -> Result<Vec<u8>> {
        // Render only page 1, scaled to 800px on the longer side.
        let output = Command::new("pdftoppm")
            .args(["-f", "1", "-singlefile", "-png", "-scale-to", "800"])
            .arg(path)
            .arg(&output_prefix)
            .output()
            .context("pdftoppm failed")?;
        if !output.status.success() {
            anyhow::bail!(
                "pdftoppm failed: {}",
                String::from_utf8_lossy(&output.stderr)
            );
        }
        // With -singlefile, pdftoppm writes exactly "<prefix>.png".
        let image_path = output_prefix.with_extension("png");
        Ok(std::fs::read(&image_path)?)
    })();
    // Unconditional cleanup; best-effort, errors ignored on purpose.
    let _ = std::fs::remove_dir_all(&tmp_dir);
    result
}

View File

@@ -57,6 +57,7 @@ services:
- "7080:8080" - "7080:8080"
volumes: volumes:
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries - ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
depends_on: depends_on:
migrate: migrate:
condition: service_completed_successfully condition: service_completed_successfully
@@ -80,6 +81,7 @@ services:
- "7081:8081" - "7081:8081"
volumes: volumes:
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries - ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
depends_on: depends_on:
migrate: migrate:
condition: service_completed_successfully condition: service_completed_successfully

View File

@@ -0,0 +1,5 @@
-- Add a per-book thumbnail path (idempotent via IF NOT EXISTS).
ALTER TABLE books ADD COLUMN IF NOT EXISTS thumbnail_path TEXT;

-- Seed default thumbnail settings only when absent. DO NOTHING (instead of the
-- previous DO UPDATE) so a re-run of this migration does not clobber settings
-- the user has customized since the first run.
INSERT INTO app_settings (key, value) VALUES
('thumbnail', '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}')
ON CONFLICT (key) DO NOTHING;

View File

@@ -0,0 +1,6 @@
-- Migration: Add status 'generating_thumbnails' for the thumbnail phase after
-- indexing (the indexer hands the job to the API, which finishes it with
-- 'success' or 'failed').
-- Drop-and-recreate is the standard way to widen a CHECK constraint in
-- Postgres; both clauses run in a single ALTER statement.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_status_check,
    ADD CONSTRAINT index_jobs_status_check
    CHECK (status IN ('pending', 'running', 'generating_thumbnails', 'success', 'failed'));

View File

@@ -0,0 +1,6 @@
-- Migration: Add job type 'thumbnail_rebuild' for manual thumbnail generation
ALTER TABLE index_jobs
DROP CONSTRAINT IF EXISTS index_jobs_type_check,
ADD CONSTRAINT index_jobs_type_check
CHECK (type IN ('scan', 'rebuild', 'full_rebuild', 'thumbnail_rebuild', 'thumbnail_regenerate'));