Compare commits
6 Commits
f721b248f3
...
feat/thumb
| Author | SHA1 | Date | |
|---|---|---|---|
| e64848a216 | |||
| c93a7d5d29 | |||
| 360d6e85de | |||
| 162b4712e7 | |||
| 217919fa77 | |||
| ee0235b824 |
@@ -48,6 +48,10 @@ LIBRARIES_ROOT_PATH=/libraries
|
||||
# You can change this to an absolute path on your machine
|
||||
LIBRARIES_HOST_PATH=../libraries
|
||||
|
||||
# Path to thumbnails directory on host machine (for Docker volume mount)
|
||||
# Default: ../data/thumbnails (relative to infra/docker-compose.yml)
|
||||
THUMBNAILS_HOST_PATH=../data/thumbnails
|
||||
|
||||
# =============================================================================
|
||||
# Port Configuration
|
||||
# =============================================================================
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -5,3 +5,4 @@ tmp/
|
||||
libraries/
|
||||
node_modules/
|
||||
.next/
|
||||
data/thumbnails
|
||||
|
||||
301
AGENTS.md
Normal file
301
AGENTS.md
Normal file
@@ -0,0 +1,301 @@
|
||||
# AGENTS.md - Agent Coding Guidelines for Stripstream Librarian
|
||||
|
||||
This file provides guidelines for agentic coding agents operating in this repository.
|
||||
|
||||
---
|
||||
|
||||
## 1. Build, Lint, and Test Commands
|
||||
|
||||
### Build Commands
|
||||
|
||||
```bash
|
||||
# Build debug version (fastest for development)
|
||||
cargo build
|
||||
|
||||
# Build release version (optimized)
|
||||
cargo build --release
|
||||
|
||||
# Build specific crate
|
||||
cargo build -p api
|
||||
cargo build -p indexer
|
||||
|
||||
# Watch mode for development (requires cargo-watch)
|
||||
cargo watch -x build
|
||||
```
|
||||
|
||||
### Lint & Format Commands
|
||||
|
||||
```bash
|
||||
# Run clippy lints
|
||||
cargo clippy
|
||||
|
||||
# Fix auto-fixable clippy warnings
|
||||
cargo clippy --fix
|
||||
|
||||
# Format code
|
||||
cargo fmt
|
||||
|
||||
# Check formatting without making changes
|
||||
cargo fmt -- --check
|
||||
```
|
||||
|
||||
### Test Commands
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
cargo test
|
||||
|
||||
# Run tests for specific crate
|
||||
cargo test -p api
|
||||
cargo test -p indexer
|
||||
cargo test -p parsers
|
||||
|
||||
# Run a single test by name
|
||||
cargo test test_name_here
|
||||
|
||||
# Run tests with output display
|
||||
cargo test -- --nocapture
|
||||
|
||||
# Run doc tests
|
||||
cargo test --doc
|
||||
```
|
||||
|
||||
### Database Migrations
|
||||
|
||||
```bash
|
||||
# Run migrations manually (via sqlx CLI)
|
||||
# Ensure DATABASE_URL is set, then:
|
||||
sqlx migrate run
|
||||
|
||||
# Create new migration
|
||||
sqlx migrate add -r migration_name
|
||||
```
|
||||
|
||||
### Docker Development
|
||||
|
||||
```bash
|
||||
# Start infrastructure only
|
||||
cd infra && docker compose up -d postgres meilisearch
|
||||
|
||||
# Start full stack
|
||||
cd infra && docker compose up -d
|
||||
|
||||
# View logs
|
||||
docker compose logs -f api
|
||||
docker compose logs -f indexer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Code Style Guidelines
|
||||
|
||||
### General Principles
|
||||
|
||||
- **Conciseness**: Keep responses short and direct. Avoid unnecessary preamble or explanation.
|
||||
- **Idiomatic Rust**: Follow Rust best practices and ecosystem conventions.
|
||||
- **Error Handling**: Use `anyhow::Result<T>` for application code, `std::io::Result<T>` for simple file operations.
|
||||
- **Async**: Use `tokio` for async runtime. Prefer `#[tokio::main]` over manual runtime.
|
||||
|
||||
### Naming Conventions
|
||||
|
||||
| Element | Convention | Example |
|
||||
|---------|------------|---------|
|
||||
| Variables | snake_case | `let book_id = ...` |
|
||||
| Functions | snake_case | `fn get_book(...)` |
|
||||
| Structs/Enums | PascalCase | `struct BookItem` |
|
||||
| Modules | snake_case | `mod books;` |
|
||||
| Constants | SCREAMING_SNAKE_CASE | `const BATCH_SIZE: usize = 100;` |
|
||||
| Types | PascalCase | `type MyResult<T> = Result<T, Error>;` |
|
||||
|
||||
### Imports
|
||||
|
||||
- **Absolute imports** for workspace crates: `use parsers::{detect_format, parse_metadata};`
|
||||
- **Standard library** imports: `use std::path::Path;`
|
||||
- **External crates**: `use sqlx::{postgres::PgPoolOptions, Row};`
|
||||
- **Group by**: std → external → workspace → local (with blank lines between)
|
||||
|
||||
```rust
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::error::ApiError;
|
||||
use crate::AppState;
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Use `anyhow` for application-level error handling with context
|
||||
- Use `with_context()` for adding context to errors
|
||||
- Return `Result<T, ApiError>` in API handlers
|
||||
- Use `?` operator instead of manual match/unwrap where possible
|
||||
|
||||
```rust
|
||||
// Good
|
||||
fn process_book(path: &Path) -> anyhow::Result<Book> {
|
||||
let file = std::fs::File::open(path)
|
||||
.with_context(|| format!("cannot open file: {}", path.display()))?;
|
||||
// ...
|
||||
}
|
||||
|
||||
// Good - API error handling
|
||||
async fn get_book(State(state): State<AppState>, Path(id): Path<Uuid>)
|
||||
-> Result<Json<Book>, ApiError> {
|
||||
let row = sqlx::query("SELECT * FROM books WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.map_err(ApiError::internal)?;
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Database (sqlx)
|
||||
|
||||
- Use **raw SQL queries** with `sqlx::query()` and `sqlx::query_scalar()`
|
||||
- Prefer **batch operations** using `UNNEST` for bulk inserts/updates
|
||||
- Always use **parameterized queries** (`$1`, `$2`, etc.) - never string interpolation
|
||||
- Follow existing patterns for transactions:
|
||||
|
||||
```rust
|
||||
let mut tx = pool.begin().await?;
|
||||
// ... queries ...
|
||||
tx.commit().await?;
|
||||
```
|
||||
|
||||
### Async/Tokio
|
||||
|
||||
- Use `tokio::spawn` for background tasks
|
||||
- Use `spawn_blocking` for CPU-bound work (image processing, file I/O)
|
||||
- Keep async handlers non-blocking
|
||||
- Use `tokio::time::timeout` for operations with timeouts
|
||||
|
||||
```rust
|
||||
let bytes = tokio::time::timeout(
|
||||
Duration::from_secs(60),
|
||||
tokio::task::spawn_blocking(move || {
|
||||
render_page(&abs_path_clone, n)
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.map_err(|_| ApiError::internal("timeout"))?
|
||||
.map_err(ApiError::internal)?;
|
||||
```
|
||||
|
||||
### Structs and Serialization
|
||||
|
||||
- Use `#[derive(Serialize, Deserialize, ToSchema)]` for API types
|
||||
- Add `utoipa` schemas for OpenAPI documentation
|
||||
- Use `Option<T>` for nullable fields
|
||||
- Document public structs briefly
|
||||
|
||||
```rust
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct BookItem {
|
||||
#[schema(value_type = String)]
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub author: Option<String>,
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
- Use **batch operations** for database inserts/updates (100 items recommended)
|
||||
- Use **parallel iterators** (`rayon::par_iter()`) for CPU-intensive scans
|
||||
- Implement **caching** for expensive operations (see `pages.rs` for disk/memory cache examples)
|
||||
- Use **streaming** for large data where applicable
|
||||
|
||||
### Testing
|
||||
|
||||
- Currently there are no test files - consider adding unit tests for:
|
||||
- Parser functions
|
||||
- Thumbnail generation
|
||||
- Configuration parsing
|
||||
- Use `#[cfg(test)]` modules for unit tests; place integration tests in a top-level `tests/` directory
|
||||
|
||||
---
|
||||
|
||||
## 3. Project Structure
|
||||
|
||||
```
|
||||
stripstream-librarian/
|
||||
├── apps/
|
||||
│ ├── api/ # REST API (axum)
|
||||
│ │ └── src/
|
||||
│ │ ├── main.rs
|
||||
│ │ ├── books.rs
|
||||
│ │ ├── pages.rs
|
||||
│ │ └── ...
|
||||
│ ├── indexer/ # Background indexing service
|
||||
│ │ └── src/
|
||||
│ │ └── main.rs
|
||||
│ └── backoffice/ # Next.js admin UI
|
||||
├── crates/
|
||||
│ ├── core/ # Shared config
|
||||
│ │ └── src/config.rs
|
||||
│ └── parsers/ # Book parsing (CBZ, CBR, PDF)
|
||||
├── infra/
|
||||
│ ├── migrations/ # SQL migrations
|
||||
│ └── docker-compose.yml
|
||||
└── libraries/ # Book storage (mounted volume)
|
||||
```
|
||||
|
||||
### Key Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `apps/api/src/books.rs` | Book CRUD endpoints |
|
||||
| `apps/api/src/pages.rs` | Page rendering & caching |
|
||||
| `apps/indexer/src/main.rs` | Indexing logic, batch processing |
|
||||
| `crates/parsers/src/lib.rs` | Format detection, metadata parsing |
|
||||
| `crates/core/src/config.rs` | Configuration from environment |
|
||||
| `infra/migrations/*.sql` | Database schema |
|
||||
|
||||
---
|
||||
|
||||
## 4. Common Patterns
|
||||
|
||||
### Configuration from Environment
|
||||
|
||||
```rust
|
||||
// In crates/core/src/config.rs
|
||||
impl IndexerConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL")
|
||||
.context("DATABASE_URL is required")?,
|
||||
// ...
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Path Remapping
|
||||
|
||||
```rust
|
||||
fn remap_libraries_path(path: &str) -> String {
|
||||
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
|
||||
if path.starts_with("/libraries/") {
|
||||
return path.replacen("/libraries", &root, 1);
|
||||
}
|
||||
}
|
||||
path.to_string()
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Important Notes
|
||||
|
||||
- **Workspace**: This is a Cargo workspace. Always specify the package when building specific apps.
|
||||
- **Dependencies**: External crates are defined in workspace `Cargo.toml`, not individual `Cargo.toml`.
|
||||
- **Database**: PostgreSQL is required. Run migrations before starting services.
|
||||
- **External Tools**: The indexer relies on `unar` (for CBR) and `pdftoppm` (for PDF) being installed on the system.
|
||||
4
Cargo.lock
generated
4
Cargo.lock
generated
@@ -78,6 +78,7 @@ dependencies = [
|
||||
"utoipa",
|
||||
"utoipa-swagger-ui",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"webp",
|
||||
"zip 2.4.2",
|
||||
]
|
||||
@@ -1148,6 +1149,7 @@ dependencies = [
|
||||
"notify",
|
||||
"parsers",
|
||||
"rand 0.8.5",
|
||||
"rayon",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -1623,6 +1625,8 @@ dependencies = [
|
||||
"anyhow",
|
||||
"lopdf",
|
||||
"regex",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"zip 2.4.2",
|
||||
]
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ base64 = "0.22"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
|
||||
lru = "0.12"
|
||||
rayon = "1.10"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
rand = "0.8"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
141
PLAN_THUMBNAILS.md
Normal file
141
PLAN_THUMBNAILS.md
Normal file
@@ -0,0 +1,141 @@
|
||||
# Plan: Génération des vignettes à l'index
|
||||
|
||||
## 1. Base de données
|
||||
|
||||
### Migration SQL (`0010_add_thumbnails.sql`)
|
||||
- [x] Ajouter `thumbnail_path TEXT` à la table `books` (nullable)
|
||||
- [x] Ajouter settings pour thumbnails dans `app_settings`:
|
||||
```json
|
||||
{
|
||||
"thumbnail": {
|
||||
"enabled": true,
|
||||
"width": 300,
|
||||
"height": 400,
|
||||
"quality": 80,
|
||||
"format": "webp"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Configuration
|
||||
|
||||
### `crates/core/src/config.rs`
|
||||
- [x] Ajouter `ThumbnailConfig` struct
|
||||
- [x] Ajouter champs dans `IndexerConfig`:
|
||||
- `thumbnail_width: u32` (défaut: 300)
|
||||
- `thumbnail_height: u32` (défaut: 400)
|
||||
- `thumbnail_quality: u8` (défaut: 80)
|
||||
- `thumbnail_dir: String` (défaut: `/data/thumbnails`)
|
||||
- [x] Ajouter getter depuis env vars
|
||||
|
||||
---
|
||||
|
||||
## 3. Indexer - Extraction de la 1ère page
|
||||
|
||||
### Fonction à créer dans `crates/parsers/src/lib.rs`
|
||||
- [x] `extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>>`
|
||||
- Réutiliser logique de `pages.rs:extract_cbz_page`
|
||||
- Réutiliser logique de `pages.rs:extract_cbr_page`
|
||||
- Réutiliser logique de `pages.rs:render_pdf_page`
|
||||
|
||||
### Fonction de génération vignette dans `apps/indexer/src/main.rs`
|
||||
- [x] `generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>>`
|
||||
- Load image avec `image::load_from_memory`
|
||||
- Resize avec `image::resize` (ratio kept)
|
||||
- Encode en WebP avec `webp::Encoder`
|
||||
|
||||
- [x] `save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> Result<String>`
|
||||
|
||||
### Intégration dans `scan_library`
|
||||
- [x] Après parsing metadata, extraire 1ère page
|
||||
- [x] Générer vignette et sauvegarder
|
||||
- [x] Stocker chemin en DB (via batch insert)
|
||||
|
||||
---
|
||||
|
||||
## 4. Indexer - WalkDir parallèle
|
||||
|
||||
### Remplacement de `WalkDir` séquentiel
|
||||
- [x] Utiliser `rayon` pour paralléliser le scan:
|
||||
```rust
|
||||
let total_files: usize = library_paths.par_iter()
|
||||
.map(|root_path| { ... })
|
||||
.sum();
|
||||
```
|
||||
- [x] Ajouter `rayon = "1.10"` dans workspace dependencies
|
||||
|
||||
---
|
||||
|
||||
## 5. API - Service des vignettes
|
||||
|
||||
### Mise à jour models dans `apps/api/src/books.rs`
|
||||
- [x] Ajouter `thumbnail_url: Option<String>` à `BookItem`
|
||||
- [x] Ajouter `thumbnail_url: Option<String>` à `BookDetails`
|
||||
- [x] Mise à jour des requêtes SQL pour récupérer `thumbnail_path`
|
||||
|
||||
### Nouvelle route dans `apps/api/src/main.rs`
|
||||
- [x] Route `/books/:id/thumbnail` (GET)
|
||||
- Retourne fichier statique depuis `thumbnail_path`
|
||||
- Content-Type: image/webp
|
||||
- Cache-Control: public, max-age=31536000
|
||||
|
||||
### Suppression cache 1ère page (optionnel)
|
||||
- [ ] Optionnel: simplifier `pages.rs` car thumbnail pré-générée
|
||||
- [ ] Garder render pour pages > 1
|
||||
|
||||
### Adapter backoffice
|
||||
|
||||
La récupération des thumbnails est faite par une route page/1.
|
||||
- [x] Passer par la nouvelle route avec une route clean /thumbnail pour chaque cover.
|
||||
|
||||
### refacto code entre api et indexer
|
||||
|
||||
En fait l'indexer pourrait appeler l'API pour qu'elle fasse les vignettes : c'est l'API qui est responsable des images et des lectures ebooks. Je préfère que chaque domaine soit bien respecté. À la fin d'un build, on appelle l'API pour faire le checkup des thumbnails.
|
||||
Il faudra que, côté backoffice, partout où l'on peut voir le traitement live des jobs, une phase montre en SSE le traitement des thumbnails. Côté API, si on n'a pas de thumbnail, on passe par le code actuel de pages.
|
||||
|
||||
- [x] Migration `0010_index_job_thumbnails_phase.sql`: status `generating_thumbnails` dans index_jobs
|
||||
- [x] API: `get_thumbnail` fallback sur page 1 si pas de thumbnail_path (via `pages::render_book_page_1`)
|
||||
- [x] API: module `thumbnails.rs`, POST `/index/jobs/:id/thumbnails/checkup` (admin), lance la génération en tâche de fond et met à jour la job
|
||||
- [x] Indexer: plus de génération de thumbnails; en fin de build: status = `generating_thumbnails`, puis appel API checkup; config `api_base_url` + `api_bootstrap_token` (core)
|
||||
- [x] Backoffice: StatusBadge "Thumbnails" pour `generating_thumbnails`; JobProgress/JobRow/JobsIndicator/page job détail: phase thumbnails visible en SSE (X/Y thumbnails, barre de progression)
|
||||
|
||||
---
|
||||
|
||||
## 6. Settings API
|
||||
|
||||
### Endpoint settings existant
|
||||
- [ ] Vérifier que `/settings` expose thumbnail config
|
||||
- [ ] Ajouter endpoint PUT pour mettre à jour thumbnail settings
|
||||
|
||||
---
|
||||
|
||||
## 7. Tâches diverses
|
||||
|
||||
- [x] Ajouter dependency `image` et `webp` dans indexer `Cargo.toml`
|
||||
- [x] Build release vérifié
|
||||
|
||||
---
|
||||
|
||||
## Ordre d'implémentation suggéré
|
||||
|
||||
1. [x] Migration DB + settings
|
||||
2. [x] Config + parsers (extract first page)
|
||||
3. [x] Indexer thumbnail generation + save to disk
|
||||
4. [x] API serve thumbnail
|
||||
5. [x] Parallel walkdir
|
||||
6. [ ] Tests & polish (à faire)
|
||||
|
||||
---
|
||||
|
||||
## Post-déploiement
|
||||
|
||||
- [ ] Appliquer migration SQL: `psql -f infra/migrations/0010_add_thumbnails.sql`
|
||||
- [ ] Créer dossier thumbnails: `mkdir -p /data/thumbnails`
|
||||
- [ ] Configurer env vars si besoin:
|
||||
- `THUMBNAIL_ENABLED=true`
|
||||
- `THUMBNAIL_WIDTH=300`
|
||||
- `THUMBNAIL_HEIGHT=400`
|
||||
- `THUMBNAIL_QUALITY=80`
|
||||
- `THUMBNAIL_DIRECTORY=/data/thumbnails`
|
||||
@@ -32,3 +32,4 @@ zip = { version = "2.2", default-features = false, features = ["deflate"] }
|
||||
utoipa.workspace = true
|
||||
utoipa-swagger-ui = { workspace = true, features = ["axum"] }
|
||||
webp = "0.3"
|
||||
walkdir = "2"
|
||||
|
||||
@@ -21,7 +21,10 @@ RUN --mount=type=cache,target=/sccache \
|
||||
cargo build --release -p api
|
||||
|
||||
FROM debian:bookworm-slim
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free poppler-utils && rm -rf /var/lib/apt/lists/*
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unar poppler-utils locales && rm -rf /var/lib/apt/lists/*
|
||||
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
|
||||
ENV LANG=en_US.UTF-8
|
||||
ENV LC_ALL=en_US.UTF-8
|
||||
COPY --from=builder /app/target/release/api /usr/local/bin/api
|
||||
EXPOSE 8080
|
||||
CMD ["/usr/local/bin/api"]
|
||||
|
||||
@@ -34,6 +34,7 @@ pub struct BookItem {
|
||||
pub volume: Option<i32>,
|
||||
pub language: Option<String>,
|
||||
pub page_count: Option<i32>,
|
||||
pub thumbnail_url: Option<String>,
|
||||
#[schema(value_type = String)]
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
@@ -58,6 +59,7 @@ pub struct BookDetails {
|
||||
pub volume: Option<i32>,
|
||||
pub language: Option<String>,
|
||||
pub page_count: Option<i32>,
|
||||
pub thumbnail_url: Option<String>,
|
||||
pub file_path: Option<String>,
|
||||
pub file_format: Option<String>,
|
||||
pub file_parse_status: Option<String>,
|
||||
@@ -96,7 +98,7 @@ pub async fn list_books(
|
||||
|
||||
let sql = format!(
|
||||
r#"
|
||||
SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at
|
||||
SELECT id, library_id, kind, title, author, series, volume, language, page_count, thumbnail_path, updated_at
|
||||
FROM books
|
||||
WHERE ($1::uuid IS NULL OR library_id = $1)
|
||||
AND ($2::text IS NULL OR kind = $2)
|
||||
@@ -135,17 +137,21 @@ pub async fn list_books(
|
||||
let mut items: Vec<BookItem> = rows
|
||||
.iter()
|
||||
.take(limit as usize)
|
||||
.map(|row| BookItem {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
kind: row.get("kind"),
|
||||
title: row.get("title"),
|
||||
author: row.get("author"),
|
||||
series: row.get("series"),
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
updated_at: row.get("updated_at"),
|
||||
.map(|row| {
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
BookItem {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
kind: row.get("kind"),
|
||||
title: row.get("title"),
|
||||
author: row.get("author"),
|
||||
series: row.get("series"),
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
thumbnail_url: thumbnail_path.map(|_p| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
|
||||
updated_at: row.get("updated_at"),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -182,7 +188,7 @@ pub async fn get_book(
|
||||
) -> Result<Json<BookDetails>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count,
|
||||
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path,
|
||||
bf.abs_path, bf.format, bf.parse_status
|
||||
FROM books b
|
||||
LEFT JOIN LATERAL (
|
||||
@@ -200,6 +206,7 @@ pub async fn get_book(
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
Ok(Json(BookDetails {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
@@ -210,6 +217,7 @@ pub async fn get_book(
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
|
||||
file_path: row.get("abs_path"),
|
||||
file_format: row.get("format"),
|
||||
file_parse_status: row.get("parse_status"),
|
||||
@@ -332,3 +340,40 @@ pub async fn list_series(
|
||||
next_cursor,
|
||||
}))
|
||||
}
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
pub async fn get_thumbnail(
|
||||
State(state): State<AppState>,
|
||||
Path(book_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let row = sqlx::query("SELECT thumbnail_path FROM books WHERE id = $1")
|
||||
.bind(book_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
|
||||
let data = if let Some(ref path) = thumbnail_path {
|
||||
std::fs::read(path)
|
||||
.map_err(|e| ApiError::internal(format!("cannot read thumbnail: {}", e)))?
|
||||
} else {
|
||||
// Fallback: render page 1 on the fly (same as pages logic)
|
||||
crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
|
||||
};
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("image/webp"));
|
||||
headers.insert(
|
||||
header::CACHE_CONTROL,
|
||||
HeaderValue::from_static("public, max-age=31536000, immutable"),
|
||||
);
|
||||
|
||||
Ok((StatusCode::OK, headers, Body::from(data)))
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
|
||||
use axum::{
|
||||
http::StatusCode,
|
||||
response::{IntoResponse, Response},
|
||||
Json,
|
||||
};
|
||||
use serde::Serialize;
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -51,7 +55,13 @@ impl ApiError {
|
||||
|
||||
impl IntoResponse for ApiError {
|
||||
fn into_response(self) -> Response {
|
||||
(self.status, Json(ErrorBody { error: &self.message })).into_response()
|
||||
(
|
||||
self.status,
|
||||
Json(ErrorBody {
|
||||
error: &self.message,
|
||||
}),
|
||||
)
|
||||
.into_response()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,3 +70,9 @@ impl From<sqlx::Error> for ApiError {
|
||||
Self::internal(format!("database error: {err}"))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for ApiError {
|
||||
fn from(err: std::io::Error) -> Self {
|
||||
Self::internal(format!("IO error: {err}"))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,9 @@ pub struct IndexJobResponse {
|
||||
pub error_opt: Option<String>,
|
||||
#[schema(value_type = String)]
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub progress_percent: Option<i32>,
|
||||
pub processed_files: Option<i32>,
|
||||
pub total_files: Option<i32>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -142,7 +145,7 @@ pub async fn enqueue_rebuild(
|
||||
)]
|
||||
pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs ORDER BY created_at DESC LIMIT 100",
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
@@ -171,7 +174,7 @@ pub async fn cancel_job(
|
||||
id: axum::extract::Path<Uuid>,
|
||||
) -> Result<Json<IndexJobResponse>, ApiError> {
|
||||
let rows_affected = sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running')",
|
||||
"UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'generating_thumbnails')",
|
||||
)
|
||||
.bind(id.0)
|
||||
.execute(&state.pool)
|
||||
@@ -182,7 +185,7 @@ pub async fn cancel_job(
|
||||
}
|
||||
|
||||
let row = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
.bind(id.0)
|
||||
.fetch_one(&state.pool)
|
||||
@@ -298,6 +301,9 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
|
||||
stats_json: row.get("stats_json"),
|
||||
error_opt: row.get("error_opt"),
|
||||
created_at: row.get("created_at"),
|
||||
progress_percent: row.try_get("progress_percent").ok(),
|
||||
processed_files: row.try_get("processed_files").ok(),
|
||||
total_files: row.try_get("total_files").ok(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -333,9 +339,9 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
|
||||
)]
|
||||
pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
|
||||
FROM index_jobs
|
||||
WHERE status IN ('pending', 'running')
|
||||
WHERE status IN ('pending', 'running', 'generating_thumbnails')
|
||||
ORDER BY created_at ASC"
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
|
||||
@@ -7,6 +7,7 @@ mod openapi;
|
||||
mod pages;
|
||||
mod search;
|
||||
mod settings;
|
||||
mod thumbnails;
|
||||
mod tokens;
|
||||
|
||||
use std::{
|
||||
@@ -85,7 +86,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
meili_url: Arc::from(config.meili_url),
|
||||
meili_master_key: Arc::from(config.meili_master_key),
|
||||
page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
|
||||
page_render_limit: Arc::new(Semaphore::new(4)),
|
||||
page_render_limit: Arc::new(Semaphore::new(8)),
|
||||
metrics: Arc::new(Metrics::new()),
|
||||
read_rate_limit: Arc::new(Mutex::new(ReadRateLimit {
|
||||
window_started_at: Instant::now(),
|
||||
@@ -99,10 +100,13 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
|
||||
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
|
||||
.route("/index/status", get(index_jobs::list_index_jobs))
|
||||
.route("/index/jobs/active", get(index_jobs::get_active_jobs))
|
||||
.route("/index/jobs/:id", get(index_jobs::get_job_details))
|
||||
.route("/index/jobs/:id/stream", get(index_jobs::stream_job_progress))
|
||||
.route("/index/jobs/:id/thumbnails/checkup", axum::routing::post(thumbnails::start_checkup))
|
||||
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
|
||||
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
|
||||
.route("/folders", get(index_jobs::list_folders))
|
||||
@@ -117,6 +121,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
let read_routes = Router::new()
|
||||
.route("/books", get(books::list_books))
|
||||
.route("/books/:id", get(books::get_book))
|
||||
.route("/books/:id/thumbnail", get(books::get_thumbnail))
|
||||
.route("/books/:id/pages/:n", get(pages::get_page))
|
||||
.route("/libraries/:library_id/series", get(books::list_series))
|
||||
.route("/search", get(search::search_books))
|
||||
|
||||
@@ -10,6 +10,8 @@ use utoipa::OpenApi;
|
||||
crate::pages::get_page,
|
||||
crate::search::search_books,
|
||||
crate::index_jobs::enqueue_rebuild,
|
||||
crate::thumbnails::start_thumbnails_rebuild,
|
||||
crate::thumbnails::start_thumbnails_regenerate,
|
||||
crate::index_jobs::list_index_jobs,
|
||||
crate::index_jobs::get_active_jobs,
|
||||
crate::index_jobs::get_job_details,
|
||||
@@ -37,6 +39,7 @@ use utoipa::OpenApi;
|
||||
crate::search::SearchQuery,
|
||||
crate::search::SearchResponse,
|
||||
crate::index_jobs::RebuildRequest,
|
||||
crate::thumbnails::ThumbnailsRebuildRequest,
|
||||
crate::index_jobs::IndexJobResponse,
|
||||
crate::index_jobs::IndexJobDetailResponse,
|
||||
crate::index_jobs::JobErrorResponse,
|
||||
|
||||
@@ -18,6 +18,7 @@ use sha2::{Digest, Sha256};
|
||||
use sqlx::Row;
|
||||
use tracing::{debug, error, info, instrument, warn};
|
||||
use uuid::Uuid;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use crate::{error::ApiError, AppState};
|
||||
|
||||
@@ -220,7 +221,7 @@ pub async fn get_page(
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
let bytes = tokio::time::timeout(
|
||||
Duration::from_secs(12),
|
||||
Duration::from_secs(60),
|
||||
tokio::task::spawn_blocking(move || {
|
||||
render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width)
|
||||
}),
|
||||
@@ -278,6 +279,54 @@ fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&
|
||||
(StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
|
||||
}
|
||||
|
||||
/// Render page 1 of a book (for thumbnail fallback or thumbnail checkup). Uses thumbnail dimensions by default.
|
||||
pub async fn render_book_page_1(
|
||||
state: &AppState,
|
||||
book_id: Uuid,
|
||||
width: u32,
|
||||
quality: u8,
|
||||
) -> Result<Vec<u8>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
r#"SELECT abs_path, format FROM book_files WHERE book_id = $1 ORDER BY updated_at DESC LIMIT 1"#,
|
||||
)
|
||||
.bind(book_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
|
||||
let abs_path: String = row.get("abs_path");
|
||||
let abs_path = remap_libraries_path(&abs_path);
|
||||
let input_format: String = row.get("format");
|
||||
|
||||
let _permit = state
|
||||
.page_render_limit
|
||||
.clone()
|
||||
.acquire_owned()
|
||||
.await
|
||||
.map_err(|_| ApiError::internal("render limiter unavailable"))?;
|
||||
|
||||
let abs_path_clone = abs_path.clone();
|
||||
let bytes = tokio::time::timeout(
|
||||
Duration::from_secs(60),
|
||||
tokio::task::spawn_blocking(move || {
|
||||
render_page(
|
||||
&abs_path_clone,
|
||||
&input_format,
|
||||
1,
|
||||
&OutputFormat::Webp,
|
||||
quality,
|
||||
width,
|
||||
)
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.map_err(|_| ApiError::internal("page rendering timeout"))?
|
||||
.map_err(|e| ApiError::internal(format!("render task failed: {e}")))?;
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
fn render_page(
|
||||
abs_path: &str,
|
||||
input_format: &str,
|
||||
@@ -342,53 +391,64 @@ fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiErro
|
||||
}
|
||||
|
||||
fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
|
||||
debug!("Listing CBR archive: {}", abs_path);
|
||||
let list_output = std::process::Command::new("unrar")
|
||||
.arg("lb")
|
||||
info!("Opening CBR archive: {}", abs_path);
|
||||
|
||||
let index = page_number as usize - 1;
|
||||
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-{}", Uuid::new_v4()));
|
||||
debug!("Creating temp dir for CBR extraction: {}", tmp_dir.display());
|
||||
|
||||
std::fs::create_dir_all(&tmp_dir).map_err(|e| {
|
||||
error!("Cannot create temp dir: {}", e);
|
||||
ApiError::internal(format!("temp dir error: {}", e))
|
||||
})?;
|
||||
|
||||
// Extract directly - skip listing which fails on UTF-16 encoded filenames
|
||||
let extract_output = std::process::Command::new("env")
|
||||
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
|
||||
.arg(&tmp_dir)
|
||||
.arg(abs_path)
|
||||
.output()
|
||||
.map_err(|e| {
|
||||
error!("unrar list command failed for {}: {}", abs_path, e);
|
||||
ApiError::internal(format!("unrar list failed: {e}"))
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
error!("unar extract failed: {}", e);
|
||||
ApiError::internal(format!("unar extract failed: {e}"))
|
||||
})?;
|
||||
if !list_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&list_output.stderr);
|
||||
error!("unrar could not list archive {}: {}", abs_path, stderr);
|
||||
return Err(ApiError::internal("unrar could not list archive"));
|
||||
|
||||
if !extract_output.status.success() {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
let stderr = String::from_utf8_lossy(&extract_output.stderr);
|
||||
error!("unar extract failed {}: {}", abs_path, stderr);
|
||||
return Err(ApiError::internal("unar extract failed"));
|
||||
}
|
||||
|
||||
let mut entries: Vec<String> = String::from_utf8_lossy(&list_output.stdout)
|
||||
.lines()
|
||||
.filter(|line| is_image_name(&line.to_ascii_lowercase()))
|
||||
.map(|s| s.to_string())
|
||||
// Find and read the requested image (recursive search for CBR files with subdirectories)
|
||||
let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
|
||||
.into_iter()
|
||||
.filter_map(|e| e.ok())
|
||||
.filter(|e| {
|
||||
let name = e.file_name().to_string_lossy().to_lowercase();
|
||||
is_image_name(&name)
|
||||
})
|
||||
.collect();
|
||||
entries.sort();
|
||||
debug!("Found {} images in CBR {}", entries.len(), abs_path);
|
||||
|
||||
image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
|
||||
|
||||
let index = page_number as usize - 1;
|
||||
let selected = entries.get(index).ok_or_else(|| {
|
||||
error!("Page {} out of range in {} (total: {})", page_number, abs_path, entries.len());
|
||||
let selected = image_files.get(index).ok_or_else(|| {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
error!("Page {} not found (total: {})", page_number, image_files.len());
|
||||
ApiError::not_found("page out of range")
|
||||
})?;
|
||||
|
||||
debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path);
|
||||
let page_output = std::process::Command::new("unrar")
|
||||
.arg("p")
|
||||
.arg("-inul")
|
||||
.arg(abs_path)
|
||||
.arg(selected)
|
||||
.output()
|
||||
.map_err(|e| {
|
||||
error!("unrar extract command failed for {} page {}: {}", abs_path, selected, e);
|
||||
ApiError::internal(format!("unrar extract failed: {e}"))
|
||||
})?;
|
||||
if !page_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&page_output.stderr);
|
||||
error!("unrar could not extract page {} from {}: {}", selected, abs_path, stderr);
|
||||
return Err(ApiError::internal("unrar could not extract page"));
|
||||
}
|
||||
debug!("Successfully extracted {} bytes from CBR page {}", page_output.stdout.len(), page_number);
|
||||
Ok(page_output.stdout)
|
||||
let data = std::fs::read(selected.path()).map_err(|e| {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
error!("read failed: {}", e);
|
||||
ApiError::internal(format!("read error: {}", e))
|
||||
})?;
|
||||
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
|
||||
info!("Successfully extracted CBR page {} ({} bytes)", page_number, data.len());
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
|
||||
@@ -499,11 +559,16 @@ fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool {
|
||||
}
|
||||
|
||||
/// Case-insensitive check for file names carrying a known raster-image extension.
fn is_image_name(name: &str) -> bool {
    const IMAGE_EXTENSIONS: [&str; 9] = [
        ".jpg", ".jpeg", ".png", ".webp", ".avif", ".gif", ".tif", ".tiff", ".bmp",
    ];
    let lower = name.to_lowercase();
    IMAGE_EXTENSIONS.iter().any(|ext| lower.ends_with(ext))
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use axum::{
|
||||
extract::{Query, State},
|
||||
response::IntoResponse,
|
||||
extract::State,
|
||||
routing::{get, post},
|
||||
Json, Router,
|
||||
};
|
||||
@@ -10,46 +9,38 @@ use sqlx::Row;
|
||||
|
||||
use crate::{error::ApiError, AppState};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ImageProcessingSettings {
|
||||
pub format: String,
|
||||
pub quality: u8,
|
||||
pub filter: String,
|
||||
pub max_width: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CacheSettings {
|
||||
pub enabled: bool,
|
||||
pub directory: String,
|
||||
pub max_size_mb: u32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LimitsSettings {
|
||||
pub concurrent_renders: u8,
|
||||
pub timeout_seconds: u8,
|
||||
pub rate_limit_per_second: u16,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AppSettings {
|
||||
pub image_processing: ImageProcessingSettings,
|
||||
pub cache: CacheSettings,
|
||||
pub limits: LimitsSettings,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct UpdateSettingRequest {
|
||||
pub value: Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ClearCacheResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CacheStats {
|
||||
pub total_size_mb: f64,
|
||||
pub file_count: u64,
|
||||
pub directory: String,
|
||||
}
|
||||
|
||||
/// Disk-usage report for the generated-thumbnails directory, returned by
/// `GET /settings/thumbnail/stats`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailStats {
    /// Total size of all files under the directory, in megabytes.
    pub total_size_mb: f64,
    /// Number of files counted (directory is walked recursively).
    pub file_count: u64,
    /// Path of the directory that was measured.
    pub directory: String,
}
|
||||
|
||||
/// Build the router for the settings endpoints: reading/updating individual
/// settings plus cache and thumbnail maintenance/statistics routes.
pub fn settings_routes() -> Router<AppState> {
    Router::new()
        .route("/settings", get(get_settings))
        .route("/settings/:key", get(get_setting).post(update_setting))
        .route("/settings/cache/clear", post(clear_cache))
        .route("/settings/cache/stats", get(get_cache_stats))
        .route("/settings/thumbnail/stats", get(get_thumbnail_stats))
}
|
||||
|
||||
async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
|
||||
@@ -108,12 +99,6 @@ async fn update_setting(
|
||||
Ok(Json(value))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ClearCacheResponse {
|
||||
pub success: bool,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> {
|
||||
let cache_dir = std::env::var("IMAGE_CACHE_DIR")
|
||||
.unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
|
||||
@@ -143,13 +128,6 @@ async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheRe
|
||||
Ok(Json(result))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CacheStats {
|
||||
pub total_size_mb: f64,
|
||||
pub file_count: u64,
|
||||
pub directory: String,
|
||||
}
|
||||
|
||||
async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> {
|
||||
let cache_dir = std::env::var("IMAGE_CACHE_DIR")
|
||||
.unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
|
||||
@@ -202,59 +180,71 @@ async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheSta
|
||||
Ok(Json(stats))
|
||||
}
|
||||
|
||||
pub async fn get_settings_from_db(
|
||||
pool: &sqlx::PgPool,
|
||||
) -> Result<AppSettings, ApiError> {
|
||||
let settings = get_settings_from_db_raw(pool).await?;
|
||||
|
||||
let image_processing = settings
|
||||
.get("image_processing")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
.unwrap_or_else(|| ImageProcessingSettings {
|
||||
format: "webp".to_string(),
|
||||
quality: 85,
|
||||
filter: "lanczos3".to_string(),
|
||||
max_width: 2160,
|
||||
});
|
||||
fn compute_dir_stats(path: &std::path::Path) -> (u64, u64) {
|
||||
let mut total_size: u64 = 0;
|
||||
let mut file_count: u64 = 0;
|
||||
|
||||
let cache = settings
|
||||
.get("cache")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
.unwrap_or_else(|| CacheSettings {
|
||||
enabled: true,
|
||||
directory: "/tmp/stripstream-image-cache".to_string(),
|
||||
max_size_mb: 10000,
|
||||
});
|
||||
|
||||
let limits = settings
|
||||
.get("limits")
|
||||
.and_then(|v| serde_json::from_value(v.clone()).ok())
|
||||
.unwrap_or_else(|| LimitsSettings {
|
||||
concurrent_renders: 4,
|
||||
timeout_seconds: 12,
|
||||
rate_limit_per_second: 120,
|
||||
});
|
||||
|
||||
Ok(AppSettings {
|
||||
image_processing,
|
||||
cache,
|
||||
limits,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_settings_from_db_raw(
|
||||
pool: &sqlx::PgPool,
|
||||
) -> Result<std::collections::HashMap<String, Value>, ApiError> {
|
||||
let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#)
|
||||
.fetch_all(pool)
|
||||
.await?;
|
||||
|
||||
let mut settings = std::collections::HashMap::new();
|
||||
for row in rows {
|
||||
let key: String = row.get("key");
|
||||
let value: Value = row.get("value");
|
||||
settings.insert(key, value);
|
||||
fn visit_dirs(
|
||||
dir: &std::path::Path,
|
||||
total_size: &mut u64,
|
||||
file_count: &mut u64,
|
||||
) -> std::io::Result<()> {
|
||||
if dir.is_dir() {
|
||||
for entry in std::fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
visit_dirs(&path, total_size, file_count)?;
|
||||
} else {
|
||||
*total_size += entry.metadata()?.len();
|
||||
*file_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
Ok(settings)
|
||||
let _ = visit_dirs(path, &mut total_size, &mut file_count);
|
||||
(total_size, file_count)
|
||||
}
|
||||
|
||||
async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<ThumbnailStats>, ApiError> {
|
||||
let settings = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
|
||||
.fetch_optional(&_state.pool)
|
||||
.await?;
|
||||
|
||||
let directory = match settings {
|
||||
Some(row) => {
|
||||
let value: serde_json::Value = row.get("value");
|
||||
value.get("directory")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("/data/thumbnails")
|
||||
.to_string()
|
||||
}
|
||||
None => "/data/thumbnails".to_string(),
|
||||
};
|
||||
|
||||
let directory_clone = directory.clone();
|
||||
let stats = tokio::task::spawn_blocking(move || {
|
||||
let path = std::path::Path::new(&directory_clone);
|
||||
if !path.exists() {
|
||||
return ThumbnailStats {
|
||||
total_size_mb: 0.0,
|
||||
file_count: 0,
|
||||
directory: directory_clone,
|
||||
};
|
||||
}
|
||||
|
||||
let (total_size, file_count) = compute_dir_stats(path);
|
||||
|
||||
ThumbnailStats {
|
||||
total_size_mb: total_size as f64 / 1024.0 / 1024.0,
|
||||
file_count,
|
||||
directory: directory_clone,
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("thumbnail stats failed: {}", e)))?;
|
||||
|
||||
Ok(Json(stats))
|
||||
}
|
||||
|
||||
284
apps/api/src/thumbnails.rs
Normal file
284
apps/api/src/thumbnails.rs
Normal file
@@ -0,0 +1,284 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use axum::{
|
||||
extract::{Path as AxumPath, State},
|
||||
http::StatusCode,
|
||||
Json,
|
||||
};
|
||||
use image::GenericImageView;
|
||||
use serde::Deserialize;
|
||||
use sqlx::Row;
|
||||
use tracing::{info, warn};
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, index_jobs, pages, AppState};
|
||||
|
||||
/// Runtime thumbnail settings loaded from the `thumbnail` row of `app_settings`.
#[derive(Clone)]
struct ThumbnailConfig {
    /// Whether thumbnail generation runs at all.
    enabled: bool,
    /// Bounding-box width in pixels; the image is scaled to fit inside it.
    width: u32,
    /// Bounding-box height in pixels; the image is scaled to fit inside it.
    height: u32,
    /// WebP encoder quality. NOTE(review): `generate_thumbnail` currently
    /// floors the effective quality at 85 — confirm that is intended.
    quality: u8,
    /// Directory where thumbnail files are written.
    directory: String,
}
|
||||
|
||||
async fn load_thumbnail_config(pool: &sqlx::PgPool) -> ThumbnailConfig {
|
||||
let fallback = ThumbnailConfig {
|
||||
enabled: true,
|
||||
width: 300,
|
||||
height: 400,
|
||||
quality: 80,
|
||||
directory: "/data/thumbnails".to_string(),
|
||||
};
|
||||
let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
|
||||
.fetch_optional(pool)
|
||||
.await;
|
||||
|
||||
match row {
|
||||
Ok(Some(row)) => {
|
||||
let value: serde_json::Value = row.get("value");
|
||||
ThumbnailConfig {
|
||||
enabled: value
|
||||
.get("enabled")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(fallback.enabled),
|
||||
width: value
|
||||
.get("width")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as u32)
|
||||
.unwrap_or(fallback.width),
|
||||
height: value
|
||||
.get("height")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as u32)
|
||||
.unwrap_or(fallback.height),
|
||||
quality: value
|
||||
.get("quality")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|v| v as u8)
|
||||
.unwrap_or(fallback.quality),
|
||||
directory: value
|
||||
.get("directory")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| fallback.directory.clone()),
|
||||
}
|
||||
}
|
||||
_ => fallback,
|
||||
}
|
||||
}
|
||||
|
||||
fn generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<Vec<u8>> {
|
||||
let img = image::load_from_memory(image_bytes).context("failed to load image")?;
|
||||
let (orig_w, orig_h) = img.dimensions();
|
||||
let ratio_w = config.width as f32 / orig_w as f32;
|
||||
let ratio_h = config.height as f32 / orig_h as f32;
|
||||
let ratio = ratio_w.min(ratio_h);
|
||||
let new_w = (orig_w as f32 * ratio) as u32;
|
||||
let new_h = (orig_h as f32 * ratio) as u32;
|
||||
let resized = img.resize(new_w, new_h, image::imageops::FilterType::Lanczos3);
|
||||
let rgba = resized.to_rgba8();
|
||||
let (w, h) = rgba.dimensions();
|
||||
let rgb_data: Vec<u8> = rgba.pixels().flat_map(|p| [p[0], p[1], p[2]]).collect();
|
||||
let quality = f32::max(config.quality as f32, 85.0);
|
||||
let webp_data =
|
||||
webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h).encode(quality);
|
||||
Ok(webp_data.to_vec())
|
||||
}
|
||||
|
||||
fn save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<String> {
|
||||
let dir = Path::new(&config.directory);
|
||||
std::fs::create_dir_all(dir)?;
|
||||
let filename = format!("{}.webp", book_id);
|
||||
let path = dir.join(&filename);
|
||||
std::fs::write(&path, thumbnail_bytes)?;
|
||||
Ok(path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
/// Background worker for a thumbnail job: generates a thumbnail for every
/// book in scope that lacks one, tracking progress on the `index_jobs` row.
///
/// Behavior visible in this function:
/// - Loads the job's `library_id` (NULL = all libraries) and `type`.
/// - For `thumbnail_regenerate` jobs, first clears `thumbnail_path` on all
///   books in scope so they are regenerated below.
/// - Marks the job `success` immediately when generation is disabled or no
///   books need work; otherwise enters the `generating_thumbnails` phase.
/// - Per-book failures are logged and skipped; the job still ends `success`.
async fn run_checkup(state: AppState, job_id: Uuid) {
    let pool = &state.pool;
    let row = sqlx::query("SELECT library_id, type FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await;

    // Bail out if the job row is missing or the lookup failed.
    let (library_id, job_type) = match row {
        Ok(Some(r)) => (
            r.get::<Option<Uuid>, _>("library_id"),
            r.get::<String, _>("type"),
        ),
        _ => {
            warn!("thumbnails checkup: job {} not found", job_id);
            return;
        }
    };

    // Regenerate: clear existing thumbnails in scope so they get regenerated
    // ($1 IS NULL widens the scope to every library).
    if job_type == "thumbnail_regenerate" {
        let cleared = sqlx::query(
            r#"UPDATE books SET thumbnail_path = NULL WHERE (library_id = $1 OR $1 IS NULL)"#,
        )
        .bind(library_id)
        .execute(pool)
        .await;
        if let Ok(res) = cleared {
            info!("thumbnails regenerate: cleared {} books", res.rows_affected());
        }
    }

    // Work list: books in scope with no thumbnail. A query error is treated
    // as "nothing to do" via unwrap_or_default.
    let book_ids: Vec<Uuid> = sqlx::query_scalar(
        r#"SELECT id FROM books WHERE (library_id = $1 OR $1 IS NULL) AND thumbnail_path IS NULL"#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .unwrap_or_default();

    let config = load_thumbnail_config(pool).await;
    // Nothing to generate (disabled or empty scope): close the job as success.
    if !config.enabled || book_ids.is_empty() {
        let _ = sqlx::query(
            "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
        )
        .bind(job_id)
        .execute(pool)
        .await;
        return;
    }

    // Enter the thumbnail phase; the UI reads status/total/processed from here.
    let total = book_ids.len() as i32;
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'generating_thumbnails', total_files = $2, processed_files = 0, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .bind(total)
    .execute(pool)
    .await;

    // Sequential pipeline per book: render page 1 -> encode thumbnail ->
    // save file -> record path -> bump job progress. Each step's failure is
    // logged (or silently skipped for save/update) and the loop continues.
    for (i, &book_id) in book_ids.iter().enumerate() {
        match pages::render_book_page_1(&state, book_id, config.width, config.quality).await {
            Ok(page_bytes) => {
                match generate_thumbnail(&page_bytes, &config) {
                    Ok(thumb_bytes) => {
                        if let Ok(path) = save_thumbnail(book_id, &thumb_bytes, &config) {
                            if sqlx::query("UPDATE books SET thumbnail_path = $1 WHERE id = $2")
                                .bind(&path)
                                .bind(book_id)
                                .execute(pool)
                                .await
                                .is_ok()
                            {
                                // Progress is only advanced for fully
                                // persisted thumbnails.
                                let processed = (i + 1) as i32;
                                let percent = ((i + 1) as f64 / total as f64 * 100.0) as i32;
                                let _ = sqlx::query(
                                    "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1",
                                )
                                .bind(job_id)
                                .bind(processed)
                                .bind(percent)
                                .execute(pool)
                                .await;
                            }
                        }
                    }
                    Err(e) => warn!("thumbnail generate failed for book {}: {:?}", book_id, e),
                }
            }
            Err(e) => warn!("render page 1 failed for book {}: {:?}", book_id, e),
        }
    }

    // Close the job regardless of per-book failures above.
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .execute(pool)
    .await;

    info!("thumbnails checkup finished for job {} ({} books)", job_id, total);
}
|
||||
|
||||
/// Optional request body for the thumbnail rebuild/regenerate endpoints;
/// when `library_id` is set, the created job is scoped to that library.
#[derive(Deserialize, ToSchema)]
pub struct ThumbnailsRebuildRequest {
    #[schema(value_type = Option<String>)]
    pub library_id: Option<Uuid>,
}
|
||||
|
||||
/// POST /index/thumbnails/rebuild — create a job and generate thumbnails for books that don't have one (optional library scope).
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/index/thumbnails/rebuild",
|
||||
tag = "indexing",
|
||||
request_body = Option<ThumbnailsRebuildRequest>,
|
||||
responses(
|
||||
(status = 200, body = index_jobs::IndexJobResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn start_thumbnails_rebuild(
|
||||
State(state): State<AppState>,
|
||||
payload: Option<Json<ThumbnailsRebuildRequest>>,
|
||||
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
|
||||
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
|
||||
let job_id = Uuid::new_v4();
|
||||
|
||||
let row = sqlx::query(
|
||||
r#"INSERT INTO index_jobs (id, library_id, type, status)
|
||||
VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
|
||||
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
Ok(Json(index_jobs::map_row(row)))
|
||||
}
|
||||
|
||||
/// POST /index/thumbnails/regenerate — create a job and regenerate all thumbnails in scope (clears then regenerates).
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/index/thumbnails/regenerate",
|
||||
tag = "indexing",
|
||||
request_body = Option<ThumbnailsRebuildRequest>,
|
||||
responses(
|
||||
(status = 200, body = index_jobs::IndexJobResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn start_thumbnails_regenerate(
|
||||
State(state): State<AppState>,
|
||||
payload: Option<Json<ThumbnailsRebuildRequest>>,
|
||||
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
|
||||
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
|
||||
let job_id = Uuid::new_v4();
|
||||
|
||||
let row = sqlx::query(
|
||||
r#"INSERT INTO index_jobs (id, library_id, type, status)
|
||||
VALUES ($1, $2, 'thumbnail_regenerate', 'pending')
|
||||
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
Ok(Json(index_jobs::map_row(row)))
|
||||
}
|
||||
|
||||
/// POST /index/jobs/:id/thumbnails/checkup — start thumbnail generation for books missing thumbnails (called by indexer at end of build).
|
||||
pub async fn start_checkup(
|
||||
State(state): State<AppState>,
|
||||
AxumPath(job_id): AxumPath<Uuid>,
|
||||
) -> Result<StatusCode, ApiError> {
|
||||
let state = state.clone();
|
||||
tokio::spawn(async move { run_checkup(state, job_id).await });
|
||||
Ok(StatusCode::ACCEPTED)
|
||||
}
|
||||
43
apps/backoffice/app/api/books/[bookId]/thumbnail/route.ts
Normal file
43
apps/backoffice/app/api/books/[bookId]/thumbnail/route.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ bookId: string }> }
|
||||
) {
|
||||
const { bookId } = await params;
|
||||
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiUrl = `${apiBaseUrl}/books/${bookId}/thumbnail`;
|
||||
|
||||
const token = process.env.API_BOOTSTRAP_TOKEN;
|
||||
if (!token) {
|
||||
return new NextResponse("API token not configured", { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(apiUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
|
||||
status: response.status
|
||||
});
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") || "image/webp";
|
||||
const imageBuffer = await response.arrayBuffer();
|
||||
|
||||
return new NextResponse(imageBuffer, {
|
||||
headers: {
|
||||
"Content-Type": contentType,
|
||||
"Cache-Control": "public, max-age=31536000, immutable",
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Error fetching thumbnail:", error);
|
||||
return new NextResponse("Failed to fetch thumbnail", { status: 500 });
|
||||
}
|
||||
}
|
||||
@@ -38,7 +38,7 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
|
||||
}
|
||||
|
||||
export function BookCard({ book }: BookCardProps) {
|
||||
const coverUrl = book.coverUrl || `/api/books/${book.id}/pages/1?format=webp&width=200`;
|
||||
const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
|
||||
|
||||
return (
|
||||
<Link
|
||||
|
||||
@@ -87,6 +87,8 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
|
||||
const percent = progress.progress_percent ?? 0;
|
||||
const processed = progress.processed_files ?? 0;
|
||||
const total = progress.total_files ?? 0;
|
||||
const isThumbnailsPhase = progress.status === "generating_thumbnails";
|
||||
const unitLabel = isThumbnailsPhase ? "thumbnails" : "files";
|
||||
|
||||
return (
|
||||
<div className="p-4 bg-card rounded-lg border border-border">
|
||||
@@ -100,7 +102,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
|
||||
<ProgressBar value={percent} showLabel size="lg" className="mb-3" />
|
||||
|
||||
<div className="flex flex-wrap items-center gap-x-4 gap-y-1 text-sm text-muted-foreground mb-3">
|
||||
<span>{processed} / {total} files</span>
|
||||
<span>{processed} / {total} {unitLabel}</span>
|
||||
{progress.current_file && (
|
||||
<span className="truncate max-w-md" title={progress.current_file}>
|
||||
Current: {progress.current_file.length > 40
|
||||
@@ -110,7 +112,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
|
||||
)}
|
||||
</div>
|
||||
|
||||
{progress.stats_json && (
|
||||
{progress.stats_json && !isThumbnailsPhase && (
|
||||
<div className="flex flex-wrap gap-3 text-xs">
|
||||
<Badge variant="primary">Scanned: {progress.stats_json.scanned_files}</Badge>
|
||||
<Badge variant="success">Indexed: {progress.stats_json.indexed_files}</Badge>
|
||||
|
||||
@@ -33,9 +33,8 @@ interface JobRowProps {
|
||||
}
|
||||
|
||||
export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, formatDuration }: JobRowProps) {
|
||||
const [showProgress, setShowProgress] = useState(
|
||||
highlighted || job.status === "running" || job.status === "pending"
|
||||
);
|
||||
const isActive = job.status === "running" || job.status === "pending" || job.status === "generating_thumbnails";
|
||||
const [showProgress, setShowProgress] = useState(highlighted || isActive);
|
||||
|
||||
const handleComplete = () => {
|
||||
setShowProgress(false);
|
||||
@@ -53,12 +52,32 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
const removed = job.stats_json?.removed_files ?? 0;
|
||||
const errors = job.stats_json?.errors ?? 0;
|
||||
|
||||
// Format files display
|
||||
const filesDisplay = job.status === "running" && job.total_files
|
||||
? `${job.processed_files || 0}/${job.total_files}`
|
||||
: scanned > 0
|
||||
? `${scanned} scanned`
|
||||
: "-";
|
||||
const isThumbnailPhase = job.status === "generating_thumbnails";
|
||||
const isThumbnailJob = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
|
||||
const hasThumbnailPhase = isThumbnailPhase || isThumbnailJob;
|
||||
|
||||
// Files column: index-phase stats only
|
||||
const filesDisplay =
|
||||
job.status === "running" && !isThumbnailPhase
|
||||
? job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: scanned > 0
|
||||
? `${scanned} scanned`
|
||||
: "-"
|
||||
: job.status === "success" && (indexed > 0 || removed > 0 || errors > 0)
|
||||
? null // rendered below as ✓ / − / ⚠
|
||||
: scanned > 0
|
||||
? `${scanned} scanned`
|
||||
: "—";
|
||||
|
||||
// Thumbnails column
|
||||
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isThumbnailPhase);
|
||||
const thumbDisplay =
|
||||
thumbInProgress && job.total_files != null
|
||||
? `${job.processed_files ?? 0}/${job.total_files}`
|
||||
: job.status === "success" && job.total_files != null && hasThumbnailPhase
|
||||
? `✓ ${job.total_files}`
|
||||
: "—";
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -86,7 +105,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
!
|
||||
</span>
|
||||
)}
|
||||
{(job.status === "running" || job.status === "pending") && (
|
||||
{isActive && (
|
||||
<button
|
||||
className="text-xs text-primary hover:text-primary/80 hover:underline"
|
||||
onClick={() => setShowProgress(!showProgress)}
|
||||
@@ -98,21 +117,26 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{filesDisplay}</span>
|
||||
{job.status === "running" && job.total_files && (
|
||||
<MiniProgressBar
|
||||
value={job.processed_files || 0}
|
||||
max={job.total_files}
|
||||
className="w-24"
|
||||
/>
|
||||
)}
|
||||
{job.status === "success" && (
|
||||
{filesDisplay !== null ? (
|
||||
<span className="text-sm text-foreground">{filesDisplay}</span>
|
||||
) : (
|
||||
<div className="flex items-center gap-2 text-xs">
|
||||
<span className="text-success">✓ {indexed}</span>
|
||||
{removed > 0 && <span className="text-warning">− {removed}</span>}
|
||||
{errors > 0 && <span className="text-error">⚠ {errors}</span>}
|
||||
</div>
|
||||
)}
|
||||
{job.status === "running" && !isThumbnailPhase && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
<td className="px-4 py-3">
|
||||
<div className="flex flex-col gap-1">
|
||||
<span className="text-sm text-foreground">{thumbDisplay}</span>
|
||||
{thumbInProgress && job.total_files != null && (
|
||||
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
|
||||
)}
|
||||
</div>
|
||||
</td>
|
||||
<td className="px-4 py-3 text-sm text-muted-foreground">
|
||||
@@ -129,7 +153,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
>
|
||||
View
|
||||
</Link>
|
||||
{(job.status === "pending" || job.status === "running") && (
|
||||
{(job.status === "pending" || job.status === "running" || job.status === "generating_thumbnails") && (
|
||||
<Button
|
||||
variant="danger"
|
||||
size="sm"
|
||||
@@ -141,9 +165,9 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{showProgress && (job.status === "running" || job.status === "pending") && (
|
||||
{showProgress && isActive && (
|
||||
<tr>
|
||||
<td colSpan={8} className="px-4 py-3 bg-muted/50">
|
||||
<td colSpan={9} className="px-4 py-3 bg-muted/50">
|
||||
<JobProgress
|
||||
jobId={job.id}
|
||||
onComplete={handleComplete}
|
||||
|
||||
@@ -78,7 +78,7 @@ export function JobsIndicator() {
|
||||
return () => document.removeEventListener("mousedown", handleClickOutside);
|
||||
}, []);
|
||||
|
||||
const runningJobs = activeJobs.filter(j => j.status === "running");
|
||||
const runningJobs = activeJobs.filter(j => j.status === "running" || j.status === "generating_thumbnails");
|
||||
const pendingJobs = activeJobs.filter(j => j.status === "pending");
|
||||
const totalCount = activeJobs.length;
|
||||
|
||||
@@ -210,19 +210,19 @@ export function JobsIndicator() {
|
||||
>
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="mt-0.5">
|
||||
{job.status === "running" && <span className="animate-spin inline-block">⏳</span>}
|
||||
{(job.status === "running" || job.status === "generating_thumbnails") && <span className="animate-spin inline-block">⏳</span>}
|
||||
{job.status === "pending" && <span>⏸</span>}
|
||||
</div>
|
||||
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2 mb-1">
|
||||
<code className="text-xs px-1.5 py-0.5 bg-muted rounded font-mono">{job.id.slice(0, 8)}</code>
|
||||
<Badge variant={job.type === 'rebuild' ? 'primary' : 'secondary'} className="text-[10px]">
|
||||
{job.type}
|
||||
<Badge variant={job.type === 'rebuild' ? 'primary' : job.type === 'thumbnail_regenerate' ? 'warning' : 'secondary'} className="text-[10px]">
|
||||
{job.type === 'thumbnail_rebuild' ? 'Thumbnails' : job.type === 'thumbnail_regenerate' ? 'Regenerate' : job.type}
|
||||
</Badge>
|
||||
</div>
|
||||
|
||||
{job.status === "running" && job.progress_percent !== null && (
|
||||
{(job.status === "running" || job.status === "generating_thumbnails") && job.progress_percent != null && (
|
||||
<div className="flex items-center gap-2 mt-2">
|
||||
<MiniProgressBar value={job.progress_percent} />
|
||||
<span className="text-xs font-medium text-muted-foreground">{job.progress_percent}%</span>
|
||||
|
||||
@@ -111,6 +111,7 @@ export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListPro
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Type</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Status</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Files</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Thumbnails</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Duration</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Created</th>
|
||||
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Actions</th>
|
||||
|
||||
@@ -60,6 +60,7 @@ export function Badge({ children, variant = "default", className = "" }: BadgePr
|
||||
// Status badge for jobs/tasks
|
||||
const statusVariants: Record<string, BadgeVariant> = {
|
||||
running: "in-progress",
|
||||
generating_thumbnails: "in-progress",
|
||||
success: "completed",
|
||||
completed: "completed",
|
||||
failed: "error",
|
||||
@@ -68,20 +69,33 @@ const statusVariants: Record<string, BadgeVariant> = {
|
||||
unread: "unread",
|
||||
};
|
||||
|
||||
const statusLabels: Record<string, string> = {
|
||||
generating_thumbnails: "Thumbnails",
|
||||
};
|
||||
|
||||
interface StatusBadgeProps {
|
||||
status: string;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function StatusBadge({ status, className = "" }: StatusBadgeProps) {
|
||||
const variant = statusVariants[status.toLowerCase()] || "default";
|
||||
return <Badge variant={variant} className={className}>{status}</Badge>;
|
||||
const key = status.toLowerCase();
|
||||
const variant = statusVariants[key] || "default";
|
||||
const label = statusLabels[key] ?? status;
|
||||
return <Badge variant={variant} className={className}>{label}</Badge>;
|
||||
}
|
||||
|
||||
// Job type badge
|
||||
const jobTypeVariants: Record<string, BadgeVariant> = {
|
||||
rebuild: "primary",
|
||||
full_rebuild: "warning",
|
||||
thumbnail_rebuild: "secondary",
|
||||
thumbnail_regenerate: "warning",
|
||||
};
|
||||
|
||||
const jobTypeLabels: Record<string, string> = {
|
||||
thumbnail_rebuild: "Thumbnails",
|
||||
thumbnail_regenerate: "Regenerate",
|
||||
};
|
||||
|
||||
interface JobTypeBadgeProps {
|
||||
@@ -90,8 +104,10 @@ interface JobTypeBadgeProps {
|
||||
}
|
||||
|
||||
export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
|
||||
const variant = jobTypeVariants[type.toLowerCase()] || "default";
|
||||
return <Badge variant={variant} className={className}>{type}</Badge>;
|
||||
const key = type.toLowerCase();
|
||||
const variant = jobTypeVariants[key] || "default";
|
||||
const label = jobTypeLabels[key] ?? type;
|
||||
return <Badge variant={variant} className={className}>{label}</Badge>;
|
||||
}
|
||||
|
||||
// Progress badge (shows percentage)
|
||||
|
||||
@@ -171,19 +171,19 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
|
||||
</Card>
|
||||
|
||||
{/* Progress Card */}
|
||||
{(job.status === "running" || job.status === "success" || job.status === "failed") && (
|
||||
{(job.status === "running" || job.status === "generating_thumbnails" || job.status === "success" || job.status === "failed") && (
|
||||
<Card>
|
||||
<CardHeader>
|
||||
<CardTitle>Progress</CardTitle>
|
||||
<CardTitle>{job.status === "generating_thumbnails" ? "Thumbnails" : "Progress"}</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{job.total_files && job.total_files > 0 && (
|
||||
{job.total_files != null && job.total_files > 0 && (
|
||||
<>
|
||||
<ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
|
||||
<div className="grid grid-cols-3 gap-4">
|
||||
<StatBox value={job.processed_files || 0} label="Processed" variant="primary" />
|
||||
<StatBox value={job.total_files} label="Total" />
|
||||
<StatBox value={job.total_files - (job.processed_files || 0)} label="Remaining" variant="warning" />
|
||||
<StatBox value={job.processed_files ?? 0} label="Processed" variant="primary" />
|
||||
<StatBox value={job.total_files} label={job.status === "generating_thumbnails" ? "Total thumbnails" : "Total"} />
|
||||
<StatBox value={job.total_files - (job.processed_files ?? 0)} label="Remaining" variant="warning" />
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { JobsList } from "../components/JobsList";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui";
|
||||
|
||||
@@ -31,6 +31,22 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function triggerThumbnailsRebuild(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
const result = await rebuildThumbnails(libraryId || undefined);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function triggerThumbnailsRegenerate(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
const result = await regenerateThumbnails(libraryId || undefined);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="mb-6">
|
||||
@@ -45,7 +61,7 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle>Queue New Job</CardTitle>
|
||||
<CardDescription>Select a library to rebuild or perform a full rebuild</CardDescription>
|
||||
<CardDescription>Rebuild index, full rebuild, generate missing thumbnails, or regenerate all thumbnails</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
<form action={triggerRebuild}>
|
||||
@@ -89,6 +105,48 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
|
||||
</Button>
|
||||
</FormRow>
|
||||
</form>
|
||||
|
||||
<form action={triggerThumbnailsRebuild}>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<FormSelect name="library_id" defaultValue="">
|
||||
<option value="">All libraries</option>
|
||||
{libraries.map((lib) => (
|
||||
<option key={lib.id} value={lib.id}>
|
||||
{lib.name}
|
||||
</option>
|
||||
))}
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
<Button type="submit" variant="secondary">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||
</svg>
|
||||
Generate thumbnails
|
||||
</Button>
|
||||
</FormRow>
|
||||
</form>
|
||||
|
||||
<form action={triggerThumbnailsRegenerate}>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<FormSelect name="library_id" defaultValue="">
|
||||
<option value="">All libraries</option>
|
||||
{libraries.map((lib) => (
|
||||
<option key={lib.id} value={lib.id}>
|
||||
{lib.name}
|
||||
</option>
|
||||
))}
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
<Button type="submit" variant="warning">
|
||||
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||
</svg>
|
||||
Regenerate thumbnails
|
||||
</Button>
|
||||
</FormRow>
|
||||
</form>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
|
||||
@@ -2,16 +2,21 @@
|
||||
|
||||
import { useState } from "react";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui";
|
||||
import { Settings, CacheStats, ClearCacheResponse } from "../../lib/api";
|
||||
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats } from "../../lib/api";
|
||||
|
||||
interface SettingsPageProps {
|
||||
initialSettings: Settings;
|
||||
initialCacheStats: CacheStats;
|
||||
initialThumbnailStats: ThumbnailStats;
|
||||
}
|
||||
|
||||
export default function SettingsPage({ initialSettings, initialCacheStats }: SettingsPageProps) {
|
||||
const [settings, setSettings] = useState<Settings>(initialSettings);
|
||||
export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats }: SettingsPageProps) {
|
||||
const [settings, setSettings] = useState<Settings>({
|
||||
...initialSettings,
|
||||
thumbnail: initialSettings.thumbnail || { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
|
||||
});
|
||||
const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats);
|
||||
const [thumbnailStats, setThumbnailStats] = useState<ThumbnailStats>(initialThumbnailStats);
|
||||
const [isClearing, setIsClearing] = useState(false);
|
||||
const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null);
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
@@ -299,6 +304,131 @@ export default function SettingsPage({ initialSettings, initialCacheStats }: Set
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Thumbnail Settings */}
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="image" size="md" />
|
||||
Thumbnails
|
||||
</CardTitle>
|
||||
<CardDescription>Configure thumbnail generation during indexing</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Enable Thumbnails</label>
|
||||
<FormSelect
|
||||
value={settings.thumbnail.enabled ? "true" : "false"}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, enabled: e.target.value === "true" } };
|
||||
setSettings(newSettings);
|
||||
handleUpdateSetting("thumbnail", newSettings.thumbnail);
|
||||
}}
|
||||
>
|
||||
<option value="true">Enabled</option>
|
||||
<option value="false">Disabled</option>
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Output Format</label>
|
||||
<FormSelect
|
||||
value={settings.thumbnail.format}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, format: e.target.value } };
|
||||
setSettings(newSettings);
|
||||
handleUpdateSetting("thumbnail", newSettings.thumbnail);
|
||||
}}
|
||||
>
|
||||
<option value="webp">WebP (Recommended)</option>
|
||||
<option value="jpeg">JPEG</option>
|
||||
<option value="png">PNG</option>
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Width (px)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={50}
|
||||
max={600}
|
||||
value={settings.thumbnail.width}
|
||||
onChange={(e) => {
|
||||
const width = parseInt(e.target.value) || 300;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, width } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Height (px)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={50}
|
||||
max={800}
|
||||
value={settings.thumbnail.height}
|
||||
onChange={(e) => {
|
||||
const height = parseInt(e.target.value) || 400;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, height } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Quality (1-100)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={1}
|
||||
max={100}
|
||||
value={settings.thumbnail.quality}
|
||||
onChange={(e) => {
|
||||
const quality = parseInt(e.target.value) || 80;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, quality } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Thumbnail Directory</label>
|
||||
<FormInput
|
||||
value={settings.thumbnail.directory}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, directory: e.target.value } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
|
||||
<div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Total Size</p>
|
||||
<p className="text-2xl font-semibold">{thumbnailStats.total_size_mb.toFixed(2)} MB</p>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Files</p>
|
||||
<p className="text-2xl font-semibold">{thumbnailStats.file_count}</p>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Directory</p>
|
||||
<p className="text-sm font-mono truncate" title={thumbnailStats.directory}>{thumbnailStats.directory}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Note: Thumbnail settings are used during indexing. Existing thumbnails will not be regenerated automatically.
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { getSettings, getCacheStats } from "../../lib/api";
|
||||
import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
|
||||
import SettingsPage from "./SettingsPage";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
@@ -7,7 +7,8 @@ export default async function SettingsPageWrapper() {
|
||||
const settings = await getSettings().catch(() => ({
|
||||
image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
|
||||
cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
|
||||
limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 }
|
||||
limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 },
|
||||
thumbnail: { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
|
||||
}));
|
||||
|
||||
const cacheStats = await getCacheStats().catch(() => ({
|
||||
@@ -16,5 +17,11 @@ export default async function SettingsPageWrapper() {
|
||||
directory: "/tmp/stripstream-image-cache"
|
||||
}));
|
||||
|
||||
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} />;
|
||||
const thumbnailStats = await getThumbnailStats().catch(() => ({
|
||||
total_size_mb: 0,
|
||||
file_count: 0,
|
||||
directory: "/data/thumbnails"
|
||||
}));
|
||||
|
||||
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
|
||||
}
|
||||
|
||||
@@ -98,7 +98,10 @@ function config() {
|
||||
return { baseUrl: baseUrl.replace(/\/$/, ""), token };
|
||||
}
|
||||
|
||||
export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
|
||||
export async function apiFetch<T>(
|
||||
path: string,
|
||||
init?: RequestInit,
|
||||
): Promise<T> {
|
||||
const { baseUrl, token } = config();
|
||||
const headers = new Headers(init?.headers || {});
|
||||
headers.set("Authorization", `Bearer ${token}`);
|
||||
@@ -109,7 +112,7 @@ export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T>
|
||||
const res = await fetch(`${baseUrl}${path}`, {
|
||||
...init,
|
||||
headers,
|
||||
cache: "no-store"
|
||||
cache: "no-store",
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -130,7 +133,7 @@ export async function fetchLibraries() {
|
||||
export async function createLibrary(name: string, rootPath: string) {
|
||||
return apiFetch<LibraryDto>("/libraries", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ name, root_path: rootPath })
|
||||
body: JSON.stringify({ name, root_path: rootPath }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -143,12 +146,21 @@ export async function scanLibrary(libraryId: string, full?: boolean) {
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
export async function updateLibraryMonitoring(libraryId: string, monitorEnabled: boolean, scanMode: string, watcherEnabled?: boolean) {
|
||||
const body: { monitor_enabled: boolean; scan_mode: string; watcher_enabled?: boolean } = {
|
||||
export async function updateLibraryMonitoring(
|
||||
libraryId: string,
|
||||
monitorEnabled: boolean,
|
||||
scanMode: string,
|
||||
watcherEnabled?: boolean,
|
||||
) {
|
||||
const body: {
|
||||
monitor_enabled: boolean;
|
||||
scan_mode: string;
|
||||
watcher_enabled?: boolean;
|
||||
} = {
|
||||
monitor_enabled: monitorEnabled,
|
||||
scan_mode: scanMode,
|
||||
};
|
||||
@@ -157,7 +169,7 @@ export async function updateLibraryMonitoring(libraryId: string, monitorEnabled:
|
||||
}
|
||||
return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, {
|
||||
method: "PATCH",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -171,7 +183,25 @@ export async function rebuildIndex(libraryId?: string, full?: boolean) {
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>("/index/rebuild", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
export async function rebuildThumbnails(libraryId?: string) {
|
||||
const body: { library_id?: string } = {};
|
||||
if (libraryId) body.library_id = libraryId;
|
||||
return apiFetch<IndexJobDto>("/index/thumbnails/rebuild", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
export async function regenerateThumbnails(libraryId?: string) {
|
||||
const body: { library_id?: string } = {};
|
||||
if (libraryId) body.library_id = libraryId;
|
||||
return apiFetch<IndexJobDto>("/index/thumbnails/regenerate", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -191,7 +221,7 @@ export async function listTokens() {
|
||||
export async function createToken(name: string, scope: string) {
|
||||
return apiFetch<{ token: string }>("/admin/tokens", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ name, scope })
|
||||
body: JSON.stringify({ name, scope }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -199,13 +229,18 @@ export async function revokeToken(id: string) {
|
||||
return apiFetch<void>(`/admin/tokens/${id}`, { method: "DELETE" });
|
||||
}
|
||||
|
||||
export async function fetchBooks(libraryId?: string, series?: string, cursor?: string, limit: number = 50): Promise<BooksPageDto> {
|
||||
export async function fetchBooks(
|
||||
libraryId?: string,
|
||||
series?: string,
|
||||
cursor?: string,
|
||||
limit: number = 50,
|
||||
): Promise<BooksPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
if (series) params.set("series", series);
|
||||
if (cursor) params.set("cursor", cursor);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
|
||||
return apiFetch<BooksPageDto>(`/books?${params.toString()}`);
|
||||
}
|
||||
|
||||
@@ -214,27 +249,35 @@ export type SeriesPageDto = {
|
||||
next_cursor: string | null;
|
||||
};
|
||||
|
||||
export async function fetchSeries(libraryId: string, cursor?: string, limit: number = 50): Promise<SeriesPageDto> {
|
||||
export async function fetchSeries(
|
||||
libraryId: string,
|
||||
cursor?: string,
|
||||
limit: number = 50,
|
||||
): Promise<SeriesPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (cursor) params.set("cursor", cursor);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
return apiFetch<SeriesPageDto>(`/libraries/${libraryId}/series?${params.toString()}`);
|
||||
|
||||
return apiFetch<SeriesPageDto>(
|
||||
`/libraries/${libraryId}/series?${params.toString()}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function searchBooks(query: string, libraryId?: string, limit: number = 20): Promise<SearchResponseDto> {
|
||||
export async function searchBooks(
|
||||
query: string,
|
||||
libraryId?: string,
|
||||
limit: number = 20,
|
||||
): Promise<SearchResponseDto> {
|
||||
const params = new URLSearchParams();
|
||||
params.set("q", query);
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
|
||||
return apiFetch<SearchResponseDto>(`/search?${params.toString()}`);
|
||||
}
|
||||
|
||||
export function getBookCoverUrl(bookId: string): string {
|
||||
// Utiliser une route API locale pour éviter les problèmes CORS
|
||||
// Le navigateur ne peut pas accéder à http://api:8080 (hostname Docker interne)
|
||||
return `/api/books/${bookId}/pages/1?format=webp&width=200`;
|
||||
return `/api/books/${bookId}/thumbnail`;
|
||||
}
|
||||
|
||||
export type Settings = {
|
||||
@@ -254,6 +297,14 @@ export type Settings = {
|
||||
timeout_seconds: number;
|
||||
rate_limit_per_second: number;
|
||||
};
|
||||
thumbnail: {
|
||||
enabled: boolean;
|
||||
width: number;
|
||||
height: number;
|
||||
quality: number;
|
||||
format: string;
|
||||
directory: string;
|
||||
};
|
||||
};
|
||||
|
||||
export type CacheStats = {
|
||||
@@ -267,6 +318,12 @@ export type ClearCacheResponse = {
|
||||
message: string;
|
||||
};
|
||||
|
||||
export type ThumbnailStats = {
|
||||
total_size_mb: number;
|
||||
file_count: number;
|
||||
directory: string;
|
||||
};
|
||||
|
||||
export async function getSettings() {
|
||||
return apiFetch<Settings>("/settings");
|
||||
}
|
||||
@@ -274,7 +331,7 @@ export async function getSettings() {
|
||||
export async function updateSetting(key: string, value: unknown) {
|
||||
return apiFetch<unknown>(`/settings/${key}`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ value })
|
||||
body: JSON.stringify({ value }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -283,5 +340,11 @@ export async function getCacheStats() {
|
||||
}
|
||||
|
||||
export async function clearCache() {
|
||||
return apiFetch<ClearCacheResponse>("/settings/cache/clear", { method: "POST" });
|
||||
return apiFetch<ClearCacheResponse>("/settings/cache/clear", {
|
||||
method: "POST",
|
||||
});
|
||||
}
|
||||
|
||||
export async function getThumbnailStats() {
|
||||
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
|
||||
}
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev -p 8082",
|
||||
"dev": "next dev -p 7082",
|
||||
"build": "next build",
|
||||
"start": "next start -p 8082"
|
||||
"start": "next start -p 7082"
|
||||
},
|
||||
"dependencies": {
|
||||
"next": "^16.1.6",
|
||||
|
||||
@@ -11,6 +11,7 @@ chrono.workspace = true
|
||||
notify = "6.1"
|
||||
parsers = { path = "../../crates/parsers" }
|
||||
rand.workspace = true
|
||||
rayon.workspace = true
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
|
||||
@@ -4,11 +4,12 @@ use chrono::{DateTime, Utc};
|
||||
use axum::http::StatusCode;
|
||||
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use parsers::{detect_format, parse_metadata, BookFormat};
|
||||
use rayon::prelude::*;
|
||||
use serde::Serialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
use sqlx::{postgres::PgPoolOptions, Row};
|
||||
use std::{collections::HashMap, path::Path, time::Duration};
|
||||
use stripstream_core::config::IndexerConfig;
|
||||
use stripstream_core::config::{IndexerConfig, ThumbnailConfig};
|
||||
use tokio::sync::mpsc;
|
||||
use tracing::{error, info, trace, warn};
|
||||
use uuid::Uuid;
|
||||
@@ -37,6 +38,9 @@ struct AppState {
|
||||
pool: sqlx::PgPool,
|
||||
meili_url: String,
|
||||
meili_master_key: String,
|
||||
thumbnail_config: ThumbnailConfig,
|
||||
api_base_url: String,
|
||||
api_bootstrap_token: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@@ -65,6 +69,9 @@ async fn main() -> anyhow::Result<()> {
|
||||
pool,
|
||||
meili_url: config.meili_url.clone(),
|
||||
meili_master_key: config.meili_master_key.clone(),
|
||||
thumbnail_config: config.thumbnail_config.clone(),
|
||||
api_base_url: config.api_base_url.clone(),
|
||||
api_bootstrap_token: config.api_bootstrap_token.clone(),
|
||||
};
|
||||
|
||||
tokio::spawn(run_worker(state.clone(), config.scan_interval_seconds));
|
||||
@@ -186,7 +193,6 @@ async fn run_file_watcher(state: AppState) -> anyhow::Result<()> {
|
||||
let pool = state.pool.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
let mut watcher: Option<RecommendedWatcher> = None;
|
||||
let mut watched_libraries: HashMap<Uuid, String> = HashMap::new();
|
||||
|
||||
loop {
|
||||
@@ -217,17 +223,12 @@ async fn run_file_watcher(state: AppState) -> anyhow::Result<()> {
|
||||
if needs_restart {
|
||||
info!("[WATCHER] Restarting watcher for {} libraries", current_libraries.len());
|
||||
|
||||
// Drop old watcher
|
||||
watcher = None;
|
||||
watched_libraries.clear();
|
||||
|
||||
if !current_libraries.is_empty() {
|
||||
let tx_clone = tx.clone();
|
||||
let libraries_clone = current_libraries.clone();
|
||||
|
||||
match setup_watcher(libraries_clone, tx_clone) {
|
||||
Ok(new_watcher) => {
|
||||
watcher = Some(new_watcher);
|
||||
Ok(_new_watcher) => {
|
||||
watched_libraries = current_libraries;
|
||||
info!("[WATCHER] Watching {} libraries", watched_libraries.len());
|
||||
}
|
||||
@@ -417,18 +418,55 @@ async fn claim_next_job(pool: &sqlx::PgPool) -> anyhow::Result<Option<(Uuid, Opt
|
||||
|
||||
async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<Uuid>) -> anyhow::Result<()> {
|
||||
info!("[JOB] Processing {} library={:?}", job_id, target_library_id);
|
||||
|
||||
// Get job type to check if it's a full rebuild
|
||||
|
||||
let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Thumbnail jobs: hand off to API and wait for completion (same queue as rebuilds)
|
||||
if job_type == "thumbnail_rebuild" || job_type == "thumbnail_regenerate" {
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'generating_thumbnails', started_at = NOW() WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
let api_base = state.api_base_url.trim_end_matches('/');
|
||||
let url = format!("{}/index/jobs/{}/thumbnails/checkup", api_base, job_id);
|
||||
let client = reqwest::Client::new();
|
||||
let res = client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {}", state.api_bootstrap_token))
|
||||
.send()
|
||||
.await?;
|
||||
if !res.status().is_success() {
|
||||
anyhow::bail!("thumbnail checkup API returned {}", res.status());
|
||||
}
|
||||
|
||||
// Poll until job is finished (API updates the same row)
|
||||
let poll_interval = Duration::from_secs(1);
|
||||
loop {
|
||||
tokio::time::sleep(poll_interval).await;
|
||||
let status: String = sqlx::query_scalar("SELECT status FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
if status == "success" || status == "failed" {
|
||||
info!("[JOB] Thumbnail job {} finished with status {}", job_id, status);
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let is_full_rebuild = job_type == "full_rebuild";
|
||||
info!("[JOB] {} type={} full_rebuild={}", job_id, job_type, is_full_rebuild);
|
||||
|
||||
// For full rebuilds, delete existing data first
|
||||
if is_full_rebuild {
|
||||
info!("[JOB] Full rebuild: deleting existing data");
|
||||
|
||||
if let Some(library_id) = target_library_id {
|
||||
// Delete books and files for specific library
|
||||
sqlx::query("DELETE FROM book_files WHERE book_id IN (SELECT id FROM books WHERE library_id = $1)")
|
||||
@@ -459,17 +497,20 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
.await?
|
||||
};
|
||||
|
||||
// First pass: count total files for progress estimation
|
||||
let mut total_files = 0usize;
|
||||
for library in &libraries {
|
||||
let root_path: String = library.get("root_path");
|
||||
let root_path = remap_libraries_path(&root_path);
|
||||
for entry in WalkDir::new(&root_path).into_iter().filter_map(Result::ok) {
|
||||
if entry.file_type().is_file() && detect_format(entry.path()).is_some() {
|
||||
total_files += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
// First pass: count total files for progress estimation (parallel)
|
||||
let library_paths: Vec<String> = libraries.iter()
|
||||
.map(|library| remap_libraries_path(&library.get::<String, _>("root_path")))
|
||||
.collect();
|
||||
|
||||
let total_files: usize = library_paths.par_iter()
|
||||
.map(|root_path| {
|
||||
WalkDir::new(root_path)
|
||||
.into_iter()
|
||||
.filter_map(Result::ok)
|
||||
.filter(|entry| entry.file_type().is_file() && detect_format(entry.path()).is_some())
|
||||
.count()
|
||||
})
|
||||
.sum();
|
||||
|
||||
info!("[JOB] Found {} libraries, {} total files to index", libraries.len(), total_files);
|
||||
|
||||
@@ -505,12 +546,33 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
|
||||
sync_meili(&state.pool, &state.meili_url, &state.meili_master_key).await?;
|
||||
|
||||
sqlx::query("UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, current_file = NULL, progress_percent = 100, processed_files = $3 WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.bind(serde_json::to_value(&stats)?)
|
||||
.bind(total_processed_count)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
// Hand off to API for thumbnail checkup (API will set status = 'success' when done)
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET status = 'generating_thumbnails', stats_json = $2, current_file = NULL, processed_files = $3 WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(serde_json::to_value(&stats)?)
|
||||
.bind(total_processed_count)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
let api_base = state.api_base_url.trim_end_matches('/');
|
||||
let url = format!("{}/index/jobs/{}/thumbnails/checkup", api_base, job_id);
|
||||
let client = reqwest::Client::new();
|
||||
let res = client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {}", state.api_bootstrap_token))
|
||||
.send()
|
||||
.await;
|
||||
if let Err(e) = res {
|
||||
warn!("[JOB] Failed to trigger thumbnail checkup: {} — API will not generate thumbnails for this job", e);
|
||||
} else if let Ok(r) = res {
|
||||
if !r.status().is_success() {
|
||||
warn!("[JOB] Thumbnail checkup returned {} — API may not generate thumbnails", r.status());
|
||||
} else {
|
||||
info!("[JOB] Thumbnail checkup started (job {}), API will complete the job", job_id);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -550,6 +612,7 @@ struct BookInsert {
|
||||
series: Option<String>,
|
||||
volume: Option<i32>,
|
||||
page_count: Option<i32>,
|
||||
thumbnail_path: Option<String>,
|
||||
}
|
||||
|
||||
struct FileInsert {
|
||||
@@ -667,12 +730,13 @@ async fn flush_all_batches(
|
||||
let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect();
|
||||
let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect();
|
||||
let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect();
|
||||
let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO books (id, library_id, kind, title, series, volume, page_count)
|
||||
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[])
|
||||
AS t(id, library_id, kind, title, series, volume, page_count)
|
||||
INSERT INTO books (id, library_id, kind, title, series, volume, page_count, thumbnail_path)
|
||||
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[], $8::text[])
|
||||
AS t(id, library_id, kind, title, series, volume, page_count, thumbnail_path)
|
||||
"#
|
||||
)
|
||||
.bind(&book_ids)
|
||||
@@ -682,6 +746,7 @@ async fn flush_all_batches(
|
||||
.bind(&series)
|
||||
.bind(&volumes)
|
||||
.bind(&page_counts)
|
||||
.bind(&thumbnail_paths)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
@@ -948,13 +1013,14 @@ async fn scan_library(
|
||||
continue;
|
||||
}
|
||||
|
||||
// New file
|
||||
// New file (thumbnails generated by API after job handoff)
|
||||
info!("[PROCESS] Inserting new file: {}", file_name);
|
||||
let book_id = Uuid::new_v4();
|
||||
|
||||
match parse_metadata(path, format, root) {
|
||||
Ok(parsed) => {
|
||||
let book_id = Uuid::new_v4();
|
||||
let file_id = Uuid::new_v4();
|
||||
|
||||
|
||||
books_to_insert.push(BookInsert {
|
||||
book_id,
|
||||
library_id,
|
||||
@@ -963,6 +1029,7 @@ async fn scan_library(
|
||||
series: parsed.series,
|
||||
volume: parsed.volume,
|
||||
page_count: parsed.page_count,
|
||||
thumbnail_path: None,
|
||||
});
|
||||
|
||||
files_to_insert.push(FileInsert {
|
||||
@@ -993,6 +1060,7 @@ async fn scan_library(
|
||||
series: None,
|
||||
volume: None,
|
||||
page_count: None,
|
||||
thumbnail_path: None,
|
||||
});
|
||||
|
||||
files_to_insert.push(FileInsert {
|
||||
|
||||
@@ -12,10 +12,12 @@ pub struct ApiConfig {
|
||||
impl ApiConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
|
||||
listen_addr: std::env::var("API_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
|
||||
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY")
|
||||
.context("MEILI_MASTER_KEY is required")?,
|
||||
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
|
||||
.context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
})
|
||||
@@ -29,20 +31,76 @@ pub struct IndexerConfig {
|
||||
pub meili_url: String,
|
||||
pub meili_master_key: String,
|
||||
pub scan_interval_seconds: u64,
|
||||
pub thumbnail_config: ThumbnailConfig,
|
||||
/// API base URL for thumbnail checkup at end of build (e.g. http://api:8080)
|
||||
pub api_base_url: String,
|
||||
/// Token to call API (e.g. API_BOOTSTRAP_TOKEN)
|
||||
pub api_bootstrap_token: String,
|
||||
}
|
||||
|
||||
/// Settings controlling thumbnail generation for indexed books.
#[derive(Debug, Clone)]
pub struct ThumbnailConfig {
    /// Whether thumbnail generation is enabled at all.
    pub enabled: bool,
    /// Target thumbnail width in pixels.
    pub width: u32,
    /// Target thumbnail height in pixels.
    pub height: u32,
    /// Encoder quality setting (default 80).
    pub quality: u8,
    /// Output image format name (e.g. "webp").
    pub format: String,
    /// Directory where generated thumbnails are written.
    pub directory: String,
}

impl Default for ThumbnailConfig {
    /// Defaults: 300x400 "webp" thumbnails at quality 80, written to
    /// "/data/thumbnails".
    fn default() -> Self {
        ThumbnailConfig {
            enabled: true,
            width: 300,
            height: 400,
            quality: 80,
            format: String::from("webp"),
            directory: String::from("/data/thumbnails"),
        }
    }
}
|
||||
|
||||
impl IndexerConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
let thumbnail_config = ThumbnailConfig {
|
||||
enabled: std::env::var("THUMBNAIL_ENABLED")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<bool>().ok())
|
||||
.unwrap_or(true),
|
||||
width: std::env::var("THUMBNAIL_WIDTH")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u32>().ok())
|
||||
.unwrap_or(300),
|
||||
height: std::env::var("THUMBNAIL_HEIGHT")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u32>().ok())
|
||||
.unwrap_or(400),
|
||||
quality: std::env::var("THUMBNAIL_QUALITY")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u8>().ok())
|
||||
.unwrap_or(80),
|
||||
format: std::env::var("THUMBNAIL_FORMAT").unwrap_or_else(|_| "webp".to_string()),
|
||||
directory: std::env::var("THUMBNAIL_DIRECTORY")
|
||||
.unwrap_or_else(|_| "/data/thumbnails".to_string()),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
|
||||
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY")
|
||||
.context("MEILI_MASTER_KEY is required")?,
|
||||
scan_interval_seconds: std::env::var("INDEXER_SCAN_INTERVAL_SECONDS")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u64>().ok())
|
||||
.unwrap_or(5),
|
||||
thumbnail_config,
|
||||
api_base_url: std::env::var("API_BASE_URL")
|
||||
.unwrap_or_else(|_| "http://api:8080".to_string()),
|
||||
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
|
||||
.context("API_BOOTSTRAP_TOKEN is required for thumbnail checkup")?,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -59,8 +117,10 @@ impl AdminUiConfig {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("ADMIN_UI_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8082".to_string()),
|
||||
api_base_url: std::env::var("API_BASE_URL").unwrap_or_else(|_| "http://api:8080".to_string()),
|
||||
api_token: std::env::var("API_BOOTSTRAP_TOKEN").context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
api_base_url: std::env::var("API_BASE_URL")
|
||||
.unwrap_or_else(|_| "http://api:8080".to_string()),
|
||||
api_token: std::env::var("API_BOOTSTRAP_TOKEN")
|
||||
.context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,4 +8,6 @@ license.workspace = true
|
||||
anyhow.workspace = true
|
||||
lopdf = "0.35"
|
||||
regex = "1"
|
||||
uuid.workspace = true
|
||||
walkdir.workspace = true
|
||||
zip = { version = "2.2", default-features = false, features = ["deflate"] }
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
use anyhow::{Context, Result};
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
use uuid::Uuid;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum BookFormat {
|
||||
@@ -240,3 +244,105 @@ fn is_image_name(name: &str) -> bool {
|
||||
|| name.ends_with(".webp")
|
||||
|| name.ends_with(".avif")
|
||||
}
|
||||
|
||||
/// Extract the raw bytes of the first page image of a book file.
///
/// Dispatches to the format-specific extractor. The returned bytes are the
/// undecoded image file contents (whatever image the archive contains for
/// CBZ/CBR; a PNG rendered by `pdftoppm` for PDF).
pub fn extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>> {
    match format {
        BookFormat::Cbz => extract_cbz_first_page(path),
        BookFormat::Cbr => extract_cbr_first_page(path),
        BookFormat::Pdf => extract_pdf_first_page(path),
    }
}
|
||||
|
||||
fn extract_cbz_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let file = std::fs::File::open(path)
|
||||
.with_context(|| format!("cannot open cbz: {}", path.display()))?;
|
||||
let mut archive = zip::ZipArchive::new(file).context("invalid cbz archive")?;
|
||||
|
||||
let mut image_names: Vec<String> = Vec::new();
|
||||
for i in 0..archive.len() {
|
||||
let entry = archive.by_index(i).context("cannot read cbz entry")?;
|
||||
let name = entry.name().to_ascii_lowercase();
|
||||
if is_image_name(&name) {
|
||||
image_names.push(entry.name().to_string());
|
||||
}
|
||||
}
|
||||
image_names.sort();
|
||||
|
||||
let first_image = image_names.first().context("no images found in cbz")?;
|
||||
|
||||
let mut entry = archive
|
||||
.by_name(first_image)
|
||||
.context("cannot read first image")?;
|
||||
let mut buf = Vec::new();
|
||||
entry.read_to_end(&mut buf)?;
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
fn extract_cbr_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-thumb-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&tmp_dir).context("cannot create temp dir")?;
|
||||
|
||||
// Use env command like the API does
|
||||
let output = std::process::Command::new("env")
|
||||
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
|
||||
.arg(&tmp_dir)
|
||||
.arg(path)
|
||||
.output()
|
||||
.context("unar failed")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
return Err(anyhow::anyhow!(
|
||||
"unar extract failed: {:?}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
// Use WalkDir for recursive search (CBR can have subdirectories)
|
||||
let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
|
||||
.into_iter()
|
||||
.filter_map(|e| e.ok())
|
||||
.filter(|e| {
|
||||
let name = e.file_name().to_string_lossy().to_lowercase();
|
||||
is_image_name(&name)
|
||||
})
|
||||
.collect();
|
||||
|
||||
image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
|
||||
|
||||
let first_image = image_files.first().context("no images found in cbr")?;
|
||||
|
||||
let data = std::fs::read(first_image.path())?;
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn extract_pdf_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-thumb-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&tmp_dir)?;
|
||||
let output_prefix = tmp_dir.join("page");
|
||||
|
||||
let output = Command::new("pdftoppm")
|
||||
.args([
|
||||
"-f",
|
||||
"1",
|
||||
"-singlefile",
|
||||
"-png",
|
||||
"-scale-to",
|
||||
"800",
|
||||
path.to_str().unwrap(),
|
||||
output_prefix.to_str().unwrap(),
|
||||
])
|
||||
.output()
|
||||
.context("pdftoppm failed")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
return Err(anyhow::anyhow!("pdftoppm failed"));
|
||||
}
|
||||
|
||||
let image_path = output_prefix.with_extension("png");
|
||||
let data = std::fs::read(&image_path)?;
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
@@ -57,6 +57,7 @@ services:
|
||||
- "7080:8080"
|
||||
volumes:
|
||||
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
|
||||
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
|
||||
depends_on:
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
@@ -80,6 +81,7 @@ services:
|
||||
- "7081:8081"
|
||||
volumes:
|
||||
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
|
||||
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
|
||||
depends_on:
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
|
||||
5
infra/migrations/0009_add_thumbnails.sql
Normal file
5
infra/migrations/0009_add_thumbnails.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Migration: Add per-book thumbnail path and default thumbnail settings

ALTER TABLE books ADD COLUMN IF NOT EXISTS thumbnail_path TEXT;

-- NOTE(review): ON CONFLICT DO UPDATE overwrites any customized 'thumbnail'
-- settings every time this migration runs — confirm this forced reset is
-- intended rather than ON CONFLICT DO NOTHING.
INSERT INTO app_settings (key, value) VALUES
('thumbnail', '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}')
ON CONFLICT (key) DO UPDATE SET value = '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}'::jsonb;
|
||||
6
infra/migrations/0010_index_job_thumbnails_phase.sql
Normal file
6
infra/migrations/0010_index_job_thumbnails_phase.sql
Normal file
@@ -0,0 +1,6 @@
|
||||
-- Migration: Add status 'generating_thumbnails' for thumbnail phase after indexing

-- Recreate the status check constraint so index jobs can sit in a dedicated
-- 'generating_thumbnails' phase between 'running' and the terminal states.
ALTER TABLE index_jobs
DROP CONSTRAINT IF EXISTS index_jobs_status_check,
ADD CONSTRAINT index_jobs_status_check
CHECK (status IN ('pending', 'running', 'generating_thumbnails', 'success', 'failed'));
|
||||
6
infra/migrations/0011_thumbnail_rebuild_type.sql
Normal file
6
infra/migrations/0011_thumbnail_rebuild_type.sql
Normal file
@@ -0,0 +1,6 @@
|
||||
-- Migration: Add job type 'thumbnail_rebuild' for manual thumbnail generation

-- NOTE(review): the allowed list also adds 'thumbnail_regenerate', which the
-- header above does not mention — confirm both type names are actually used.
ALTER TABLE index_jobs
DROP CONSTRAINT IF EXISTS index_jobs_type_check,
ADD CONSTRAINT index_jobs_type_check
CHECK (type IN ('scan', 'rebuild', 'full_rebuild', 'thumbnail_rebuild', 'thumbnail_regenerate'));
|
||||
Reference in New Issue
Block a user