feat: thumbnails : part1
This commit is contained in:
@@ -48,6 +48,10 @@ LIBRARIES_ROOT_PATH=/libraries
|
||||
# You can change this to an absolute path on your machine
|
||||
LIBRARIES_HOST_PATH=../libraries
|
||||
|
||||
# Path to thumbnails directory on host machine (for Docker volume mount)
|
||||
# Default: ../data/thumbnails (relative to infra/docker-compose.yml)
|
||||
THUMBNAILS_HOST_PATH=../data/thumbnails
|
||||
|
||||
# =============================================================================
|
||||
# Port Configuration
|
||||
# =============================================================================
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -5,3 +5,4 @@ tmp/
|
||||
libraries/
|
||||
node_modules/
|
||||
.next/
|
||||
data/thumbnails
|
||||
|
||||
301
AGENTS.md
Normal file
301
AGENTS.md
Normal file
@@ -0,0 +1,301 @@
|
||||
# AGENTS.md - Agent Coding Guidelines for Stripstream Librarian
|
||||
|
||||
This file provides guidelines for agentic coding agents operating in this repository.
|
||||
|
||||
---
|
||||
|
||||
## 1. Build, Lint, and Test Commands
|
||||
|
||||
### Build Commands
|
||||
|
||||
```bash
|
||||
# Build debug version (fastest for development)
|
||||
cargo build
|
||||
|
||||
# Build release version (optimized)
|
||||
cargo build --release
|
||||
|
||||
# Build specific crate
|
||||
cargo build -p api
|
||||
cargo build -p indexer
|
||||
|
||||
# Watch mode for development (requires cargo-watch)
|
||||
cargo watch -x build
|
||||
```
|
||||
|
||||
### Lint & Format Commands
|
||||
|
||||
```bash
|
||||
# Run clippy lints
|
||||
cargo clippy
|
||||
|
||||
# Fix auto-fixable clippy warnings
|
||||
cargo clippy --fix
|
||||
|
||||
# Format code
|
||||
cargo fmt
|
||||
|
||||
# Check formatting without making changes
|
||||
cargo fmt -- --check
|
||||
```
|
||||
|
||||
### Test Commands
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
cargo test
|
||||
|
||||
# Run tests for specific crate
|
||||
cargo test -p api
|
||||
cargo test -p indexer
|
||||
cargo test -p parsers
|
||||
|
||||
# Run a single test by name
|
||||
cargo test test_name_here
|
||||
|
||||
# Run tests with output display
|
||||
cargo test -- --nocapture
|
||||
|
||||
# Run doc tests
|
||||
cargo test --doc
|
||||
```
|
||||
|
||||
### Database Migrations
|
||||
|
||||
```bash
|
||||
# Run migrations manually (via sqlx CLI)
|
||||
# Ensure DATABASE_URL is set, then:
|
||||
sqlx migrate run
|
||||
|
||||
# Create new migration
|
||||
sqlx migrate add -r migration_name
|
||||
```
|
||||
|
||||
### Docker Development
|
||||
|
||||
```bash
|
||||
# Start infrastructure only
|
||||
cd infra && docker compose up -d postgres meilisearch
|
||||
|
||||
# Start full stack
|
||||
cd infra && docker compose up -d
|
||||
|
||||
# View logs
|
||||
docker compose logs -f api
|
||||
docker compose logs -f indexer
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Code Style Guidelines
|
||||
|
||||
### General Principles
|
||||
|
||||
- **Conciseness**: Keep responses short and direct. Avoid unnecessary preamble or explanation.
|
||||
- **Idiomatic Rust**: Follow Rust best practices and ecosystem conventions.
|
||||
- **Error Handling**: Use `anyhow::Result<T>` for application code, `std::io::Result<T>` for simple file operations.
|
||||
- **Async**: Use `tokio` for async runtime. Prefer `#[tokio::main]` over manual runtime.
|
||||
|
||||
### Naming Conventions
|
||||
|
||||
| Element | Convention | Example |
|
||||
|---------|------------|---------|
|
||||
| Variables | snake_case | `let book_id = ...` |
|
||||
| Functions | snake_case | `fn get_book(...)` |
|
||||
| Structs/Enums | PascalCase | `struct BookItem` |
|
||||
| Modules | snake_case | `mod books;` |
|
||||
| Constants | SCREAMING_SNAKE_CASE | `const BATCH_SIZE: usize = 100;` |
|
||||
| Types | PascalCase | `type MyResult<T> = Result<T, Error>;` |
|
||||
|
||||
### Imports
|
||||
|
||||
- **Absolute imports** for workspace crates: `use parsers::{detect_format, parse_metadata};`
|
||||
- **Standard library** imports: `use std::path::Path;`
|
||||
- **External crates**: `use sqlx::{postgres::PgPoolOptions, Row};`
|
||||
- **Group by**: std → external → workspace → local (with blank lines between)
|
||||
|
||||
```rust
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Context;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::error::ApiError;
|
||||
use crate::AppState;
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
- Use `anyhow` for application-level error handling with context
|
||||
- Use `with_context()` for adding context to errors
|
||||
- Return `Result<T, ApiError>` in API handlers
|
||||
- Use `?` operator instead of manual match/unwrap where possible
|
||||
|
||||
```rust
|
||||
// Good
|
||||
fn process_book(path: &Path) -> anyhow::Result<Book> {
|
||||
let file = std::fs::File::open(path)
|
||||
.with_context(|| format!("cannot open file: {}", path.display()))?;
|
||||
// ...
|
||||
}
|
||||
|
||||
// Good - API error handling
|
||||
async fn get_book(State(state): State<AppState>, Path(id): Path<Uuid>)
|
||||
-> Result<Json<Book>, ApiError> {
|
||||
let row = sqlx::query("SELECT * FROM books WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.map_err(ApiError::internal)?;
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Database (sqlx)
|
||||
|
||||
- Use **raw SQL queries** with `sqlx::query()` and `sqlx::query_scalar()`
|
||||
- Prefer **batch operations** using `UNNEST` for bulk inserts/updates
|
||||
- Always use **parameterized queries** (`$1`, `$2`, etc.) - never string interpolation
|
||||
- Follow existing patterns for transactions:
|
||||
|
||||
```rust
|
||||
let mut tx = pool.begin().await?;
|
||||
// ... queries ...
|
||||
tx.commit().await?;
|
||||
```
|
||||
|
||||
### Async/Tokio
|
||||
|
||||
- Use `tokio::spawn` for background tasks
|
||||
- Use `spawn_blocking` for CPU-bound work (image processing, file I/O)
|
||||
- Keep async handlers non-blocking
|
||||
- Use `tokio::time::timeout` for operations with timeouts
|
||||
|
||||
```rust
|
||||
let bytes = tokio::time::timeout(
|
||||
Duration::from_secs(60),
|
||||
tokio::task::spawn_blocking(move || {
|
||||
render_page(&abs_path_clone, n)
|
||||
}),
|
||||
)
|
||||
.await
|
||||
.map_err(|_| ApiError::internal("timeout"))?
|
||||
.map_err(ApiError::internal)?;
|
||||
```
|
||||
|
||||
### Structs and Serialization
|
||||
|
||||
- Use `#[derive(Serialize, Deserialize, ToSchema)]` for API types
|
||||
- Add `utoipa` schemas for OpenAPI documentation
|
||||
- Use `Option<T>` for nullable fields
|
||||
- Document public structs briefly
|
||||
|
||||
```rust
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct BookItem {
|
||||
#[schema(value_type = String)]
|
||||
pub id: Uuid,
|
||||
pub title: String,
|
||||
pub author: Option<String>,
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Considerations
|
||||
|
||||
- Use **batch operations** for database inserts/updates (100 items recommended)
|
||||
- Use **parallel iterators** (`rayon::par_iter()`) for CPU-intensive scans
|
||||
- Implement **caching** for expensive operations (see `pages.rs` for disk/memory cache examples)
|
||||
- Use **streaming** for large data where applicable
|
||||
|
||||
### Testing
|
||||
|
||||
- Currently there are no test files - consider adding unit tests for:
|
||||
- Parser functions
|
||||
- Thumbnail generation
|
||||
- Configuration parsing
|
||||
- Use `#[cfg(test)]` modules for unit tests; put integration tests in a top-level `tests/` directory
|
||||
|
||||
---
|
||||
|
||||
## 3. Project Structure
|
||||
|
||||
```
|
||||
stripstream-librarian/
|
||||
├── apps/
|
||||
│ ├── api/ # REST API (axum)
|
||||
│ │ └── src/
|
||||
│ │ ├── main.rs
|
||||
│ │ ├── books.rs
|
||||
│ │ ├── pages.rs
|
||||
│ │ └── ...
|
||||
│ ├── indexer/ # Background indexing service
|
||||
│ │ └── src/
|
||||
│ │ └── main.rs
|
||||
│ └── backoffice/ # Next.js admin UI
|
||||
├── crates/
|
||||
│ ├── core/ # Shared config
|
||||
│ │ └── src/config.rs
|
||||
│ └── parsers/ # Book parsing (CBZ, CBR, PDF)
|
||||
├── infra/
|
||||
│ ├── migrations/ # SQL migrations
|
||||
│ └── docker-compose.yml
|
||||
└── libraries/ # Book storage (mounted volume)
|
||||
```
|
||||
|
||||
### Key Files
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `apps/api/src/books.rs` | Book CRUD endpoints |
|
||||
| `apps/api/src/pages.rs` | Page rendering & caching |
|
||||
| `apps/indexer/src/main.rs` | Indexing logic, batch processing |
|
||||
| `crates/parsers/src/lib.rs` | Format detection, metadata parsing |
|
||||
| `crates/core/src/config.rs` | Configuration from environment |
|
||||
| `infra/migrations/*.sql` | Database schema |
|
||||
|
||||
---
|
||||
|
||||
## 4. Common Patterns
|
||||
|
||||
### Configuration from Environment
|
||||
|
||||
```rust
|
||||
// In crates/core/src/config.rs
|
||||
impl IndexerConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL")
|
||||
.context("DATABASE_URL is required")?,
|
||||
// ...
|
||||
})
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Path Remapping
|
||||
|
||||
```rust
|
||||
fn remap_libraries_path(path: &str) -> String {
|
||||
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
|
||||
if path.starts_with("/libraries/") {
|
||||
return path.replacen("/libraries", &root, 1);
|
||||
}
|
||||
}
|
||||
path.to_string()
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 5. Important Notes
|
||||
|
||||
- **Workspace**: This is a Cargo workspace. Always specify the package when building specific apps.
|
||||
- **Dependencies**: External crates are defined in workspace `Cargo.toml`, not individual `Cargo.toml`.
|
||||
- **Database**: PostgreSQL is required. Run migrations before starting services.
|
||||
- **External Tools**: The indexer relies on `unar` (for CBR) and `pdftoppm` (for PDF) being installed on the system.
|
||||
5
Cargo.lock
generated
5
Cargo.lock
generated
@@ -1146,9 +1146,11 @@ dependencies = [
|
||||
"anyhow",
|
||||
"axum",
|
||||
"chrono",
|
||||
"image",
|
||||
"notify",
|
||||
"parsers",
|
||||
"rand 0.8.5",
|
||||
"rayon",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -1160,6 +1162,7 @@ dependencies = [
|
||||
"tracing-subscriber",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"webp",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1624,6 +1627,8 @@ dependencies = [
|
||||
"anyhow",
|
||||
"lopdf",
|
||||
"regex",
|
||||
"uuid",
|
||||
"walkdir",
|
||||
"zip 2.4.2",
|
||||
]
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ base64 = "0.22"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
|
||||
lru = "0.12"
|
||||
rayon = "1.10"
|
||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
||||
rand = "0.8"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
135
PLAN_THUMBNAILS.md
Normal file
135
PLAN_THUMBNAILS.md
Normal file
@@ -0,0 +1,135 @@
|
||||
# Plan: Génération des vignettes à l'index
|
||||
|
||||
## 1. Base de données
|
||||
|
||||
### Migration SQL (`0010_add_thumbnails.sql`)
|
||||
- [x] Ajouter `thumbnail_path TEXT` à la table `books` (nullable)
|
||||
- [x] Ajouter settings pour thumbnails dans `app_settings`:
|
||||
```json
|
||||
{
|
||||
"thumbnail": {
|
||||
"enabled": true,
|
||||
"width": 300,
|
||||
"height": 400,
|
||||
"quality": 80,
|
||||
"format": "webp"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Configuration
|
||||
|
||||
### `crates/core/src/config.rs`
|
||||
- [x] Ajouter `ThumbnailConfig` struct
|
||||
- [x] Ajouter champs dans `IndexerConfig`:
|
||||
- `thumbnail_width: u32` (défaut: 300)
|
||||
- `thumbnail_height: u32` (défaut: 400)
|
||||
- `thumbnail_quality: u8` (défaut: 80)
|
||||
- `thumbnail_dir: String` (défaut: `/data/thumbnails`)
|
||||
- [x] Ajouter getter depuis env vars
|
||||
|
||||
---
|
||||
|
||||
## 3. Indexer - Extraction de la 1ère page
|
||||
|
||||
### Fonction à créer dans `crates/parsers/src/lib.rs`
|
||||
- [x] `extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>>`
|
||||
- Réutiliser logique de `pages.rs:extract_cbz_page`
|
||||
- Réutiliser logique de `pages.rs:extract_cbr_page`
|
||||
- Réutiliser logique de `pages.rs:render_pdf_page`
|
||||
|
||||
### Fonction de génération vignette dans `apps/indexer/src/main.rs`
|
||||
- [x] `generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>>`
|
||||
- Load image avec `image::load_from_memory`
|
||||
- Resize avec `image::resize` (ratio kept)
|
||||
- Encode en WebP avec `webp::Encoder`
|
||||
|
||||
- [x] `save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> Result<String>`
|
||||
|
||||
### Intégration dans `scan_library`
|
||||
- [x] Après parsing metadata, extraire 1ère page
|
||||
- [x] Générer vignette et sauvegarder
|
||||
- [x] Stocker chemin en DB (via batch insert)
|
||||
|
||||
---
|
||||
|
||||
## 4. Indexer - WalkDir parallèle
|
||||
|
||||
### Remplacement de `WalkDir` séquentiel
|
||||
- [x] Utiliser `rayon` pour paralléliser le scan:
|
||||
```rust
|
||||
let total_files: usize = library_paths.par_iter()
|
||||
.map(|root_path| { ... })
|
||||
.sum();
|
||||
```
|
||||
- [x] Ajouter `rayon = "1.10"` dans workspace dependencies
|
||||
|
||||
---
|
||||
|
||||
## 5. API - Service des vignettes
|
||||
|
||||
### Mise à jour models dans `apps/api/src/books.rs`
|
||||
- [x] Ajouter `thumbnail_url: Option<String>` à `BookItem`
|
||||
- [x] Ajouter `thumbnail_url: Option<String>` à `BookDetails`
|
||||
- [x] Mise à jour des requêtes SQL pour récupérer `thumbnail_path`
|
||||
|
||||
### Nouvelle route dans `apps/api/src/main.rs`
|
||||
- [x] Route `/books/:id/thumbnail` (GET)
|
||||
- Retourne fichier statique depuis `thumbnail_path`
|
||||
- Content-Type: image/webp
|
||||
- Cache-Control: public, max-age=31536000
|
||||
|
||||
### Suppression cache 1ère page (optionnel)
|
||||
- [ ] Optionnel: simplifier `pages.rs` car thumbnail pré-générée
|
||||
- [ ] Garder render pour pages > 1
|
||||
|
||||
### Adapter backoffice
|
||||
|
||||
La récupération des thumbnails est faite par une route page/1.
|
||||
- [x] Passer par la nouvelle route avec une route clean /thumbnail pour chaque cover.
|
||||
|
||||
### refacto code entre api et indexer
|
||||
|
||||
En fait, l'indexer pourrait appeler l'API pour qu'elle fasse les vignettes : c'est l'API qui est responsable des images et de la lecture des ebooks. Je préfère que chaque domaine soit bien respecté. À la fin d'un build, on appelle l'API pour faire le checkup des thumbnails.
|
||||
Il faudra que, côté backoffice, partout où l'on peut voir le traitement live des jobs, on ait une phase où l'on voit en SSE le traitement des thumbnails. Côté API, si on n'a pas de thumbnail, on passe par le code actuel de pages.
|
||||
|
||||
---
|
||||
|
||||
## 6. Settings API
|
||||
|
||||
### Endpoint settings existant
|
||||
- [ ] Vérifier que `/settings` expose thumbnail config
|
||||
- [ ] Ajouter endpoint PUT pour mettre à jour thumbnail settings
|
||||
|
||||
---
|
||||
|
||||
## 7. Taches diverses
|
||||
|
||||
- [x] Ajouter dependency `image` et `webp` dans indexer `Cargo.toml`
|
||||
- [x] Build release vérifié
|
||||
|
||||
---
|
||||
|
||||
## Ordre d'implémentation suggéré
|
||||
|
||||
1. [x] Migration DB + settings
|
||||
2. [x] Config + parsers (extract first page)
|
||||
3. [x] Indexer thumbnail generation + save to disk
|
||||
4. [x] API serve thumbnail
|
||||
5. [x] Parallel walkdir
|
||||
6. [ ] Tests & polish (à faire)
|
||||
|
||||
---
|
||||
|
||||
## Post-déploiement
|
||||
|
||||
- [ ] Appliquer migration SQL: `psql -f infra/migrations/0010_add_thumbnails.sql`
|
||||
- [ ] Créer dossier thumbnails: `mkdir -p /data/thumbnails`
|
||||
- [ ] Configurer env vars si besoin:
|
||||
- `THUMBNAIL_ENABLED=true`
|
||||
- `THUMBNAIL_WIDTH=300`
|
||||
- `THUMBNAIL_HEIGHT=400`
|
||||
- `THUMBNAIL_QUALITY=80`
|
||||
- `THUMBNAIL_DIRECTORY=/data/thumbnails`
|
||||
@@ -34,6 +34,7 @@ pub struct BookItem {
|
||||
pub volume: Option<i32>,
|
||||
pub language: Option<String>,
|
||||
pub page_count: Option<i32>,
|
||||
pub thumbnail_url: Option<String>,
|
||||
#[schema(value_type = String)]
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
@@ -58,6 +59,7 @@ pub struct BookDetails {
|
||||
pub volume: Option<i32>,
|
||||
pub language: Option<String>,
|
||||
pub page_count: Option<i32>,
|
||||
pub thumbnail_url: Option<String>,
|
||||
pub file_path: Option<String>,
|
||||
pub file_format: Option<String>,
|
||||
pub file_parse_status: Option<String>,
|
||||
@@ -96,7 +98,7 @@ pub async fn list_books(
|
||||
|
||||
let sql = format!(
|
||||
r#"
|
||||
SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at
|
||||
SELECT id, library_id, kind, title, author, series, volume, language, page_count, thumbnail_path, updated_at
|
||||
FROM books
|
||||
WHERE ($1::uuid IS NULL OR library_id = $1)
|
||||
AND ($2::text IS NULL OR kind = $2)
|
||||
@@ -135,17 +137,21 @@ pub async fn list_books(
|
||||
let mut items: Vec<BookItem> = rows
|
||||
.iter()
|
||||
.take(limit as usize)
|
||||
.map(|row| BookItem {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
kind: row.get("kind"),
|
||||
title: row.get("title"),
|
||||
author: row.get("author"),
|
||||
series: row.get("series"),
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
updated_at: row.get("updated_at"),
|
||||
.map(|row| {
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
BookItem {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
kind: row.get("kind"),
|
||||
title: row.get("title"),
|
||||
author: row.get("author"),
|
||||
series: row.get("series"),
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
thumbnail_url: thumbnail_path.map(|_p| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
|
||||
updated_at: row.get("updated_at"),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -182,7 +188,7 @@ pub async fn get_book(
|
||||
) -> Result<Json<BookDetails>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count,
|
||||
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path,
|
||||
bf.abs_path, bf.format, bf.parse_status
|
||||
FROM books b
|
||||
LEFT JOIN LATERAL (
|
||||
@@ -200,6 +206,7 @@ pub async fn get_book(
|
||||
.await?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
Ok(Json(BookDetails {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
@@ -210,6 +217,7 @@ pub async fn get_book(
|
||||
volume: row.get("volume"),
|
||||
language: row.get("language"),
|
||||
page_count: row.get("page_count"),
|
||||
thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
|
||||
file_path: row.get("abs_path"),
|
||||
file_format: row.get("format"),
|
||||
file_parse_status: row.get("parse_status"),
|
||||
@@ -332,3 +340,36 @@ pub async fn list_series(
|
||||
next_cursor,
|
||||
}))
|
||||
}
|
||||
|
||||
use axum::{
|
||||
body::Body,
|
||||
http::{header, HeaderMap, HeaderValue, StatusCode},
|
||||
response::IntoResponse,
|
||||
};
|
||||
|
||||
pub async fn get_thumbnail(
|
||||
State(state): State<AppState>,
|
||||
Path(book_id): Path<Uuid>,
|
||||
) -> Result<impl IntoResponse, ApiError> {
|
||||
let row = sqlx::query(
|
||||
"SELECT thumbnail_path FROM books WHERE id = $1"
|
||||
)
|
||||
.bind(book_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(e.to_string()))?;
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
|
||||
let thumbnail_path: Option<String> = row.get("thumbnail_path");
|
||||
|
||||
let path = thumbnail_path.ok_or_else(|| ApiError::not_found("thumbnail not found"))?;
|
||||
|
||||
let data = std::fs::read(&path)
|
||||
.map_err(|e| ApiError::internal(format!("cannot read thumbnail: {}", e)))?;
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("image/webp"));
|
||||
headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
|
||||
|
||||
Ok((StatusCode::OK, headers, Body::from(data)))
|
||||
}
|
||||
|
||||
@@ -117,6 +117,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
let read_routes = Router::new()
|
||||
.route("/books", get(books::list_books))
|
||||
.route("/books/:id", get(books::get_book))
|
||||
.route("/books/:id/thumbnail", get(books::get_thumbnail))
|
||||
.route("/books/:id/pages/:n", get(pages::get_page))
|
||||
.route("/libraries/:library_id/series", get(books::list_series))
|
||||
.route("/search", get(search::search_books))
|
||||
|
||||
@@ -27,12 +27,20 @@ pub struct CacheStats {
|
||||
pub directory: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ThumbnailStats {
|
||||
pub total_size_mb: f64,
|
||||
pub file_count: u64,
|
||||
pub directory: String,
|
||||
}
|
||||
|
||||
pub fn settings_routes() -> Router<AppState> {
|
||||
Router::new()
|
||||
.route("/settings", get(get_settings))
|
||||
.route("/settings/:key", get(get_setting).post(update_setting))
|
||||
.route("/settings/cache/clear", post(clear_cache))
|
||||
.route("/settings/cache/stats", get(get_cache_stats))
|
||||
.route("/settings/thumbnail/stats", get(get_thumbnail_stats))
|
||||
}
|
||||
|
||||
async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
|
||||
@@ -171,3 +179,72 @@ async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheSta
|
||||
|
||||
Ok(Json(stats))
|
||||
}
|
||||
|
||||
fn compute_dir_stats(path: &std::path::Path) -> (u64, u64) {
|
||||
let mut total_size: u64 = 0;
|
||||
let mut file_count: u64 = 0;
|
||||
|
||||
fn visit_dirs(
|
||||
dir: &std::path::Path,
|
||||
total_size: &mut u64,
|
||||
file_count: &mut u64,
|
||||
) -> std::io::Result<()> {
|
||||
if dir.is_dir() {
|
||||
for entry in std::fs::read_dir(dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
visit_dirs(&path, total_size, file_count)?;
|
||||
} else {
|
||||
*total_size += entry.metadata()?.len();
|
||||
*file_count += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
let _ = visit_dirs(path, &mut total_size, &mut file_count);
|
||||
(total_size, file_count)
|
||||
}
|
||||
|
||||
async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<ThumbnailStats>, ApiError> {
|
||||
let settings = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
|
||||
.fetch_optional(&_state.pool)
|
||||
.await?;
|
||||
|
||||
let directory = match settings {
|
||||
Some(row) => {
|
||||
let value: serde_json::Value = row.get("value");
|
||||
value.get("directory")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or("/data/thumbnails")
|
||||
.to_string()
|
||||
}
|
||||
None => "/data/thumbnails".to_string(),
|
||||
};
|
||||
|
||||
let directory_clone = directory.clone();
|
||||
let stats = tokio::task::spawn_blocking(move || {
|
||||
let path = std::path::Path::new(&directory_clone);
|
||||
if !path.exists() {
|
||||
return ThumbnailStats {
|
||||
total_size_mb: 0.0,
|
||||
file_count: 0,
|
||||
directory: directory_clone,
|
||||
};
|
||||
}
|
||||
|
||||
let (total_size, file_count) = compute_dir_stats(path);
|
||||
|
||||
ThumbnailStats {
|
||||
total_size_mb: total_size as f64 / 1024.0 / 1024.0,
|
||||
file_count,
|
||||
directory: directory_clone,
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("thumbnail stats failed: {}", e)))?;
|
||||
|
||||
Ok(Json(stats))
|
||||
}
|
||||
|
||||
43
apps/backoffice/app/api/books/[bookId]/thumbnail/route.ts
Normal file
43
apps/backoffice/app/api/books/[bookId]/thumbnail/route.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ bookId: string }> }
|
||||
) {
|
||||
const { bookId } = await params;
|
||||
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiUrl = `${apiBaseUrl}/books/${bookId}/thumbnail`;
|
||||
|
||||
const token = process.env.API_BOOTSTRAP_TOKEN;
|
||||
if (!token) {
|
||||
return new NextResponse("API token not configured", { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(apiUrl, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
|
||||
status: response.status
|
||||
});
|
||||
}
|
||||
|
||||
const contentType = response.headers.get("content-type") || "image/webp";
|
||||
const imageBuffer = await response.arrayBuffer();
|
||||
|
||||
return new NextResponse(imageBuffer, {
|
||||
headers: {
|
||||
"Content-Type": contentType,
|
||||
"Cache-Control": "public, max-age=31536000, immutable",
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Error fetching thumbnail:", error);
|
||||
return new NextResponse("Failed to fetch thumbnail", { status: 500 });
|
||||
}
|
||||
}
|
||||
@@ -38,7 +38,7 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
|
||||
}
|
||||
|
||||
export function BookCard({ book }: BookCardProps) {
|
||||
const coverUrl = book.coverUrl || `/api/books/${book.id}/pages/1?format=webp&width=200`;
|
||||
const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
|
||||
|
||||
return (
|
||||
<Link
|
||||
|
||||
@@ -2,16 +2,21 @@
|
||||
|
||||
import { useState } from "react";
|
||||
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui";
|
||||
import { Settings, CacheStats, ClearCacheResponse } from "../../lib/api";
|
||||
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats } from "../../lib/api";
|
||||
|
||||
interface SettingsPageProps {
|
||||
initialSettings: Settings;
|
||||
initialCacheStats: CacheStats;
|
||||
initialThumbnailStats: ThumbnailStats;
|
||||
}
|
||||
|
||||
export default function SettingsPage({ initialSettings, initialCacheStats }: SettingsPageProps) {
|
||||
const [settings, setSettings] = useState<Settings>(initialSettings);
|
||||
export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats }: SettingsPageProps) {
|
||||
const [settings, setSettings] = useState<Settings>({
|
||||
...initialSettings,
|
||||
thumbnail: initialSettings.thumbnail || { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
|
||||
});
|
||||
const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats);
|
||||
const [thumbnailStats, setThumbnailStats] = useState<ThumbnailStats>(initialThumbnailStats);
|
||||
const [isClearing, setIsClearing] = useState(false);
|
||||
const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null);
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
@@ -299,6 +304,131 @@ export default function SettingsPage({ initialSettings, initialCacheStats }: Set
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Thumbnail Settings */}
|
||||
<Card className="mb-6">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2">
|
||||
<Icon name="image" size="md" />
|
||||
Thumbnails
|
||||
</CardTitle>
|
||||
<CardDescription>Configure thumbnail generation during indexing</CardDescription>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
<div className="space-y-4">
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Enable Thumbnails</label>
|
||||
<FormSelect
|
||||
value={settings.thumbnail.enabled ? "true" : "false"}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, enabled: e.target.value === "true" } };
|
||||
setSettings(newSettings);
|
||||
handleUpdateSetting("thumbnail", newSettings.thumbnail);
|
||||
}}
|
||||
>
|
||||
<option value="true">Enabled</option>
|
||||
<option value="false">Disabled</option>
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Output Format</label>
|
||||
<FormSelect
|
||||
value={settings.thumbnail.format}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, format: e.target.value } };
|
||||
setSettings(newSettings);
|
||||
handleUpdateSetting("thumbnail", newSettings.thumbnail);
|
||||
}}
|
||||
>
|
||||
<option value="webp">WebP (Recommended)</option>
|
||||
<option value="jpeg">JPEG</option>
|
||||
<option value="png">PNG</option>
|
||||
</FormSelect>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Width (px)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={50}
|
||||
max={600}
|
||||
value={settings.thumbnail.width}
|
||||
onChange={(e) => {
|
||||
const width = parseInt(e.target.value) || 300;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, width } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Height (px)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={50}
|
||||
max={800}
|
||||
value={settings.thumbnail.height}
|
||||
onChange={(e) => {
|
||||
const height = parseInt(e.target.value) || 400;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, height } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Quality (1-100)</label>
|
||||
<FormInput
|
||||
type="number"
|
||||
min={1}
|
||||
max={100}
|
||||
value={settings.thumbnail.quality}
|
||||
onChange={(e) => {
|
||||
const quality = parseInt(e.target.value) || 80;
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, quality } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
<FormRow>
|
||||
<FormField className="flex-1">
|
||||
<label className="text-sm font-medium text-muted-foreground mb-1 block">Thumbnail Directory</label>
|
||||
<FormInput
|
||||
value={settings.thumbnail.directory}
|
||||
onChange={(e) => {
|
||||
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, directory: e.target.value } };
|
||||
setSettings(newSettings);
|
||||
}}
|
||||
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
|
||||
/>
|
||||
</FormField>
|
||||
</FormRow>
|
||||
|
||||
<div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Total Size</p>
|
||||
<p className="text-2xl font-semibold">{thumbnailStats.total_size_mb.toFixed(2)} MB</p>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Files</p>
|
||||
<p className="text-2xl font-semibold">{thumbnailStats.file_count}</p>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-sm text-muted-foreground">Directory</p>
|
||||
<p className="text-sm font-mono truncate" title={thumbnailStats.directory}>{thumbnailStats.directory}</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<p className="text-sm text-muted-foreground">
|
||||
Note: Thumbnail settings are used during indexing. Existing thumbnails will not be regenerated automatically.
|
||||
</p>
|
||||
</div>
|
||||
</CardContent>
|
||||
</Card>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { getSettings, getCacheStats } from "../../lib/api";
|
||||
import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
|
||||
import SettingsPage from "./SettingsPage";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
@@ -7,7 +7,8 @@ export default async function SettingsPageWrapper() {
|
||||
const settings = await getSettings().catch(() => ({
|
||||
image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
|
||||
cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
|
||||
limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 }
|
||||
limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 },
|
||||
thumbnail: { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
|
||||
}));
|
||||
|
||||
const cacheStats = await getCacheStats().catch(() => ({
|
||||
@@ -16,5 +17,11 @@ export default async function SettingsPageWrapper() {
|
||||
directory: "/tmp/stripstream-image-cache"
|
||||
}));
|
||||
|
||||
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} />;
|
||||
const thumbnailStats = await getThumbnailStats().catch(() => ({
|
||||
total_size_mb: 0,
|
||||
file_count: 0,
|
||||
directory: "/data/thumbnails"
|
||||
}));
|
||||
|
||||
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
|
||||
}
|
||||
|
||||
@@ -98,7 +98,10 @@ function config() {
|
||||
return { baseUrl: baseUrl.replace(/\/$/, ""), token };
|
||||
}
|
||||
|
||||
export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
|
||||
export async function apiFetch<T>(
|
||||
path: string,
|
||||
init?: RequestInit,
|
||||
): Promise<T> {
|
||||
const { baseUrl, token } = config();
|
||||
const headers = new Headers(init?.headers || {});
|
||||
headers.set("Authorization", `Bearer ${token}`);
|
||||
@@ -109,7 +112,7 @@ export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T>
|
||||
const res = await fetch(`${baseUrl}${path}`, {
|
||||
...init,
|
||||
headers,
|
||||
cache: "no-store"
|
||||
cache: "no-store",
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -130,7 +133,7 @@ export async function fetchLibraries() {
|
||||
export async function createLibrary(name: string, rootPath: string) {
|
||||
return apiFetch<LibraryDto>("/libraries", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ name, root_path: rootPath })
|
||||
body: JSON.stringify({ name, root_path: rootPath }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -143,12 +146,21 @@ export async function scanLibrary(libraryId: string, full?: boolean) {
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
export async function updateLibraryMonitoring(libraryId: string, monitorEnabled: boolean, scanMode: string, watcherEnabled?: boolean) {
|
||||
const body: { monitor_enabled: boolean; scan_mode: string; watcher_enabled?: boolean } = {
|
||||
export async function updateLibraryMonitoring(
|
||||
libraryId: string,
|
||||
monitorEnabled: boolean,
|
||||
scanMode: string,
|
||||
watcherEnabled?: boolean,
|
||||
) {
|
||||
const body: {
|
||||
monitor_enabled: boolean;
|
||||
scan_mode: string;
|
||||
watcher_enabled?: boolean;
|
||||
} = {
|
||||
monitor_enabled: monitorEnabled,
|
||||
scan_mode: scanMode,
|
||||
};
|
||||
@@ -157,7 +169,7 @@ export async function updateLibraryMonitoring(libraryId: string, monitorEnabled:
|
||||
}
|
||||
return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, {
|
||||
method: "PATCH",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -171,7 +183,7 @@ export async function rebuildIndex(libraryId?: string, full?: boolean) {
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>("/index/rebuild", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -191,7 +203,7 @@ export async function listTokens() {
|
||||
export async function createToken(name: string, scope: string) {
|
||||
return apiFetch<{ token: string }>("/admin/tokens", {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ name, scope })
|
||||
body: JSON.stringify({ name, scope }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -199,13 +211,18 @@ export async function revokeToken(id: string) {
|
||||
return apiFetch<void>(`/admin/tokens/${id}`, { method: "DELETE" });
|
||||
}
|
||||
|
||||
export async function fetchBooks(libraryId?: string, series?: string, cursor?: string, limit: number = 50): Promise<BooksPageDto> {
|
||||
export async function fetchBooks(
|
||||
libraryId?: string,
|
||||
series?: string,
|
||||
cursor?: string,
|
||||
limit: number = 50,
|
||||
): Promise<BooksPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
if (series) params.set("series", series);
|
||||
if (cursor) params.set("cursor", cursor);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
|
||||
return apiFetch<BooksPageDto>(`/books?${params.toString()}`);
|
||||
}
|
||||
|
||||
@@ -214,27 +231,35 @@ export type SeriesPageDto = {
|
||||
next_cursor: string | null;
|
||||
};
|
||||
|
||||
export async function fetchSeries(libraryId: string, cursor?: string, limit: number = 50): Promise<SeriesPageDto> {
|
||||
export async function fetchSeries(
|
||||
libraryId: string,
|
||||
cursor?: string,
|
||||
limit: number = 50,
|
||||
): Promise<SeriesPageDto> {
|
||||
const params = new URLSearchParams();
|
||||
if (cursor) params.set("cursor", cursor);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
return apiFetch<SeriesPageDto>(`/libraries/${libraryId}/series?${params.toString()}`);
|
||||
|
||||
return apiFetch<SeriesPageDto>(
|
||||
`/libraries/${libraryId}/series?${params.toString()}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function searchBooks(query: string, libraryId?: string, limit: number = 20): Promise<SearchResponseDto> {
|
||||
export async function searchBooks(
|
||||
query: string,
|
||||
libraryId?: string,
|
||||
limit: number = 20,
|
||||
): Promise<SearchResponseDto> {
|
||||
const params = new URLSearchParams();
|
||||
params.set("q", query);
|
||||
if (libraryId) params.set("library_id", libraryId);
|
||||
params.set("limit", limit.toString());
|
||||
|
||||
|
||||
return apiFetch<SearchResponseDto>(`/search?${params.toString()}`);
|
||||
}
|
||||
|
||||
export function getBookCoverUrl(bookId: string): string {
|
||||
// Utiliser une route API locale pour éviter les problèmes CORS
|
||||
// Le navigateur ne peut pas accéder à http://api:8080 (hostname Docker interne)
|
||||
return `/api/books/${bookId}/pages/1?format=webp&width=200`;
|
||||
return `/api/books/${bookId}/thumbnail`;
|
||||
}
|
||||
|
||||
export type Settings = {
|
||||
@@ -254,6 +279,14 @@ export type Settings = {
|
||||
timeout_seconds: number;
|
||||
rate_limit_per_second: number;
|
||||
};
|
||||
thumbnail: {
|
||||
enabled: boolean;
|
||||
width: number;
|
||||
height: number;
|
||||
quality: number;
|
||||
format: string;
|
||||
directory: string;
|
||||
};
|
||||
};
|
||||
|
||||
export type CacheStats = {
|
||||
@@ -267,6 +300,12 @@ export type ClearCacheResponse = {
|
||||
message: string;
|
||||
};
|
||||
|
||||
export type ThumbnailStats = {
|
||||
total_size_mb: number;
|
||||
file_count: number;
|
||||
directory: string;
|
||||
};
|
||||
|
||||
export async function getSettings() {
|
||||
return apiFetch<Settings>("/settings");
|
||||
}
|
||||
@@ -274,7 +313,7 @@ export async function getSettings() {
|
||||
export async function updateSetting(key: string, value: unknown) {
|
||||
return apiFetch<unknown>(`/settings/${key}`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ value })
|
||||
body: JSON.stringify({ value }),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -283,5 +322,11 @@ export async function getCacheStats() {
|
||||
}
|
||||
|
||||
export async function clearCache() {
|
||||
return apiFetch<ClearCacheResponse>("/settings/cache/clear", { method: "POST" });
|
||||
return apiFetch<ClearCacheResponse>("/settings/cache/clear", {
|
||||
method: "POST",
|
||||
});
|
||||
}
|
||||
|
||||
export async function getThumbnailStats() {
|
||||
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
|
||||
}
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "next dev -p 8082",
|
||||
"dev": "next dev -p 7082",
|
||||
"build": "next build",
|
||||
"start": "next start -p 8082"
|
||||
"start": "next start -p 7082"
|
||||
},
|
||||
"dependencies": {
|
||||
"next": "^16.1.6",
|
||||
|
||||
@@ -8,9 +8,11 @@ license.workspace = true
|
||||
anyhow.workspace = true
|
||||
axum.workspace = true
|
||||
chrono.workspace = true
|
||||
image.workspace = true
|
||||
notify = "6.1"
|
||||
parsers = { path = "../../crates/parsers" }
|
||||
rand.workspace = true
|
||||
rayon.workspace = true
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
@@ -22,3 +24,4 @@ tracing.workspace = true
|
||||
tracing-subscriber.workspace = true
|
||||
uuid.workspace = true
|
||||
walkdir.workspace = true
|
||||
webp = "0.3"
|
||||
|
||||
@@ -2,13 +2,15 @@ use anyhow::Context;
|
||||
use axum::{extract::State, routing::get, Json, Router};
|
||||
use chrono::{DateTime, Utc};
|
||||
use axum::http::StatusCode;
|
||||
use image::GenericImageView;
|
||||
use notify::{Event, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use parsers::{detect_format, parse_metadata, BookFormat};
|
||||
use parsers::{detect_format, parse_metadata, BookFormat, extract_first_page};
|
||||
use rayon::prelude::*;
|
||||
use serde::Serialize;
|
||||
use sha2::{Digest, Sha256};
|
||||
use sqlx::{postgres::PgPoolOptions, Row};
|
||||
use std::{collections::HashMap, path::Path, time::Duration};
|
||||
use stripstream_core::config::IndexerConfig;
|
||||
use stripstream_core::config::{IndexerConfig, ThumbnailConfig};
|
||||
use tokio::sync::mpsc;
|
||||
use tracing::{error, info, trace, warn};
|
||||
use uuid::Uuid;
|
||||
@@ -37,6 +39,7 @@ struct AppState {
|
||||
pool: sqlx::PgPool,
|
||||
meili_url: String,
|
||||
meili_master_key: String,
|
||||
thumbnail_config: ThumbnailConfig,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
@@ -65,6 +68,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
pool,
|
||||
meili_url: config.meili_url.clone(),
|
||||
meili_master_key: config.meili_master_key.clone(),
|
||||
thumbnail_config: config.thumbnail_config.clone(),
|
||||
};
|
||||
|
||||
tokio::spawn(run_worker(state.clone(), config.scan_interval_seconds));
|
||||
@@ -411,7 +415,11 @@ async fn claim_next_job(pool: &sqlx::PgPool) -> anyhow::Result<Option<(Uuid, Opt
|
||||
|
||||
async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<Uuid>) -> anyhow::Result<()> {
|
||||
info!("[JOB] Processing {} library={:?}", job_id, target_library_id);
|
||||
|
||||
|
||||
// Load thumbnail config from database (fallback to env/default)
|
||||
let thumbnail_config = load_thumbnail_config(&state.pool, &state.thumbnail_config).await;
|
||||
info!("[THUMB] Config: enabled={}, dir={}", thumbnail_config.enabled, thumbnail_config.directory);
|
||||
|
||||
// Get job type to check if it's a full rebuild
|
||||
let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
@@ -423,6 +431,35 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
// For full rebuilds, delete existing data first
|
||||
if is_full_rebuild {
|
||||
info!("[JOB] Full rebuild: deleting existing data");
|
||||
|
||||
// Clean thumbnail directory - only for affected books
|
||||
let thumb_dir = Path::new(&thumbnail_config.directory);
|
||||
if thumb_dir.exists() {
|
||||
if let Some(library_id) = target_library_id {
|
||||
// Get book IDs for this library to delete their thumbnails
|
||||
let book_ids: Vec<Uuid> = sqlx::query_scalar(
|
||||
"SELECT id FROM books WHERE library_id = $1"
|
||||
)
|
||||
.bind(target_library_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
for book_id in &book_ids {
|
||||
let thumb_path = thumb_dir.join(format!("{}.webp", book_id));
|
||||
let _ = std::fs::remove_file(thumb_path);
|
||||
}
|
||||
info!("[JOB] Cleaned {} thumbnails for library {}", book_ids.len(), library_id);
|
||||
} else {
|
||||
// Delete all thumbnails
|
||||
if let Ok(entries) = std::fs::read_dir(thumb_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let _ = std::fs::remove_file(entry.path());
|
||||
}
|
||||
}
|
||||
info!("[JOB] Cleaned all thumbnails");
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(library_id) = target_library_id {
|
||||
// Delete books and files for specific library
|
||||
sqlx::query("DELETE FROM book_files WHERE book_id IN (SELECT id FROM books WHERE library_id = $1)")
|
||||
@@ -453,17 +490,20 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
.await?
|
||||
};
|
||||
|
||||
// First pass: count total files for progress estimation
|
||||
let mut total_files = 0usize;
|
||||
for library in &libraries {
|
||||
let root_path: String = library.get("root_path");
|
||||
let root_path = remap_libraries_path(&root_path);
|
||||
for entry in WalkDir::new(&root_path).into_iter().filter_map(Result::ok) {
|
||||
if entry.file_type().is_file() && detect_format(entry.path()).is_some() {
|
||||
total_files += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
// First pass: count total files for progress estimation (parallel)
|
||||
let library_paths: Vec<String> = libraries.iter()
|
||||
.map(|library| remap_libraries_path(&library.get::<String, _>("root_path")))
|
||||
.collect();
|
||||
|
||||
let total_files: usize = library_paths.par_iter()
|
||||
.map(|root_path| {
|
||||
WalkDir::new(root_path)
|
||||
.into_iter()
|
||||
.filter_map(Result::ok)
|
||||
.filter(|entry| entry.file_type().is_file() && detect_format(entry.path()).is_some())
|
||||
.count()
|
||||
})
|
||||
.sum();
|
||||
|
||||
info!("[JOB] Found {} libraries, {} total files to index", libraries.len(), total_files);
|
||||
|
||||
@@ -488,7 +528,7 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
let library_id: Uuid = library.get("id");
|
||||
let root_path: String = library.get("root_path");
|
||||
let root_path = remap_libraries_path(&root_path);
|
||||
match scan_library(state, job_id, library_id, Path::new(&root_path), &mut stats, &mut total_processed_count, total_files, is_full_rebuild).await {
|
||||
match scan_library(state, job_id, library_id, Path::new(&root_path), &mut stats, &mut total_processed_count, total_files, is_full_rebuild, thumbnail_config.clone()).await {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
stats.errors += 1;
|
||||
@@ -544,6 +584,7 @@ struct BookInsert {
|
||||
series: Option<String>,
|
||||
volume: Option<i32>,
|
||||
page_count: Option<i32>,
|
||||
thumbnail_path: Option<String>,
|
||||
}
|
||||
|
||||
struct FileInsert {
|
||||
@@ -661,12 +702,13 @@ async fn flush_all_batches(
|
||||
let series: Vec<Option<String>> = books_insert.iter().map(|b| b.series.clone()).collect();
|
||||
let volumes: Vec<Option<i32>> = books_insert.iter().map(|b| b.volume).collect();
|
||||
let page_counts: Vec<Option<i32>> = books_insert.iter().map(|b| b.page_count).collect();
|
||||
let thumbnail_paths: Vec<Option<String>> = books_insert.iter().map(|b| b.thumbnail_path.clone()).collect();
|
||||
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO books (id, library_id, kind, title, series, volume, page_count)
|
||||
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[])
|
||||
AS t(id, library_id, kind, title, series, volume, page_count)
|
||||
INSERT INTO books (id, library_id, kind, title, series, volume, page_count, thumbnail_path)
|
||||
SELECT * FROM UNNEST($1::uuid[], $2::uuid[], $3::text[], $4::text[], $5::text[], $6::int[], $7::int[], $8::text[])
|
||||
AS t(id, library_id, kind, title, series, volume, page_count, thumbnail_path)
|
||||
"#
|
||||
)
|
||||
.bind(&book_ids)
|
||||
@@ -676,6 +718,7 @@ async fn flush_all_batches(
|
||||
.bind(&series)
|
||||
.bind(&volumes)
|
||||
.bind(&page_counts)
|
||||
.bind(&thumbnail_paths)
|
||||
.execute(&mut *tx)
|
||||
.await?;
|
||||
|
||||
@@ -765,6 +808,7 @@ async fn scan_library(
|
||||
total_processed_count: &mut i32,
|
||||
total_files: usize,
|
||||
is_full_rebuild: bool,
|
||||
thumbnail_config: ThumbnailConfig,
|
||||
) -> anyhow::Result<()> {
|
||||
info!("[SCAN] Starting scan of library {} at path: {} (full_rebuild={})", library_id, root.display(), is_full_rebuild);
|
||||
|
||||
@@ -884,6 +928,36 @@ async fn scan_library(
|
||||
|
||||
info!("[PROCESS] Updating existing file: {} (full_rebuild={}, fingerprint_match={})", file_name, is_full_rebuild, old_fingerprint == fingerprint);
|
||||
|
||||
// Generate thumbnail for existing files if enabled and fingerprint changed
|
||||
let thumbnail_path = if thumbnail_config.enabled && fingerprint != old_fingerprint {
|
||||
info!("[THUMB] Generating thumbnail for updated file: {}", file_name);
|
||||
match extract_first_page(path, format) {
|
||||
Ok(page_bytes) => {
|
||||
match generate_thumbnail(&page_bytes, &thumbnail_config) {
|
||||
Ok(thumb_bytes) => {
|
||||
match save_thumbnail(book_id, &thumb_bytes, &thumbnail_config) {
|
||||
Ok(path) => Some(path),
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to save thumbnail for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to generate thumbnail for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to extract first page for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match parse_metadata(path, format, root) {
|
||||
Ok(parsed) => {
|
||||
books_to_update.push(BookUpdate {
|
||||
@@ -903,6 +977,17 @@ async fn scan_library(
|
||||
fingerprint,
|
||||
});
|
||||
|
||||
// Update thumbnail_path if we generated one
|
||||
if let Some(thumb_path) = thumbnail_path {
|
||||
let book_id_for_update = book_id;
|
||||
let thumb_path_clone = thumb_path.clone();
|
||||
sqlx::query("UPDATE books SET thumbnail_path = $1 WHERE id = $2")
|
||||
.bind(thumb_path_clone)
|
||||
.bind(book_id_for_update)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
}
|
||||
|
||||
stats.indexed_files += 1;
|
||||
}
|
||||
Err(err) => {
|
||||
@@ -944,11 +1029,49 @@ async fn scan_library(
|
||||
|
||||
// New file
|
||||
info!("[PROCESS] Inserting new file: {}", file_name);
|
||||
|
||||
// Generate book_id early for thumbnail naming
|
||||
let book_id = Uuid::new_v4();
|
||||
|
||||
let thumbnail_path = if thumbnail_config.enabled {
|
||||
info!("[THUMB] Generating thumbnail for {} (enabled={}, dir={})", file_name, thumbnail_config.enabled, thumbnail_config.directory);
|
||||
match extract_first_page(path, format) {
|
||||
Ok(page_bytes) => {
|
||||
info!("[THUMB] Extracted first page: {} bytes", page_bytes.len());
|
||||
match generate_thumbnail(&page_bytes, &thumbnail_config) {
|
||||
Ok(thumb_bytes) => {
|
||||
info!("[THUMB] Generated thumbnail: {} bytes", thumb_bytes.len());
|
||||
match save_thumbnail(book_id, &thumb_bytes, &thumbnail_config) {
|
||||
Ok(path) => {
|
||||
info!("[THUMB] Saved thumbnail to {}", path);
|
||||
Some(path)
|
||||
},
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to save thumbnail for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to generate thumbnail for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
warn!("[THUMB] Failed to extract first page for {}: {}", file_name, e);
|
||||
None
|
||||
}
|
||||
}
|
||||
} else {
|
||||
info!("[THUMB] Skipping thumbnail (disabled)");
|
||||
None
|
||||
};
|
||||
|
||||
match parse_metadata(path, format, root) {
|
||||
Ok(parsed) => {
|
||||
let book_id = Uuid::new_v4();
|
||||
let file_id = Uuid::new_v4();
|
||||
|
||||
|
||||
books_to_insert.push(BookInsert {
|
||||
book_id,
|
||||
library_id,
|
||||
@@ -957,6 +1080,7 @@ async fn scan_library(
|
||||
series: parsed.series,
|
||||
volume: parsed.volume,
|
||||
page_count: parsed.page_count,
|
||||
thumbnail_path,
|
||||
});
|
||||
|
||||
files_to_insert.push(FileInsert {
|
||||
@@ -987,6 +1111,7 @@ async fn scan_library(
|
||||
series: None,
|
||||
volume: None,
|
||||
page_count: None,
|
||||
thumbnail_path: None,
|
||||
});
|
||||
|
||||
files_to_insert.push(FileInsert {
|
||||
@@ -1063,6 +1188,30 @@ fn compute_fingerprint(path: &Path, size: u64, mtime: &DateTime<Utc>) -> anyhow:
|
||||
Ok(format!("{:x}", hasher.finalize()))
|
||||
}
|
||||
|
||||
async fn load_thumbnail_config(pool: &sqlx::PgPool, fallback: &ThumbnailConfig) -> ThumbnailConfig {
|
||||
let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
|
||||
.fetch_optional(pool)
|
||||
.await;
|
||||
|
||||
match row {
|
||||
Ok(Some(row)) => {
|
||||
let value: serde_json::Value = row.get("value");
|
||||
ThumbnailConfig {
|
||||
enabled: value.get("enabled").and_then(|v| v.as_bool()).unwrap_or(fallback.enabled),
|
||||
width: value.get("width").and_then(|v| v.as_u64()).map(|v| v as u32).unwrap_or(fallback.width),
|
||||
height: value.get("height").and_then(|v| v.as_u64()).map(|v| v as u32).unwrap_or(fallback.height),
|
||||
quality: value.get("quality").and_then(|v| v.as_u64()).map(|v| v as u8).unwrap_or(fallback.quality),
|
||||
format: value.get("format").and_then(|v| v.as_str()).map(|s| s.to_string()).unwrap_or_else(|| fallback.format.clone()),
|
||||
directory: value.get("directory").and_then(|v| v.as_str()).map(|s| s.to_string()).unwrap_or_else(|| fallback.directory.clone()),
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
warn!("[THUMB] Could not load thumbnail config from DB, using fallback");
|
||||
fallback.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn kind_from_format(format: BookFormat) -> &'static str {
|
||||
match format {
|
||||
BookFormat::Pdf => "ebook",
|
||||
@@ -1076,6 +1225,50 @@ fn file_display_name(path: &Path) -> String {
|
||||
.unwrap_or_else(|| "Untitled".to_string())
|
||||
}
|
||||
|
||||
fn generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<Vec<u8>> {
|
||||
let img = image::load_from_memory(image_bytes)
|
||||
.context("failed to load image")?;
|
||||
|
||||
let (orig_w, orig_h) = img.dimensions();
|
||||
let target_w = config.width;
|
||||
let target_h = config.height;
|
||||
|
||||
let ratio_w = target_w as f32 / orig_w as f32;
|
||||
let ratio_h = target_h as f32 / orig_h as f32;
|
||||
let ratio = ratio_w.min(ratio_h);
|
||||
|
||||
let new_w = (orig_w as f32 * ratio) as u32;
|
||||
let new_h = (orig_h as f32 * ratio) as u32;
|
||||
|
||||
let resized = img.resize(new_w, new_h, image::imageops::FilterType::Lanczos3);
|
||||
|
||||
let rgba = resized.to_rgba8();
|
||||
let (w, h) = rgba.dimensions();
|
||||
|
||||
let rgb_data: Vec<u8> = rgba
|
||||
.pixels()
|
||||
.flat_map(|p| [p[0], p[1], p[2]])
|
||||
.collect();
|
||||
|
||||
let quality = f32::max(config.quality as f32, 85.0);
|
||||
let webp_data = webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h)
|
||||
.encode(quality);
|
||||
|
||||
Ok(webp_data.to_vec())
|
||||
}
|
||||
|
||||
fn save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<String> {
|
||||
let dir = Path::new(&config.directory);
|
||||
std::fs::create_dir_all(dir)?;
|
||||
|
||||
let filename = format!("{}.webp", book_id);
|
||||
let path = dir.join(&filename);
|
||||
|
||||
std::fs::write(&path, thumbnail_bytes)?;
|
||||
|
||||
Ok(path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct SearchDoc {
|
||||
id: String,
|
||||
|
||||
@@ -12,10 +12,12 @@ pub struct ApiConfig {
|
||||
impl ApiConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
|
||||
listen_addr: std::env::var("API_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
|
||||
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY")
|
||||
.context("MEILI_MASTER_KEY is required")?,
|
||||
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
|
||||
.context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
})
|
||||
@@ -29,20 +31,68 @@ pub struct IndexerConfig {
|
||||
pub meili_url: String,
|
||||
pub meili_master_key: String,
|
||||
pub scan_interval_seconds: u64,
|
||||
pub thumbnail_config: ThumbnailConfig,
|
||||
}
|
||||
|
||||
/// Settings controlling thumbnail generation during indexing.
#[derive(Debug, Clone)]
pub struct ThumbnailConfig {
    // Whether thumbnails are generated at all during a scan.
    pub enabled: bool,
    // Maximum thumbnail width in pixels (aspect ratio is preserved).
    pub width: u32,
    // Maximum thumbnail height in pixels (aspect ratio is preserved).
    pub height: u32,
    // WebP encoding quality, 1-100.
    pub quality: u8,
    // Output format label; the visible generation path always encodes WebP,
    // so this field appears informational — confirm before relying on it.
    pub format: String,
    // Directory where thumbnail files are written (one `<book_id>.webp` each).
    pub directory: String,
}

impl Default for ThumbnailConfig {
    /// Defaults matching the documented env-var fallbacks: 300x400 WebP at
    /// quality 80, written under `/data/thumbnails` (the Docker volume mount).
    fn default() -> Self {
        Self {
            enabled: true,
            width: 300,
            height: 400,
            quality: 80,
            format: "webp".to_string(),
            directory: "/data/thumbnails".to_string(),
        }
    }
}
|
||||
|
||||
impl IndexerConfig {
|
||||
pub fn from_env() -> Result<Self> {
|
||||
let thumbnail_config = ThumbnailConfig {
|
||||
enabled: std::env::var("THUMBNAIL_ENABLED")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<bool>().ok())
|
||||
.unwrap_or(true),
|
||||
width: std::env::var("THUMBNAIL_WIDTH")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u32>().ok())
|
||||
.unwrap_or(300),
|
||||
height: std::env::var("THUMBNAIL_HEIGHT")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u32>().ok())
|
||||
.unwrap_or(400),
|
||||
quality: std::env::var("THUMBNAIL_QUALITY")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u8>().ok())
|
||||
.unwrap_or(80),
|
||||
format: std::env::var("THUMBNAIL_FORMAT").unwrap_or_else(|_| "webp".to_string()),
|
||||
directory: std::env::var("THUMBNAIL_DIRECTORY")
|
||||
.unwrap_or_else(|_| "/data/thumbnails".to_string()),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
|
||||
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
|
||||
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
|
||||
meili_master_key: std::env::var("MEILI_MASTER_KEY")
|
||||
.context("MEILI_MASTER_KEY is required")?,
|
||||
scan_interval_seconds: std::env::var("INDEXER_SCAN_INTERVAL_SECONDS")
|
||||
.ok()
|
||||
.and_then(|v| v.parse::<u64>().ok())
|
||||
.unwrap_or(5),
|
||||
thumbnail_config,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -59,8 +109,10 @@ impl AdminUiConfig {
|
||||
Ok(Self {
|
||||
listen_addr: std::env::var("ADMIN_UI_LISTEN_ADDR")
|
||||
.unwrap_or_else(|_| "0.0.0.0:8082".to_string()),
|
||||
api_base_url: std::env::var("API_BASE_URL").unwrap_or_else(|_| "http://api:8080".to_string()),
|
||||
api_token: std::env::var("API_BOOTSTRAP_TOKEN").context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
api_base_url: std::env::var("API_BASE_URL")
|
||||
.unwrap_or_else(|_| "http://api:8080".to_string()),
|
||||
api_token: std::env::var("API_BOOTSTRAP_TOKEN")
|
||||
.context("API_BOOTSTRAP_TOKEN is required")?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,4 +8,6 @@ license.workspace = true
|
||||
anyhow.workspace = true
|
||||
lopdf = "0.35"
|
||||
regex = "1"
|
||||
uuid.workspace = true
|
||||
walkdir.workspace = true
|
||||
zip = { version = "2.2", default-features = false, features = ["deflate"] }
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
use anyhow::{Context, Result};
|
||||
use std::io::Read;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
use uuid::Uuid;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
pub enum BookFormat {
|
||||
@@ -240,3 +244,105 @@ fn is_image_name(name: &str) -> bool {
|
||||
|| name.ends_with(".webp")
|
||||
|| name.ends_with(".avif")
|
||||
}
|
||||
|
||||
/// Extracts the raw bytes of a book's first page image, dispatching on the
/// archive format. The returned bytes are an encoded image file
/// (PNG/JPEG/...), not decoded pixels.
pub fn extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>> {
    match format {
        BookFormat::Cbz => extract_cbz_first_page(path),
        BookFormat::Cbr => extract_cbr_first_page(path),
        BookFormat::Pdf => extract_pdf_first_page(path),
    }
}
|
||||
|
||||
fn extract_cbz_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let file = std::fs::File::open(path)
|
||||
.with_context(|| format!("cannot open cbz: {}", path.display()))?;
|
||||
let mut archive = zip::ZipArchive::new(file).context("invalid cbz archive")?;
|
||||
|
||||
let mut image_names: Vec<String> = Vec::new();
|
||||
for i in 0..archive.len() {
|
||||
let entry = archive.by_index(i).context("cannot read cbz entry")?;
|
||||
let name = entry.name().to_ascii_lowercase();
|
||||
if is_image_name(&name) {
|
||||
image_names.push(entry.name().to_string());
|
||||
}
|
||||
}
|
||||
image_names.sort();
|
||||
|
||||
let first_image = image_names.first().context("no images found in cbz")?;
|
||||
|
||||
let mut entry = archive
|
||||
.by_name(first_image)
|
||||
.context("cannot read first image")?;
|
||||
let mut buf = Vec::new();
|
||||
entry.read_to_end(&mut buf)?;
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
fn extract_cbr_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-thumb-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&tmp_dir).context("cannot create temp dir")?;
|
||||
|
||||
// Use env command like the API does
|
||||
let output = std::process::Command::new("env")
|
||||
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
|
||||
.arg(&tmp_dir)
|
||||
.arg(path)
|
||||
.output()
|
||||
.context("unar failed")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
return Err(anyhow::anyhow!(
|
||||
"unar extract failed: {:?}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
// Use WalkDir for recursive search (CBR can have subdirectories)
|
||||
let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
|
||||
.into_iter()
|
||||
.filter_map(|e| e.ok())
|
||||
.filter(|e| {
|
||||
let name = e.file_name().to_string_lossy().to_lowercase();
|
||||
is_image_name(&name)
|
||||
})
|
||||
.collect();
|
||||
|
||||
image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
|
||||
|
||||
let first_image = image_files.first().context("no images found in cbr")?;
|
||||
|
||||
let data = std::fs::read(first_image.path())?;
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn extract_pdf_first_page(path: &Path) -> Result<Vec<u8>> {
|
||||
let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-thumb-{}", Uuid::new_v4()));
|
||||
std::fs::create_dir_all(&tmp_dir)?;
|
||||
let output_prefix = tmp_dir.join("page");
|
||||
|
||||
let output = Command::new("pdftoppm")
|
||||
.args([
|
||||
"-f",
|
||||
"1",
|
||||
"-singlefile",
|
||||
"-png",
|
||||
"-scale-to",
|
||||
"800",
|
||||
path.to_str().unwrap(),
|
||||
output_prefix.to_str().unwrap(),
|
||||
])
|
||||
.output()
|
||||
.context("pdftoppm failed")?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
return Err(anyhow::anyhow!("pdftoppm failed"));
|
||||
}
|
||||
|
||||
let image_path = output_prefix.with_extension("png");
|
||||
let data = std::fs::read(&image_path)?;
|
||||
let _ = std::fs::remove_dir_all(&tmp_dir);
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
@@ -57,6 +57,7 @@ services:
|
||||
- "7080:8080"
|
||||
volumes:
|
||||
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
|
||||
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
|
||||
depends_on:
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
@@ -80,6 +81,7 @@ services:
|
||||
- "7081:8081"
|
||||
volumes:
|
||||
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
|
||||
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
|
||||
depends_on:
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
|
||||
5
infra/migrations/0009_add_thumbnails.sql
Normal file
5
infra/migrations/0009_add_thumbnails.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Per-book thumbnail file location; NULL until a thumbnail is generated.
ALTER TABLE books ADD COLUMN IF NOT EXISTS thumbnail_path TEXT;

-- Seed the default thumbnail settings under the 'thumbnail' key.
-- NOTE(review): the conflict branch DO UPDATE resets any user-customized
-- 'thumbnail' settings back to these defaults every time the migration runs;
-- confirm that is intended (DO NOTHING would preserve existing values).
-- NOTE(review): the INSERT branch supplies a plain string literal while the
-- UPDATE branch casts with ::jsonb — relies on an implicit cast if
-- app_settings.value is jsonb; verify the column type.
INSERT INTO app_settings (key, value) VALUES
('thumbnail', '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}')
ON CONFLICT (key) DO UPDATE SET value = '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}'::jsonb;
|
||||
Reference in New Issue
Block a user