Compare commits

..

12 Commits

Author SHA1 Message Date
e64848a216 feat: implement thumbnail generation and management
- Remove unused image dependencies from Cargo.lock.
- Update API to handle thumbnail generation and checkup processes.
- Introduce new routes for rebuilding and regenerating thumbnails.
- Enhance job tracking with progress indicators for thumbnail jobs.
- Update front-end components to display thumbnail job status and progress.
- Add backend logic for managing thumbnail jobs and integrating with the API.
- Refactor existing code to accommodate new thumbnail functionalities.
2026-03-08 20:55:12 +01:00
c93a7d5d29 feat: thumbnails : part1 2026-03-08 17:54:47 +01:00
360d6e85de feat: review cbr and unraring for image on api 2026-03-07 15:47:46 +01:00
162b4712e7 fix: improve CBR extraction with fallback and increase timeout
- Try multiple entries in CBR archive until finding valid image
- Increase timeout from 12s to 30s for large files
- Better error messages for debugging
2026-03-07 15:02:55 +01:00
217919fa77 perf: increase concurrent renders limit from 4 to 8 2026-03-07 12:16:23 +01:00
ee0235b824 fix: improve CBR extraction logging and remove dead code
- Add magic bytes validation for extracted CBR images
- Add hex dump for debugging invalid images
- Show first entries when listing CBR archive
- Remove unused structs and functions from settings.rs
- Add -y flag to unrar for auto-confirm
2026-03-07 12:13:55 +01:00
f721b248f3 feat: add image rendering logs and refactor Icon component
- Add detailed tracing logs for image processing (CBZ, CBR, PDF)
- Add cache hit/miss logging with timing info
- Centralize all SVG icons into reusable Icon component
- Add Settings icon to header navigation
- Add icons for Image Processing, Cache, and Performance Limits sections
2026-03-07 10:44:38 +01:00
292c61566c feat: add image optimization and settings page
- Add persistent disk cache for processed images
- Optimize image processing with short-circuit and quality settings
- Add WebP lossy encoding with configurable quality
- Add settings API endpoints (GET/POST /settings, cache management)
- Add database table for app configuration
- Add /settings page in backoffice for image/cache/limits config
- Add cache stats and clear functionality
- Update navigation with settings link
2026-03-07 09:12:06 +01:00
9141edfaa9 fix: handle SSE controller errors gracefully
- Add isActive checks before writing to SSE controller
- Wrap controller operations in try/catch to prevent 'already closed' errors
- Fix race condition when client disconnects during SSE streaming
2026-03-06 22:40:57 +01:00
f0a967515b fix: improve series detection and add detailed indexing logs
- Fix series detection to handle path variations (symlinks, separators)
- Add comprehensive logging for job processing and file scanning
- Better error handling for path prefix stripping
- Track files scanned, indexed, and errors per library
2026-03-06 22:35:11 +01:00
d5d582db57 perf: optimize indexing speed with batching and incremental sync
- Batching BDD: group INSERT/UPDATE operations in batches of 100 using UNNEST
- Incremental MeiliSearch: only sync books modified since last sync
- Optimized fingerprint: use only size+mtime+filename (100x faster)
- Increased DB connections from 5 to 20
- Reduced progress update frequency (every 1s or 10 files)
- Add sync_metadata table to track last MeiliSearch sync
2026-03-06 22:09:37 +01:00
ee76090265 fix: natural sort regex to handle titles with letters after numbers
Replace REGEXP_REPLACE with REGEXP_MATCH to extract only digits
Fixes 'invalid input syntax for type integer' error when titles
contain letters after numbers like '20th century boys'
2026-03-06 21:59:19 +01:00
52 changed files with 3547 additions and 492 deletions

View File

@@ -48,6 +48,10 @@ LIBRARIES_ROOT_PATH=/libraries
# You can change this to an absolute path on your machine
LIBRARIES_HOST_PATH=../libraries
# Path to thumbnails directory on host machine (for Docker volume mount)
# Default: ../data/thumbnails (relative to infra/docker-compose.yml)
THUMBNAILS_HOST_PATH=../data/thumbnails
# =============================================================================
# Port Configuration
# =============================================================================

1
.gitignore vendored
View File

@@ -5,3 +5,4 @@ tmp/
libraries/
node_modules/
.next/
data/thumbnails

301
AGENTS.md Normal file
View File

@@ -0,0 +1,301 @@
# AGENTS.md - Agent Coding Guidelines for Stripstream Librarian
This file provides guidelines for agentic coding agents operating in this repository.
---
## 1. Build, Lint, and Test Commands
### Build Commands
```bash
# Build debug version (fastest for development)
cargo build
# Build release version (optimized)
cargo build --release
# Build specific crate
cargo build -p api
cargo build -p indexer
# Watch mode for development (requires cargo-watch)
cargo watch -x build
```
### Lint & Format Commands
```bash
# Run clippy lints
cargo clippy
# Fix auto-fixable clippy warnings
cargo clippy --fix
# Format code
cargo fmt
# Check formatting without making changes
cargo fmt -- --check
```
### Test Commands
```bash
# Run all tests
cargo test
# Run tests for specific crate
cargo test -p api
cargo test -p indexer
cargo test -p parsers
# Run a single test by name
cargo test test_name_here
# Run tests with output display
cargo test -- --nocapture
# Run doc tests
cargo test --doc
```
### Database Migrations
```bash
# Run migrations manually (via sqlx CLI)
# Ensure DATABASE_URL is set, then:
sqlx migrate run
# Create new migration
sqlx migrate add -r migration_name
```
### Docker Development
```bash
# Start infrastructure only
cd infra && docker compose up -d postgres meilisearch
# Start full stack
cd infra && docker compose up -d
# View logs
docker compose logs -f api
docker compose logs -f indexer
```
---
## 2. Code Style Guidelines
### General Principles
- **Conciseness**: Keep responses short and direct. Avoid unnecessary preamble or explanation.
- **Idiomatic Rust**: Follow Rust best practices and ecosystem conventions.
- **Error Handling**: Use `anyhow::Result<T>` for application code, `std::io::Result<T>` for simple file operations.
- **Async**: Use `tokio` for async runtime. Prefer `#[tokio::main]` over manual runtime.
### Naming Conventions
| Element | Convention | Example |
|---------|------------|---------|
| Variables | snake_case | `let book_id = ...` |
| Functions | snake_case | `fn get_book(...)` |
| Structs/Enums | PascalCase | `struct BookItem` |
| Modules | snake_case | `mod books;` |
| Constants | SCREAMING_SNAKE_CASE | `const BATCH_SIZE: usize = 100;` |
| Types | PascalCase | `type MyResult<T> = Result<T, Error>;` |
### Imports
- **Absolute imports** for workspace crates: `use parsers::{detect_format, parse_metadata};`
- **Standard library** imports: `use std::path::Path;`
- **External crates**: `use sqlx::{postgres::PgPoolOptions, Row};`
- **Group by**: std → external → workspace → local (with blank lines between)
```rust
use std::collections::HashMap;
use std::path::Path;
use anyhow::Context;
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use crate::error::ApiError;
use crate::AppState;
```
### Error Handling
- Use `anyhow` for application-level error handling with context
- Use `with_context()` for adding context to errors
- Return `Result<T, ApiError>` in API handlers
- Use `?` operator instead of manual match/unwrap where possible
```rust
// Good
fn process_book(path: &Path) -> anyhow::Result<Book> {
let file = std::fs::File::open(path)
.with_context(|| format!("cannot open file: {}", path.display()))?;
// ...
}
// Good - API error handling
async fn get_book(State(state): State<AppState>, Path(id): Path<Uuid>)
-> Result<Json<Book>, ApiError> {
let row = sqlx::query("SELECT * FROM books WHERE id = $1")
.bind(id)
.fetch_optional(&state.pool)
.await
.map_err(ApiError::internal)?;
// ...
}
```
### Database (sqlx)
- Use **raw SQL queries** with `sqlx::query()` and `sqlx::query_scalar()`
- Prefer **batch operations** using `UNNEST` for bulk inserts/updates
- Always use **parameterized queries** (`$1`, `$2`, etc.) - never string interpolation
- Follow existing patterns for transactions:
```rust
let mut tx = pool.begin().await?;
// ... queries ...
tx.commit().await?;
```
### Async/Tokio
- Use `tokio::spawn` for background tasks
- Use `spawn_blocking` for CPU-bound work (image processing, file I/O)
- Keep async handlers non-blocking
- Use `tokio::time::timeout` for operations with timeouts
```rust
let bytes = tokio::time::timeout(
Duration::from_secs(60),
tokio::task::spawn_blocking(move || {
render_page(&abs_path_clone, n)
}),
)
.await
.map_err(|_| ApiError::internal("timeout"))?
.map_err(ApiError::internal)?;
```
### Structs and Serialization
- Use `#[derive(Serialize, Deserialize, ToSchema)]` for API types
- Add `utoipa` schemas for OpenAPI documentation
- Use `Option<T>` for nullable fields
- Document public structs briefly
```rust
#[derive(Serialize, ToSchema)]
pub struct BookItem {
#[schema(value_type = String)]
pub id: Uuid,
pub title: String,
pub author: Option<String>,
// ...
}
```
### Performance Considerations
- Use **batch operations** for database inserts/updates (100 items recommended)
- Use **parallel iterators** (`rayon::par_iter()`) for CPU-intensive scans
- Implement **caching** for expensive operations (see `pages.rs` for disk/memory cache examples)
- Use **streaming** for large data where applicable
### Testing
- Currently there are no test files - consider adding unit tests for:
- Parser functions
- Thumbnail generation
- Configuration parsing
- Use `#[cfg(test)]` modules for unit tests; put integration tests in a top-level `tests/` directory
---
## 3. Project Structure
```
stripstream-librarian/
├── apps/
│ ├── api/ # REST API (axum)
│ │ └── src/
│ │ ├── main.rs
│ │ ├── books.rs
│ │ ├── pages.rs
│ │ └── ...
│ ├── indexer/ # Background indexing service
│ │ └── src/
│ │ └── main.rs
│ └── backoffice/ # Next.js admin UI
├── crates/
│ ├── core/ # Shared config
│ │ └── src/config.rs
│ └── parsers/ # Book parsing (CBZ, CBR, PDF)
├── infra/
│ ├── migrations/ # SQL migrations
│ └── docker-compose.yml
└── libraries/ # Book storage (mounted volume)
```
### Key Files
| File | Purpose |
|------|---------|
| `apps/api/src/books.rs` | Book CRUD endpoints |
| `apps/api/src/pages.rs` | Page rendering & caching |
| `apps/indexer/src/main.rs` | Indexing logic, batch processing |
| `crates/parsers/src/lib.rs` | Format detection, metadata parsing |
| `crates/core/src/config.rs` | Configuration from environment |
| `infra/migrations/*.sql` | Database schema |
---
## 4. Common Patterns
### Configuration from Environment
```rust
// In crates/core/src/config.rs
impl IndexerConfig {
pub fn from_env() -> Result<Self> {
Ok(Self {
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
database_url: std::env::var("DATABASE_URL")
.context("DATABASE_URL is required")?,
// ...
})
}
}
```
### Path Remapping
```rust
fn remap_libraries_path(path: &str) -> String {
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
if path.starts_with("/libraries/") {
return path.replacen("/libraries", &root, 1);
}
}
path.to_string()
}
```
---
## 5. Important Notes
- **Workspace**: This is a Cargo workspace. Always specify the package when building specific apps.
- **Dependencies**: External crates are defined in workspace `Cargo.toml`, not individual `Cargo.toml`.
- **Database**: PostgreSQL is required. Run migrations before starting services.
- **External Tools**: The indexer relies on `unar` (for CBR) and `pdftoppm` (for PDF) being installed on the system.

44
Cargo.lock generated
View File

@@ -78,6 +78,8 @@ dependencies = [
"utoipa",
"utoipa-swagger-ui",
"uuid",
"walkdir",
"webp",
"zip 2.4.2",
]
@@ -317,6 +319,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2"
dependencies = [
"find-msvc-tools",
"jobserver",
"libc",
"shlex",
]
@@ -799,6 +803,12 @@ dependencies = [
"wasip3",
]
[[package]]
name = "glob"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -1138,6 +1148,8 @@ dependencies = [
"chrono",
"notify",
"parsers",
"rand 0.8.5",
"rayon",
"reqwest",
"serde",
"serde_json",
@@ -1215,6 +1227,16 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
[[package]]
name = "jobserver"
version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
dependencies = [
"getrandom 0.3.4",
"libc",
]
[[package]]
name = "js-sys"
version = "0.3.91"
@@ -1294,6 +1316,16 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "libwebp-sys"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "54cd30df7c7165ce74a456e4ca9732c603e8dc5e60784558c1c6dc047f876733"
dependencies = [
"cc",
"glob",
]
[[package]]
name = "litemap"
version = "0.8.1"
@@ -1593,6 +1625,8 @@ dependencies = [
"anyhow",
"lopdf",
"regex",
"uuid",
"walkdir",
"zip 2.4.2",
]
@@ -3152,6 +3186,16 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "webp"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c071456adef4aca59bf6a583c46b90ff5eb0b4f758fc347cea81290288f37ce1"
dependencies = [
"image",
"libwebp-sys",
]
[[package]]
name = "webpki-roots"
version = "0.26.11"

View File

@@ -20,6 +20,7 @@ base64 = "0.22"
chrono = { version = "0.4", features = ["serde"] }
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
lru = "0.12"
rayon = "1.10"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }

141
PLAN_THUMBNAILS.md Normal file
View File

@@ -0,0 +1,141 @@
# Plan: Génération des vignettes à l'index
## 1. Base de données
### Migration SQL (`0010_add_thumbnails.sql`)
- [x] Ajouter `thumbnail_path TEXT` à la table `books` (nullable)
- [x] Ajouter settings pour thumbnails dans `app_settings`:
```json
{
"thumbnail": {
"enabled": true,
"width": 300,
"height": 400,
"quality": 80,
"format": "webp"
}
}
```
---
## 2. Configuration
### `crates/core/src/config.rs`
- [x] Ajouter `ThumbnailConfig` struct
- [x] Ajouter champs dans `IndexerConfig`:
- `thumbnail_width: u32` (défaut: 300)
- `thumbnail_height: u32` (défaut: 400)
- `thumbnail_quality: u8` (défaut: 80)
- `thumbnail_dir: String` (défaut: `/data/thumbnails`)
- [x] Ajouter getter depuis env vars
---
## 3. Indexer - Extraction de la 1ère page
### Fonction à créer dans `crates/parsers/src/lib.rs`
- [x] `extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>>`
- Réutiliser logique de `pages.rs:extract_cbz_page`
- Réutiliser logique de `pages.rs:extract_cbr_page`
- Réutiliser logique de `pages.rs:render_pdf_page`
### Fonction de génération vignette dans `apps/indexer/src/main.rs`
- [x] `generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>>`
- Load image avec `image::load_from_memory`
- Resize avec `image::resize` (ratio kept)
- Encode en WebP avec `webp::Encoder`
- [x] `save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> Result<String>`
### Intégration dans `scan_library`
- [x] Après parsing metadata, extraire 1ère page
- [x] Générer vignette et sauvegarder
- [x] Stocker chemin en DB (via batch insert)
---
## 4. Indexer - WalkDir parallèle
### Remplacement de `WalkDir` séquentiel
- [x] Utiliser `rayon` pour paralléliser le scan:
```rust
let total_files: usize = library_paths.par_iter()
.map(|root_path| { ... })
.sum();
```
- [x] Ajouter `rayon = "1.10"` dans workspace dependencies
---
## 5. API - Service des vignettes
### Mise à jour models dans `apps/api/src/books.rs`
- [x] Ajouter `thumbnail_url: Option<String>` à `BookItem`
- [x] Ajouter `thumbnail_url: Option<String>` à `BookDetails`
- [x] Mise à jour des requêtes SQL pour récupérer `thumbnail_path`
### Nouvelle route dans `apps/api/src/main.rs`
- [x] Route `/books/:id/thumbnail` (GET)
- Retourne fichier statique depuis `thumbnail_path`
- Content-Type: image/webp
- Cache-Control: public, max-age=31536000
### Suppression cache 1ère page (optionnel)
- [ ] Optionnel: simplifier `pages.rs` car thumbnail pré-générée
- [ ] Garder render pour pages > 1
### Adapter backoffice
La récupération des thumbnails est faite par une route page/1.
- [x] Passer par la nouvelle route avec une route clean /thumbnail pour chaque cover.
### refacto code entre api et indexer
En fait l'indexer pourrait appeler l'api pour qu'elle fasse les vignettes : c'est l'api qui est responsable des images et des lectures ebooks. Je préfère que chaque domaine soit bien respecté. À la fin d'un build, on appelle l'api pour faire le checkup des thumbnails.
Il faudra que, côté backoffice, partout où on peut voir le traitement live des jobs, on voie une phase où le traitement des thumbnails apparaît en SSE. Côté api, si on n'a pas de thumbnail, on passe par le code actuel de pages.
- [x] Migration `0010_index_job_thumbnails_phase.sql`: status `generating_thumbnails` dans index_jobs
- [x] API: `get_thumbnail` fallback sur page 1 si pas de thumbnail_path (via `pages::render_book_page_1`)
- [x] API: module `thumbnails.rs`, POST `/index/jobs/:id/thumbnails/checkup` (admin), lance la génération en tâche de fond et met à jour la job
- [x] Indexer: plus de génération de thumbnails; en fin de build: status = `generating_thumbnails`, puis appel API checkup; config `api_base_url` + `api_bootstrap_token` (core)
- [x] Backoffice: StatusBadge "Thumbnails" pour `generating_thumbnails`; JobProgress/JobRow/JobsIndicator/page job détail: phase thumbnails visible en SSE (X/Y thumbnails, barre de progression)
---
## 6. Settings API
### Endpoint settings existant
- [ ] Vérifier que `/settings` expose thumbnail config
- [ ] Ajouter endpoint PUT pour mettre à jour thumbnail settings
---
## 7. Tâches diverses
- [x] Ajouter dependency `image` et `webp` dans indexer `Cargo.toml`
- [x] Build release vérifié
---
## Ordre d'implémentation suggéré
1. [x] Migration DB + settings
2. [x] Config + parsers (extract first page)
3. [x] Indexer thumbnail generation + save to disk
4. [x] API serve thumbnail
5. [x] Parallel walkdir
6. [ ] Tests & polish (à faire)
---
## Post-déploiement
- [ ] Appliquer la migration SQL : `psql -f infra/migrations/0010_add_thumbnails.sql`
- [ ] Créer dossier thumbnails: `mkdir -p /data/thumbnails`
- [ ] Configurer env vars si besoin:
- `THUMBNAIL_ENABLED=true`
- `THUMBNAIL_WIDTH=300`
- `THUMBNAIL_HEIGHT=400`
- `THUMBNAIL_QUALITY=80`
- `THUMBNAIL_DIRECTORY=/data/thumbnails`

View File

@@ -31,3 +31,5 @@ uuid.workspace = true
zip = { version = "2.2", default-features = false, features = ["deflate"] }
utoipa.workspace = true
utoipa-swagger-ui = { workspace = true, features = ["axum"] }
webp = "0.3"
walkdir = "2"

View File

@@ -21,7 +21,10 @@ RUN --mount=type=cache,target=/sccache \
cargo build --release -p api
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free poppler-utils && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unar poppler-utils locales && rm -rf /var/lib/apt/lists/*
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
COPY --from=builder /app/target/release/api /usr/local/bin/api
EXPOSE 8080
CMD ["/usr/local/bin/api"]

View File

@@ -34,6 +34,7 @@ pub struct BookItem {
pub volume: Option<i32>,
pub language: Option<String>,
pub page_count: Option<i32>,
pub thumbnail_url: Option<String>,
#[schema(value_type = String)]
pub updated_at: DateTime<Utc>,
}
@@ -58,6 +59,7 @@ pub struct BookDetails {
pub volume: Option<i32>,
pub language: Option<String>,
pub page_count: Option<i32>,
pub thumbnail_url: Option<String>,
pub file_path: Option<String>,
pub file_format: Option<String>,
pub file_parse_status: Option<String>,
@@ -96,7 +98,7 @@ pub async fn list_books(
let sql = format!(
r#"
SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at
SELECT id, library_id, kind, title, author, series, volume, language, page_count, thumbnail_path, updated_at
FROM books
WHERE ($1::uuid IS NULL OR library_id = $1)
AND ($2::text IS NULL OR kind = $2)
@@ -107,7 +109,7 @@ pub async fn list_books(
REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
-- Extract first number group and convert to integer for numeric sort
COALESCE(
NULLIF(REGEXP_REPLACE(LOWER(title), '^[^0-9]*', '', 'g'), '')::int,
(REGEXP_MATCH(LOWER(title), '\d+'))[1]::int,
0
),
-- Then by full title as fallback
@@ -135,17 +137,21 @@ pub async fn list_books(
let mut items: Vec<BookItem> = rows
.iter()
.take(limit as usize)
.map(|row| BookItem {
id: row.get("id"),
library_id: row.get("library_id"),
kind: row.get("kind"),
title: row.get("title"),
author: row.get("author"),
series: row.get("series"),
volume: row.get("volume"),
language: row.get("language"),
page_count: row.get("page_count"),
updated_at: row.get("updated_at"),
.map(|row| {
let thumbnail_path: Option<String> = row.get("thumbnail_path");
BookItem {
id: row.get("id"),
library_id: row.get("library_id"),
kind: row.get("kind"),
title: row.get("title"),
author: row.get("author"),
series: row.get("series"),
volume: row.get("volume"),
language: row.get("language"),
page_count: row.get("page_count"),
thumbnail_url: thumbnail_path.map(|_p| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
updated_at: row.get("updated_at"),
}
})
.collect();
@@ -182,7 +188,7 @@ pub async fn get_book(
) -> Result<Json<BookDetails>, ApiError> {
let row = sqlx::query(
r#"
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count,
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count, b.thumbnail_path,
bf.abs_path, bf.format, bf.parse_status
FROM books b
LEFT JOIN LATERAL (
@@ -200,6 +206,7 @@ pub async fn get_book(
.await?;
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
let thumbnail_path: Option<String> = row.get("thumbnail_path");
Ok(Json(BookDetails {
id: row.get("id"),
library_id: row.get("library_id"),
@@ -210,6 +217,7 @@ pub async fn get_book(
volume: row.get("volume"),
language: row.get("language"),
page_count: row.get("page_count"),
thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
file_path: row.get("abs_path"),
file_format: row.get("format"),
file_parse_status: row.get("parse_status"),
@@ -224,16 +232,33 @@ pub struct SeriesItem {
pub first_book_id: Uuid,
}
/// List all series in a library
#[derive(Serialize, ToSchema)]
pub struct SeriesPage {
pub items: Vec<SeriesItem>,
#[schema(value_type = Option<String>)]
pub next_cursor: Option<String>,
}
#[derive(Deserialize, ToSchema)]
pub struct ListSeriesQuery {
#[schema(value_type = Option<String>)]
pub cursor: Option<String>,
#[schema(value_type = Option<i64>, example = 50)]
pub limit: Option<i64>,
}
/// List all series in a library with pagination
#[utoipa::path(
get,
path = "/libraries/{library_id}/series",
tag = "books",
params(
("library_id" = String, Path, description = "Library UUID"),
("cursor" = Option<String>, Query, description = "Cursor for pagination (series name)"),
("limit" = Option<i64>, Query, description = "Max items to return (max 200)"),
),
responses(
(status = 200, body = Vec<SeriesItem>),
(status = 200, body = SeriesPage),
(status = 401, description = "Unauthorized"),
),
security(("Bearer" = []))
@@ -241,7 +266,10 @@ pub struct SeriesItem {
pub async fn list_series(
State(state): State<AppState>,
Path(library_id): Path<Uuid>,
) -> Result<Json<Vec<SeriesItem>>, ApiError> {
Query(query): Query<ListSeriesQuery>,
) -> Result<Json<SeriesPage>, ApiError> {
let limit = query.limit.unwrap_or(50).clamp(1, 200);
let rows = sqlx::query(
r#"
WITH sorted_books AS (
@@ -253,7 +281,7 @@ pub async fn list_series(
PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
ORDER BY
REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
COALESCE(NULLIF(REGEXP_REPLACE(LOWER(title), '^[^0-9]*', '', 'g'), '')::int, 0),
COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
title ASC
) as rn
FROM books
@@ -272,23 +300,28 @@ pub async fn list_series(
sb.id as first_book_id
FROM series_counts sc
JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1
WHERE ($2::text IS NULL OR sc.name > $2)
ORDER BY
-- Natural sort: extract text part before numbers
REGEXP_REPLACE(LOWER(sc.name), '[0-9]+', '', 'g'),
-- Extract first number group and convert to integer
COALESCE(
NULLIF(REGEXP_REPLACE(LOWER(sc.name), '^[^0-9]*', '', 'g'), '')::int,
(REGEXP_MATCH(LOWER(sc.name), '\d+'))[1]::int,
0
),
sc.name ASC
LIMIT $3
"#,
)
.bind(library_id)
.bind(query.cursor.as_deref())
.bind(limit + 1)
.fetch_all(&state.pool)
.await?;
let series: Vec<SeriesItem> = rows
let mut items: Vec<SeriesItem> = rows
.iter()
.take(limit as usize)
.map(|row| SeriesItem {
name: row.get("name"),
book_count: row.get("book_count"),
@@ -296,5 +329,51 @@ pub async fn list_series(
})
.collect();
Ok(Json(series))
let next_cursor = if rows.len() > limit as usize {
items.last().map(|s| s.name.clone())
} else {
None
};
Ok(Json(SeriesPage {
items: std::mem::take(&mut items),
next_cursor,
}))
}
use axum::{
body::Body,
http::{header, HeaderMap, HeaderValue, StatusCode},
response::IntoResponse,
};
pub async fn get_thumbnail(
State(state): State<AppState>,
Path(book_id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
let row = sqlx::query("SELECT thumbnail_path FROM books WHERE id = $1")
.bind(book_id)
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
let thumbnail_path: Option<String> = row.get("thumbnail_path");
let data = if let Some(ref path) = thumbnail_path {
std::fs::read(path)
.map_err(|e| ApiError::internal(format!("cannot read thumbnail: {}", e)))?
} else {
// Fallback: render page 1 on the fly (same as pages logic)
crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
};
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, HeaderValue::from_static("image/webp"));
headers.insert(
header::CACHE_CONTROL,
HeaderValue::from_static("public, max-age=31536000, immutable"),
);
Ok((StatusCode::OK, headers, Body::from(data)))
}

View File

@@ -1,4 +1,8 @@
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
use axum::{
http::StatusCode,
response::{IntoResponse, Response},
Json,
};
use serde::Serialize;
#[derive(Debug)]
@@ -51,7 +55,13 @@ impl ApiError {
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
(self.status, Json(ErrorBody { error: &self.message })).into_response()
(
self.status,
Json(ErrorBody {
error: &self.message,
}),
)
.into_response()
}
}
@@ -60,3 +70,9 @@ impl From<sqlx::Error> for ApiError {
Self::internal(format!("database error: {err}"))
}
}
impl From<std::io::Error> for ApiError {
fn from(err: std::io::Error) -> Self {
Self::internal(format!("IO error: {err}"))
}
}

View File

@@ -34,6 +34,9 @@ pub struct IndexJobResponse {
pub error_opt: Option<String>,
#[schema(value_type = String)]
pub created_at: DateTime<Utc>,
pub progress_percent: Option<i32>,
pub processed_files: Option<i32>,
pub total_files: Option<i32>,
}
#[derive(Serialize, ToSchema)]
@@ -142,7 +145,7 @@ pub async fn enqueue_rebuild(
)]
pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
let rows = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs ORDER BY created_at DESC LIMIT 100",
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
)
.fetch_all(&state.pool)
.await?;
@@ -171,7 +174,7 @@ pub async fn cancel_job(
id: axum::extract::Path<Uuid>,
) -> Result<Json<IndexJobResponse>, ApiError> {
let rows_affected = sqlx::query(
"UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running')",
"UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'generating_thumbnails')",
)
.bind(id.0)
.execute(&state.pool)
@@ -182,7 +185,7 @@ pub async fn cancel_job(
}
let row = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
)
.bind(id.0)
.fetch_one(&state.pool)
@@ -298,6 +301,9 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
stats_json: row.get("stats_json"),
error_opt: row.get("error_opt"),
created_at: row.get("created_at"),
progress_percent: row.try_get("progress_percent").ok(),
processed_files: row.try_get("processed_files").ok(),
total_files: row.try_get("total_files").ok(),
}
}
@@ -333,9 +339,9 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
)]
pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
let rows = sqlx::query(
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
FROM index_jobs
WHERE status IN ('pending', 'running')
WHERE status IN ('pending', 'running', 'generating_thumbnails')
ORDER BY created_at ASC"
)
.fetch_all(&state.pool)

View File

@@ -6,6 +6,8 @@ mod libraries;
mod openapi;
mod pages;
mod search;
mod settings;
mod thumbnails;
mod tokens;
use std::{
@@ -84,7 +86,7 @@ async fn main() -> anyhow::Result<()> {
meili_url: Arc::from(config.meili_url),
meili_master_key: Arc::from(config.meili_master_key),
page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
page_render_limit: Arc::new(Semaphore::new(4)),
page_render_limit: Arc::new(Semaphore::new(8)),
metrics: Arc::new(Metrics::new()),
read_rate_limit: Arc::new(Mutex::new(ReadRateLimit {
window_started_at: Instant::now(),
@@ -98,15 +100,19 @@ async fn main() -> anyhow::Result<()> {
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
.route("/index/status", get(index_jobs::list_index_jobs))
.route("/index/jobs/active", get(index_jobs::get_active_jobs))
.route("/index/jobs/:id", get(index_jobs::get_job_details))
.route("/index/jobs/:id/stream", get(index_jobs::stream_job_progress))
.route("/index/jobs/:id/thumbnails/checkup", axum::routing::post(thumbnails::start_checkup))
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
.route("/folders", get(index_jobs::list_folders))
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
.route("/admin/tokens/:id", delete(tokens::revoke_token))
.merge(settings::settings_routes())
.route_layer(middleware::from_fn_with_state(
state.clone(),
auth::require_admin,
@@ -115,6 +121,7 @@ async fn main() -> anyhow::Result<()> {
let read_routes = Router::new()
.route("/books", get(books::list_books))
.route("/books/:id", get(books::get_book))
.route("/books/:id/thumbnail", get(books::get_thumbnail))
.route("/books/:id/pages/:n", get(pages::get_page))
.route("/libraries/:library_id/series", get(books::list_series))
.route("/search", get(search::search_books))

View File

@@ -10,6 +10,8 @@ use utoipa::OpenApi;
crate::pages::get_page,
crate::search::search_books,
crate::index_jobs::enqueue_rebuild,
crate::thumbnails::start_thumbnails_rebuild,
crate::thumbnails::start_thumbnails_regenerate,
crate::index_jobs::list_index_jobs,
crate::index_jobs::get_active_jobs,
crate::index_jobs::get_job_details,
@@ -37,6 +39,7 @@ use utoipa::OpenApi;
crate::search::SearchQuery,
crate::search::SearchResponse,
crate::index_jobs::RebuildRequest,
crate::thumbnails::ThumbnailsRebuildRequest,
crate::index_jobs::IndexJobResponse,
crate::index_jobs::IndexJobDetailResponse,
crate::index_jobs::JobErrorResponse,

View File

@@ -1,6 +1,6 @@
use std::{
io::Read,
path::Path,
io::{Read, Write},
path::{Path, PathBuf},
sync::{atomic::Ordering, Arc},
time::Duration,
};
@@ -11,12 +11,14 @@ use axum::{
http::{header, HeaderMap, HeaderValue, StatusCode},
response::{IntoResponse, Response},
};
use image::{codecs::jpeg::JpegEncoder, codecs::png::PngEncoder, codecs::webp::WebPEncoder, ColorType, ImageEncoder};
use image::{codecs::jpeg::JpegEncoder, codecs::png::PngEncoder, ColorType, ImageEncoder, ImageFormat};
use serde::Deserialize;
use utoipa::ToSchema;
use sha2::{Digest, Sha256};
use sqlx::Row;
use tracing::{debug, error, info, instrument, warn};
use uuid::Uuid;
use walkdir::WalkDir;
use crate::{error::ApiError, AppState};
@@ -29,7 +31,44 @@ fn remap_libraries_path(path: &str) -> String {
path.to_string()
}
#[derive(Deserialize, ToSchema)]
/// Resolve the on-disk image cache root.
///
/// Honours the `IMAGE_CACHE_DIR` environment variable when set; otherwise
/// falls back to `/tmp/stripstream-image-cache`.
fn get_image_cache_dir() -> PathBuf {
    match std::env::var("IMAGE_CACHE_DIR") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => PathBuf::from("/tmp/stripstream-image-cache"),
    }
}
/// Build a stable disk-cache key by hashing every parameter that affects the
/// rendered output bytes: source path, page number, target format, quality
/// and width. Returns the SHA-256 digest as 64 lowercase hex characters.
fn get_cache_key(abs_path: &str, page: u32, format: &str, quality: u8, width: u32) -> String {
    let page_bytes = page.to_le_bytes();
    let quality_bytes = quality.to_le_bytes();
    let width_bytes = width.to_le_bytes();
    let parts: [&[u8]; 5] = [
        abs_path.as_bytes(),
        &page_bytes,
        format.as_bytes(),
        &quality_bytes,
        &width_bytes,
    ];
    let mut digest = Sha256::new();
    for part in parts {
        digest.update(part);
    }
    format!("{:x}", digest.finalize())
}
/// Map a cache key to its file path: `<cache-root>/<first-2-hex>/<key>.<ext>`.
///
/// The two-character prefix shards entries across subdirectories so no single
/// directory accumulates every cached file.
fn get_cache_path(cache_key: &str, format: &OutputFormat) -> PathBuf {
    // Cache keys are 64 hex chars (SHA-256 from get_cache_key), so the
    // two-byte slice cannot panic.
    let shard = &cache_key[..2];
    let file_name = format!("{}.{}", cache_key, format.extension());
    get_image_cache_dir().join(shard).join(file_name)
}
/// Fetch a previously cached file, treating any I/O failure (most commonly
/// "file not found") as a cache miss.
fn read_from_disk_cache(cache_path: &Path) -> Option<Vec<u8>> {
    match std::fs::read(cache_path) {
        Ok(bytes) => Some(bytes),
        Err(_) => None,
    }
}
/// Persist rendered image bytes at `cache_path`, creating parent directories
/// as needed.
///
/// Writes to a uniquely named temporary sibling first and renames it into
/// place, so a concurrent `read_from_disk_cache` can never observe a
/// partially written entry (same-directory rename is atomic on POSIX).
fn write_to_disk_cache(cache_path: &Path, data: &[u8]) -> Result<(), std::io::Error> {
    let parent = match cache_path.parent() {
        Some(p) => {
            std::fs::create_dir_all(p)?;
            p.to_path_buf()
        }
        None => PathBuf::from("."),
    };
    // pid + wall-clock nanos keeps concurrent writers of the same key from
    // clobbering each other's temp file.
    let unique = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map(|d| d.as_nanos())
        .unwrap_or(0);
    let tmp_path = parent.join(format!(".tmp-{}-{}", std::process::id(), unique));
    {
        let mut file = std::fs::File::create(&tmp_path)?;
        file.write_all(data)?;
        // Flush content to stable storage before publishing the entry.
        file.sync_data()?;
    }
    std::fs::rename(&tmp_path, cache_path)?;
    Ok(())
}
#[derive(Deserialize, ToSchema, Debug)]
pub struct PageQuery {
#[schema(value_type = Option<String>, example = "webp")]
pub format: Option<String>,
@@ -39,7 +78,7 @@ pub struct PageQuery {
pub width: Option<u32>,
}
#[derive(Clone, Copy)]
#[derive(Clone, Copy, Debug)]
enum OutputFormat {
Jpeg,
Png,
@@ -93,12 +132,16 @@ impl OutputFormat {
),
security(("Bearer" = []))
)]
#[instrument(skip(state), fields(book_id = %book_id, page = n))]
pub async fn get_page(
State(state): State<AppState>,
AxumPath((book_id, n)): AxumPath<(Uuid, u32)>,
Query(query): Query<PageQuery>,
) -> Result<Response, ApiError> {
info!("Processing image request");
if n == 0 {
warn!("Invalid page number: 0");
return Err(ApiError::bad_request("page index starts at 1"));
}
@@ -106,15 +149,19 @@ pub async fn get_page(
let quality = query.quality.unwrap_or(80).clamp(1, 100);
let width = query.width.unwrap_or(0);
if width > 2160 {
warn!("Invalid width: {}", width);
return Err(ApiError::bad_request("width must be <= 2160"));
}
let cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension());
if let Some(cached) = state.page_cache.lock().await.get(&cache_key).cloned() {
let memory_cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension());
if let Some(cached) = state.page_cache.lock().await.get(&memory_cache_key).cloned() {
state.metrics.page_cache_hits.fetch_add(1, Ordering::Relaxed);
return Ok(image_response(cached, format.content_type()));
debug!("Memory cache hit for key: {}", memory_cache_key);
return Ok(image_response(cached, format.content_type(), None));
}
state.metrics.page_cache_misses.fetch_add(1, Ordering::Relaxed);
debug!("Memory cache miss for key: {}", memory_cache_key);
let row = sqlx::query(
r#"
@@ -127,11 +174,128 @@ pub async fn get_page(
)
.bind(book_id)
.fetch_optional(&state.pool)
.await?;
.await
.map_err(|e| {
error!("Database error fetching book file for book_id {}: {}", book_id, e);
e
})?;
let row = match row {
Some(r) => r,
None => {
error!("Book file not found for book_id: {}", book_id);
return Err(ApiError::not_found("book file not found"));
}
};
let abs_path: String = row.get("abs_path");
let abs_path = remap_libraries_path(&abs_path);
let input_format: String = row.get("format");
info!("Processing book file: {} (format: {})", abs_path, input_format);
let disk_cache_key = get_cache_key(&abs_path, n, format.extension(), quality, width);
let cache_path = get_cache_path(&disk_cache_key, &format);
if let Some(cached_bytes) = read_from_disk_cache(&cache_path) {
info!("Disk cache hit for: {}", cache_path.display());
let bytes = Arc::new(cached_bytes);
state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
return Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)));
}
debug!("Disk cache miss for: {}", cache_path.display());
let _permit = state
.page_render_limit
.clone()
.acquire_owned()
.await
.map_err(|e| {
error!("Failed to acquire render permit: {}", e);
ApiError::internal("render limiter unavailable")
})?;
info!("Rendering page {} from {}", n, abs_path);
let abs_path_clone = abs_path.clone();
let format_clone = format;
let start_time = std::time::Instant::now();
let bytes = tokio::time::timeout(
Duration::from_secs(60),
tokio::task::spawn_blocking(move || {
render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width)
}),
)
.await
.map_err(|_| {
error!("Page rendering timeout for {} page {}", abs_path, n);
ApiError::internal("page rendering timeout")
})?
.map_err(|e| {
error!("Render task panicked for {} page {}: {}", abs_path, n, e);
ApiError::internal(format!("render task failed: {e}"))
})?;
let duration = start_time.elapsed();
match bytes {
Ok(data) => {
info!("Successfully rendered page {} in {:?}", n, duration);
if let Err(e) = write_to_disk_cache(&cache_path, &data) {
warn!("Failed to write to disk cache: {}", e);
} else {
info!("Cached rendered image to: {}", cache_path.display());
}
let bytes = Arc::new(data);
state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)))
}
Err(e) => {
error!("Failed to render page {} from {}: {:?}", n, abs_path, e);
Err(e)
}
}
}
/// Build a 200 response carrying image bytes with long-lived caching headers.
///
/// The ETag is the caller-provided cache key when available (cheap); when the
/// caller has none, a SHA-256 of the body is computed on the fly. Content is
/// marked immutable for one year, matching the content-addressed cache keys.
fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&str>) -> Response {
    let content_type_value = HeaderValue::from_str(content_type)
        .unwrap_or(HeaderValue::from_static("application/octet-stream"));
    let etag = match etag_suffix {
        Some(suffix) => format!("\"{}\"", suffix),
        None => {
            // No precomputed key: derive a strong validator from the payload.
            let mut hasher = Sha256::new();
            hasher.update(&*bytes);
            format!("\"{:x}\"", hasher.finalize())
        }
    };
    let mut headers = HeaderMap::new();
    headers.insert(header::CONTENT_TYPE, content_type_value);
    headers.insert(
        header::CACHE_CONTROL,
        HeaderValue::from_static("public, max-age=31536000, immutable"),
    );
    if let Ok(value) = HeaderValue::from_str(&etag) {
        headers.insert(header::ETAG, value);
    }
    (StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
}
/// Render page 1 of a book (for thumbnail fallback or thumbnail checkup). Uses thumbnail dimensions by default.
pub async fn render_book_page_1(
state: &AppState,
book_id: Uuid,
width: u32,
quality: u8,
) -> Result<Vec<u8>, ApiError> {
let row = sqlx::query(
r#"SELECT abs_path, format FROM book_files WHERE book_id = $1 ORDER BY updated_at DESC LIMIT 1"#,
)
.bind(book_id)
.fetch_optional(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
let abs_path: String = row.get("abs_path");
// Remap /libraries to LIBRARIES_ROOT_PATH for local development
let abs_path = remap_libraries_path(&abs_path);
let input_format: String = row.get("format");
@@ -142,31 +306,25 @@ pub async fn get_page(
.await
.map_err(|_| ApiError::internal("render limiter unavailable"))?;
let abs_path_clone = abs_path.clone();
let bytes = tokio::time::timeout(
Duration::from_secs(12),
tokio::task::spawn_blocking(move || render_page(&abs_path, &input_format, n, &format, quality, width)),
Duration::from_secs(60),
tokio::task::spawn_blocking(move || {
render_page(
&abs_path_clone,
&input_format,
1,
&OutputFormat::Webp,
quality,
width,
)
}),
)
.await
.map_err(|_| ApiError::internal("page rendering timeout"))?
.map_err(|e| ApiError::internal(format!("render task failed: {e}")))??;
.map_err(|e| ApiError::internal(format!("render task failed: {e}")))?;
let bytes = Arc::new(bytes);
state.page_cache.lock().await.put(cache_key, bytes.clone());
Ok(image_response(bytes, format.content_type()))
}
fn image_response(bytes: Arc<Vec<u8>>, content_type: &str) -> Response {
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=300"));
let mut hasher = Sha256::new();
hasher.update(&*bytes);
let etag = format!("\"{:x}\"", hasher.finalize());
if let Ok(v) = HeaderValue::from_str(&etag) {
headers.insert(header::ETAG, v);
}
(StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
bytes
}
fn render_page(
@@ -188,62 +346,118 @@ fn render_page(
}
/// Extract the raw bytes of page `page_number` (1-based) from a CBZ (zip)
/// archive. Pages are the archive's image entries, sorted by name for a
/// deterministic order.
fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
    if page_number == 0 {
        // Pages are 1-based; guard against the index underflow below.
        return Err(ApiError::not_found("page out of range"));
    }
    debug!("Opening CBZ archive: {}", abs_path);
    let file = std::fs::File::open(abs_path).map_err(|e| {
        error!("Cannot open CBZ file {}: {}", abs_path, e);
        ApiError::internal(format!("cannot open cbz: {e}"))
    })?;
    let mut archive = zip::ZipArchive::new(file).map_err(|e| {
        error!("Invalid CBZ archive {}: {}", abs_path, e);
        ApiError::internal(format!("invalid cbz: {e}"))
    })?;
    // Collect entry names with an image extension (case-insensitive match).
    let mut image_names: Vec<String> = Vec::new();
    for i in 0..archive.len() {
        let entry = archive.by_index(i).map_err(|e| {
            error!("Failed to read CBZ entry {} in {}: {}", i, abs_path, e);
            ApiError::internal(format!("cbz entry read failed: {e}"))
        })?;
        let name = entry.name().to_ascii_lowercase();
        if is_image_name(&name) {
            image_names.push(entry.name().to_string());
        }
    }
    image_names.sort();
    debug!("Found {} images in CBZ {}", image_names.len(), abs_path);
    let index = page_number as usize - 1;
    let selected = image_names.get(index).ok_or_else(|| {
        error!("Page {} out of range in {} (total: {})", page_number, abs_path, image_names.len());
        ApiError::not_found("page out of range")
    })?;
    debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path);
    let mut entry = archive.by_name(selected).map_err(|e| {
        error!("Failed to read CBZ page {} from {}: {}", selected, abs_path, e);
        ApiError::internal(format!("cbz page read failed: {e}"))
    })?;
    let mut buf = Vec::new();
    entry.read_to_end(&mut buf).map_err(|e| {
        error!("Failed to load CBZ page {} from {}: {}", selected, abs_path, e);
        ApiError::internal(format!("cbz page load failed: {e}"))
    })?;
    Ok(buf)
}
fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
let list_output = std::process::Command::new("unrar")
.arg("lb")
.arg(abs_path)
.output()
.map_err(|e| ApiError::internal(format!("unrar list failed: {e}")))?;
if !list_output.status.success() {
return Err(ApiError::internal("unrar could not list archive"));
}
let mut entries: Vec<String> = String::from_utf8_lossy(&list_output.stdout)
.lines()
.filter(|line| is_image_name(&line.to_ascii_lowercase()))
.map(|s| s.to_string())
.collect();
entries.sort();
info!("Opening CBR archive: {}", abs_path);
let index = page_number as usize - 1;
let selected = entries.get(index).ok_or_else(|| ApiError::not_found("page out of range"))?;
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-{}", Uuid::new_v4()));
debug!("Creating temp dir for CBR extraction: {}", tmp_dir.display());
std::fs::create_dir_all(&tmp_dir).map_err(|e| {
error!("Cannot create temp dir: {}", e);
ApiError::internal(format!("temp dir error: {}", e))
})?;
let page_output = std::process::Command::new("unrar")
.arg("p")
.arg("-inul")
// Extract directly - skip listing which fails on UTF-16 encoded filenames
let extract_output = std::process::Command::new("env")
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
.arg(&tmp_dir)
.arg(abs_path)
.arg(selected)
.output()
.map_err(|e| ApiError::internal(format!("unrar extract failed: {e}")))?;
if !page_output.status.success() {
return Err(ApiError::internal("unrar could not extract page"));
.map_err(|e| {
let _ = std::fs::remove_dir_all(&tmp_dir);
error!("unar extract failed: {}", e);
ApiError::internal(format!("unar extract failed: {e}"))
})?;
if !extract_output.status.success() {
let _ = std::fs::remove_dir_all(&tmp_dir);
let stderr = String::from_utf8_lossy(&extract_output.stderr);
error!("unar extract failed {}: {}", abs_path, stderr);
return Err(ApiError::internal("unar extract failed"));
}
Ok(page_output.stdout)
// Find and read the requested image (recursive search for CBR files with subdirectories)
let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
let name = e.file_name().to_string_lossy().to_lowercase();
is_image_name(&name)
})
.collect();
image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
let selected = image_files.get(index).ok_or_else(|| {
let _ = std::fs::remove_dir_all(&tmp_dir);
error!("Page {} not found (total: {})", page_number, image_files.len());
ApiError::not_found("page out of range")
})?;
let data = std::fs::read(selected.path()).map_err(|e| {
let _ = std::fs::remove_dir_all(&tmp_dir);
error!("read failed: {}", e);
ApiError::internal(format!("read error: {}", e))
})?;
let _ = std::fs::remove_dir_all(&tmp_dir);
info!("Successfully extracted CBR page {} ({} bytes)", page_number, data.len());
Ok(data)
}
fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-{}", Uuid::new_v4()));
std::fs::create_dir_all(&tmp_dir).map_err(|e| ApiError::internal(format!("cannot create temp dir: {e}")))?;
debug!("Creating temp dir for PDF rendering: {}", tmp_dir.display());
std::fs::create_dir_all(&tmp_dir).map_err(|e| {
error!("Cannot create temp dir {}: {}", tmp_dir.display(), e);
ApiError::internal(format!("cannot create temp dir: {e}"))
})?;
let output_prefix = tmp_dir.join("page");
let mut cmd = std::process::Command::new("pdftoppm");
@@ -256,28 +470,58 @@ fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u
}
cmd.arg(abs_path).arg(&output_prefix);
debug!("Running pdftoppm for page {} of {} (width: {})", page_number, abs_path, width);
let output = cmd
.output()
.map_err(|e| ApiError::internal(format!("pdf render failed: {e}")))?;
.map_err(|e| {
error!("pdftoppm command failed for {} page {}: {}", abs_path, page_number, e);
ApiError::internal(format!("pdf render failed: {e}"))
})?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
let _ = std::fs::remove_dir_all(&tmp_dir);
error!("pdftoppm failed for {} page {}: {}", abs_path, page_number, stderr);
return Err(ApiError::internal("pdf render command failed"));
}
let image_path = output_prefix.with_extension("png");
let bytes = std::fs::read(&image_path).map_err(|e| ApiError::internal(format!("render output missing: {e}")))?;
debug!("Reading rendered PDF page from: {}", image_path.display());
let bytes = std::fs::read(&image_path).map_err(|e| {
error!("Failed to read rendered PDF output {}: {}", image_path.display(), e);
ApiError::internal(format!("render output missing: {e}"))
})?;
let _ = std::fs::remove_dir_all(&tmp_dir);
debug!("Successfully rendered PDF page {} to {} bytes", page_number, bytes.len());
Ok(bytes)
}
fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width: u32) -> Result<Vec<u8>, ApiError> {
let mut image = image::load_from_memory(input).map_err(|e| ApiError::internal(format!("invalid source image: {e}")))?;
debug!("Transcoding image: {} bytes, format: {:?}, quality: {}, width: {}", input.len(), out_format, quality, width);
let source_format = image::guess_format(input).ok();
debug!("Source format detected: {:?}", source_format);
let needs_transcode = source_format.map(|f| !format_matches(&f, out_format)).unwrap_or(true);
if width == 0 && !needs_transcode {
debug!("No transcoding needed, returning original");
return Ok(input.to_vec());
}
debug!("Loading image from memory...");
let mut image = image::load_from_memory(input).map_err(|e| {
error!("Failed to load image from memory: {} (input size: {} bytes)", e, input.len());
ApiError::internal(format!("invalid source image: {e}"))
})?;
if width > 0 {
debug!("Resizing image to width: {}", width);
image = image.resize(width, u32::MAX, image::imageops::FilterType::Lanczos3);
}
debug!("Converting to RGBA...");
let rgba = image.to_rgba8();
let (w, h) = rgba.dimensions();
debug!("Image dimensions: {}x{}", w, h);
let mut out = Vec::new();
match out_format {
OutputFormat::Jpeg => {
@@ -293,21 +537,38 @@ fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width:
.map_err(|e| ApiError::internal(format!("png encode failed: {e}")))?;
}
OutputFormat::Webp => {
let encoder = WebPEncoder::new_lossless(&mut out);
encoder
.write_image(&rgba, w, h, ColorType::Rgba8.into())
.map_err(|e| ApiError::internal(format!("webp encode failed: {e}")))?;
let rgb_data: Vec<u8> = rgba
.pixels()
.flat_map(|p| [p[0], p[1], p[2]])
.collect();
let webp_data = webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h)
.encode(f32::max(quality as f32, 85.0));
out.extend_from_slice(&webp_data);
}
}
Ok(out)
}
/// True when the decoded source format already equals the requested output
/// format, letting the caller short-circuit the decode/re-encode round trip.
fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool {
    matches!(
        (source, target),
        (ImageFormat::Jpeg, OutputFormat::Jpeg)
            | (ImageFormat::Png, OutputFormat::Png)
            | (ImageFormat::WebP, OutputFormat::Webp)
    )
}
/// Case-insensitive check for a raster-image file extension. Used to pick
/// page candidates out of CBZ/CBR archive listings.
fn is_image_name(name: &str) -> bool {
    const IMAGE_EXTENSIONS: [&str; 9] = [
        ".jpg", ".jpeg", ".png", ".webp", ".avif", ".gif", ".tif", ".tiff", ".bmp",
    ];
    let lower = name.to_lowercase();
    IMAGE_EXTENSIONS.iter().any(|ext| lower.ends_with(ext))
}
#[allow(dead_code)]

250
apps/api/src/settings.rs Normal file
View File

@@ -0,0 +1,250 @@
use axum::{
extract::State,
routing::{get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sqlx::Row;
use crate::{error::ApiError, AppState};
/// Request body for `POST /settings/:key` — the raw JSON value to store.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateSettingRequest {
    pub value: Value,
}
/// Outcome of a cache-clear request; `message` carries human-readable detail
/// (including the failure reason when `success` is false).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClearCacheResponse {
    pub success: bool,
    pub message: String,
}
/// Aggregate size and file count of the on-disk image cache directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CacheStats {
    pub total_size_mb: f64,
    pub file_count: u64,
    pub directory: String,
}
/// Aggregate size and file count of the generated-thumbnails directory.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThumbnailStats {
    pub total_size_mb: f64,
    pub file_count: u64,
    pub directory: String,
}
/// Admin settings router: key/value CRUD on `app_settings` plus cache and
/// thumbnail maintenance endpoints. Merged into the admin-protected route
/// group in `main`, so every route here requires the admin scope.
pub fn settings_routes() -> Router<AppState> {
    Router::new()
        .route("/settings", get(get_settings))
        .route("/settings/:key", get(get_setting).post(update_setting))
        .route("/settings/cache/clear", post(clear_cache))
        .route("/settings/cache/stats", get(get_cache_stats))
        .route("/settings/thumbnail/stats", get(get_thumbnail_stats))
}
/// GET /settings — return every row of `app_settings` as a single JSON
/// object keyed by setting name.
async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
    let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#)
        .fetch_all(&state.pool)
        .await?;
    let settings: serde_json::Map<String, Value> = rows
        .into_iter()
        .map(|row| (row.get::<String, _>("key"), row.get::<Value, _>("value")))
        .collect();
    Ok(Json(Value::Object(settings)))
}
/// GET /settings/:key — return a single setting's JSON value, or 404 when the
/// key does not exist.
async fn get_setting(
    State(state): State<AppState>,
    axum::extract::Path(key): axum::extract::Path<String>,
) -> Result<Json<Value>, ApiError> {
    let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = $1"#)
        .bind(&key)
        .fetch_optional(&state.pool)
        .await?;
    row.map(|r| Json(r.get::<Value, _>("value")))
        .ok_or_else(|| ApiError::not_found(format!("setting '{}' not found", key)))
}
/// POST /settings/:key — upsert a setting and echo back the stored value.
///
/// Uses a single `INSERT ... ON CONFLICT ... RETURNING` round trip so create
/// and update share one race-free statement.
async fn update_setting(
    State(state): State<AppState>,
    axum::extract::Path(key): axum::extract::Path<String>,
    Json(body): Json<UpdateSettingRequest>,
) -> Result<Json<Value>, ApiError> {
    let stored = sqlx::query(
        r#"
        INSERT INTO app_settings (key, value, updated_at)
        VALUES ($1, $2, CURRENT_TIMESTAMP)
        ON CONFLICT (key)
        DO UPDATE SET value = $2, updated_at = CURRENT_TIMESTAMP
        RETURNING value
        "#,
    )
    .bind(&key)
    .bind(&body.value)
    .fetch_one(&state.pool)
    .await?
    .get::<Value, _>("value");
    Ok(Json(stored))
}
/// POST /settings/cache/clear — delete the image cache directory.
///
/// Filesystem work runs on a blocking thread; a missing directory counts as
/// success, and removal failure is reported in the response rather than as
/// an HTTP error.
async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> {
    let cache_dir = std::env::var("IMAGE_CACHE_DIR")
        .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
    let result = tokio::task::spawn_blocking(move || {
        if !std::path::Path::new(&cache_dir).exists() {
            return ClearCacheResponse {
                success: true,
                message: format!("Cache directory '{}' does not exist, nothing to clear", cache_dir),
            };
        }
        match std::fs::remove_dir_all(&cache_dir) {
            Ok(()) => ClearCacheResponse {
                success: true,
                message: format!("Cache directory '{}' cleared successfully", cache_dir),
            },
            Err(e) => ClearCacheResponse {
                success: false,
                message: format!("Failed to clear cache: {}", e),
            },
        }
    })
    .await
    .map_err(|e| ApiError::internal(format!("cache clear failed: {}", e)))?;
    Ok(Json(result))
}
async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> {
let cache_dir = std::env::var("IMAGE_CACHE_DIR")
.unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
let cache_dir_clone = cache_dir.clone();
let stats = tokio::task::spawn_blocking(move || {
let path = std::path::Path::new(&cache_dir_clone);
if !path.exists() {
return CacheStats {
total_size_mb: 0.0,
file_count: 0,
directory: cache_dir_clone,
};
}
let mut total_size: u64 = 0;
let mut file_count: u64 = 0;
fn visit_dirs(
dir: &std::path::Path,
total_size: &mut u64,
file_count: &mut u64,
) -> std::io::Result<()> {
if dir.is_dir() {
for entry in std::fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
if path.is_dir() {
visit_dirs(&path, total_size, file_count)?;
} else {
*total_size += entry.metadata()?.len();
*file_count += 1;
}
}
}
Ok(())
}
let _ = visit_dirs(path, &mut total_size, &mut file_count);
CacheStats {
total_size_mb: total_size as f64 / 1024.0 / 1024.0,
file_count,
directory: cache_dir_clone,
}
})
.await
.map_err(|e| ApiError::internal(format!("cache stats failed: {}", e)))?;
Ok(Json(stats))
}
/// Walk `path` recursively and return `(total_bytes, file_count)` over every
/// non-directory entry beneath it.
///
/// Best-effort accounting for the stats endpoints: the first I/O error stops
/// the walk and the partial totals gathered so far are returned.
fn compute_dir_stats(path: &std::path::Path) -> (u64, u64) {
    let mut total_size: u64 = 0;
    let mut file_count: u64 = 0;
    // Explicit work stack instead of recursion.
    let mut pending = vec![path.to_path_buf()];
    'walk: while let Some(dir) = pending.pop() {
        if !dir.is_dir() {
            continue;
        }
        let entries = match std::fs::read_dir(&dir) {
            Ok(entries) => entries,
            Err(_) => break 'walk,
        };
        for entry in entries {
            let entry = match entry {
                Ok(e) => e,
                Err(_) => break 'walk,
            };
            let entry_path = entry.path();
            if entry_path.is_dir() {
                pending.push(entry_path);
            } else {
                match entry.metadata() {
                    Ok(meta) => total_size += meta.len(),
                    Err(_) => break 'walk,
                }
                file_count += 1;
            }
        }
    }
    (total_size, file_count)
}
/// GET /settings/thumbnail/stats — report the thumbnail directory's total
/// size (MiB) and file count.
///
/// The directory comes from the `thumbnail.directory` setting when present,
/// falling back to `/data/thumbnails`. Fixes: the state parameter was named
/// `_state` (the "unused" convention) while actually being used, and the
/// directory string was cloned needlessly before moving into the closure.
async fn get_thumbnail_stats(State(state): State<AppState>) -> Result<Json<ThumbnailStats>, ApiError> {
    let settings = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
        .fetch_optional(&state.pool)
        .await?;
    let directory = match settings {
        Some(row) => {
            let value: serde_json::Value = row.get("value");
            value
                .get("directory")
                .and_then(|v| v.as_str())
                .unwrap_or("/data/thumbnails")
                .to_string()
        }
        None => "/data/thumbnails".to_string(),
    };
    let stats = tokio::task::spawn_blocking(move || {
        let path = std::path::Path::new(&directory);
        if !path.exists() {
            // Missing directory just means no thumbnails generated yet.
            return ThumbnailStats {
                total_size_mb: 0.0,
                file_count: 0,
                directory,
            };
        }
        let (total_size, file_count) = compute_dir_stats(path);
        ThumbnailStats {
            total_size_mb: total_size as f64 / 1024.0 / 1024.0,
            file_count,
            directory,
        }
    })
    .await
    .map_err(|e| ApiError::internal(format!("thumbnail stats failed: {}", e)))?;
    Ok(Json(stats))
}

284
apps/api/src/thumbnails.rs Normal file
View File

@@ -0,0 +1,284 @@
use std::path::Path;
use anyhow::Context;
use axum::{
extract::{Path as AxumPath, State},
http::StatusCode,
Json,
};
use image::GenericImageView;
use serde::Deserialize;
use sqlx::Row;
use tracing::{info, warn};
use uuid::Uuid;
use utoipa::ToSchema;
use crate::{error::ApiError, index_jobs, pages, AppState};
/// Thumbnail generation settings, read from the `thumbnail` row of
/// `app_settings` (see `load_thumbnail_config` for the defaults).
#[derive(Clone)]
struct ThumbnailConfig {
    enabled: bool,      // master switch; when false, checkup jobs finish immediately
    width: u32,         // bounding-box width in pixels
    height: u32,        // bounding-box height in pixels
    quality: u8,        // WebP quality hint (floored at 85 by generate_thumbnail)
    directory: String,  // output directory for <book_id>.webp files
}
/// Load thumbnail settings from the `thumbnail` key of `app_settings`.
///
/// Falls back per field to built-in defaults when the row is missing, a field
/// is absent or mistyped, or the query itself fails — configuration problems
/// never abort thumbnail work.
async fn load_thumbnail_config(pool: &sqlx::PgPool) -> ThumbnailConfig {
    let fallback = ThumbnailConfig {
        enabled: true,
        width: 300,
        height: 400,
        quality: 80,
        directory: "/data/thumbnails".to_string(),
    };
    let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
        .fetch_optional(pool)
        .await;
    let value = match row {
        Ok(Some(row)) => row.get::<serde_json::Value, _>("value"),
        // No row or query failure: run with defaults.
        _ => return fallback,
    };
    ThumbnailConfig {
        enabled: value
            .get("enabled")
            .and_then(|v| v.as_bool())
            .unwrap_or(fallback.enabled),
        width: value
            .get("width")
            .and_then(|v| v.as_u64())
            .map(|v| v as u32)
            .unwrap_or(fallback.width),
        height: value
            .get("height")
            .and_then(|v| v.as_u64())
            .map(|v| v as u32)
            .unwrap_or(fallback.height),
        quality: value
            .get("quality")
            .and_then(|v| v.as_u64())
            .map(|v| v as u8)
            .unwrap_or(fallback.quality),
        directory: value
            .get("directory")
            .and_then(|v| v.as_str())
            .map(str::to_string)
            .unwrap_or_else(|| fallback.directory.clone()),
    }
}
/// Encode a page image as a WebP thumbnail fitting within the configured
/// `width`×`height` bounding box, preserving aspect ratio.
///
/// `DynamicImage::resize` already scales to fit within the given bounds while
/// keeping aspect ratio, so the previous manual ratio arithmetic was
/// redundant and is removed. Note: alpha is dropped (RGBA → RGB) before
/// encoding, and the encode quality is floored at 85 regardless of the
/// configured value, matching the page transcoder's behavior.
fn generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<Vec<u8>> {
    let img = image::load_from_memory(image_bytes).context("failed to load image")?;
    let resized = img.resize(config.width, config.height, image::imageops::FilterType::Lanczos3);
    let rgba = resized.to_rgba8();
    let (w, h) = rgba.dimensions();
    // Strip the alpha channel: thumbnails are encoded as opaque RGB WebP.
    let rgb_data: Vec<u8> = rgba.pixels().flat_map(|p| [p[0], p[1], p[2]]).collect();
    let quality = f32::max(config.quality as f32, 85.0);
    let webp_data =
        webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h).encode(quality);
    Ok(webp_data.to_vec())
}
/// Write thumbnail bytes to `<config.directory>/<book_id>.webp` and return
/// the path stored in the database.
///
/// Writes via a temp file + rename so a thumbnail being served concurrently
/// can never be read half-written (same-directory rename is atomic on POSIX).
fn save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> anyhow::Result<String> {
    let dir = Path::new(&config.directory);
    std::fs::create_dir_all(dir)?;
    let path = dir.join(format!("{}.webp", book_id));
    let tmp_path = dir.join(format!(".{}.webp.tmp", book_id));
    std::fs::write(&tmp_path, thumbnail_bytes)?;
    std::fs::rename(&tmp_path, &path)?;
    Ok(path.to_string_lossy().to_string())
}
/// Background worker for a thumbnail job: generates a thumbnail (from page 1)
/// for every in-scope book that lacks one, updating job progress on the
/// `index_jobs` row as it goes.
///
/// For `thumbnail_regenerate` jobs, all thumbnail paths in scope are cleared
/// first so every book is re-rendered. Per-book failures are logged and
/// skipped — the job still finishes with status 'success'. All DB updates are
/// best-effort (`let _ =`): progress bookkeeping must not abort generation.
async fn run_checkup(state: AppState, job_id: Uuid) {
    let pool = &state.pool;
    // Look up the job to learn its scope (library_id, possibly NULL = all)
    // and its type (rebuild vs regenerate).
    let row = sqlx::query("SELECT library_id, type FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await;
    let (library_id, job_type) = match row {
        Ok(Some(r)) => (
            r.get::<Option<Uuid>, _>("library_id"),
            r.get::<String, _>("type"),
        ),
        _ => {
            warn!("thumbnails checkup: job {} not found", job_id);
            return;
        }
    };
    // Regenerate: clear existing thumbnails in scope so they get regenerated
    if job_type == "thumbnail_regenerate" {
        // `$1 IS NULL` widens the scope to every library when no id was given.
        let cleared = sqlx::query(
            r#"UPDATE books SET thumbnail_path = NULL WHERE (library_id = $1 OR $1 IS NULL)"#,
        )
        .bind(library_id)
        .execute(pool)
        .await;
        if let Ok(res) = cleared {
            info!("thumbnails regenerate: cleared {} books", res.rows_affected());
        }
    }
    // Work list: every in-scope book currently missing a thumbnail.
    let book_ids: Vec<Uuid> = sqlx::query_scalar(
        r#"SELECT id FROM books WHERE (library_id = $1 OR $1 IS NULL) AND thumbnail_path IS NULL"#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .unwrap_or_default();
    let config = load_thumbnail_config(pool).await;
    if !config.enabled || book_ids.is_empty() {
        // Nothing to do: close the job out as successful immediately.
        let _ = sqlx::query(
            "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
        )
        .bind(job_id)
        .execute(pool)
        .await;
        return;
    }
    let total = book_ids.len() as i32;
    // Mark the job as actively generating, with counters reset, so the UI can
    // show live progress.
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'generating_thumbnails', total_files = $2, processed_files = 0, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .bind(total)
    .execute(pool)
    .await;
    for (i, &book_id) in book_ids.iter().enumerate() {
        // Render page 1 at thumbnail width, then shrink and encode it.
        match pages::render_book_page_1(&state, book_id, config.width, config.quality).await {
            Ok(page_bytes) => {
                match generate_thumbnail(&page_bytes, &config) {
                    Ok(thumb_bytes) => {
                        if let Ok(path) = save_thumbnail(book_id, &thumb_bytes, &config) {
                            // Progress counters only advance when the book row
                            // was actually updated with the new path.
                            if sqlx::query("UPDATE books SET thumbnail_path = $1 WHERE id = $2")
                                .bind(&path)
                                .bind(book_id)
                                .execute(pool)
                                .await
                                .is_ok()
                            {
                                let processed = (i + 1) as i32;
                                let percent = ((i + 1) as f64 / total as f64 * 100.0) as i32;
                                let _ = sqlx::query(
                                    "UPDATE index_jobs SET processed_files = $2, progress_percent = $3 WHERE id = $1",
                                )
                                .bind(job_id)
                                .bind(processed)
                                .bind(percent)
                                .execute(pool)
                                .await;
                            }
                        }
                    }
                    Err(e) => warn!("thumbnail generate failed for book {}: {:?}", book_id, e),
                }
            }
            Err(e) => warn!("render page 1 failed for book {}: {:?}", book_id, e),
        }
    }
    // Close the job out regardless of how many individual books failed.
    let _ = sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, current_file = NULL WHERE id = $1",
    )
    .bind(job_id)
    .execute(pool)
    .await;
    info!("thumbnails checkup finished for job {} ({} books)", job_id, total);
}
/// Optional request body for the thumbnail rebuild/regenerate endpoints;
/// when `library_id` is absent the job covers every library.
#[derive(Deserialize, ToSchema)]
pub struct ThumbnailsRebuildRequest {
    #[schema(value_type = Option<String>)]
    pub library_id: Option<Uuid>,
}
/// POST /index/thumbnails/rebuild — create a job and generate thumbnails for books that don't have one (optional library scope).
///
/// Returns the freshly created `index_jobs` row so the UI can immediately
/// highlight/track the job.
#[utoipa::path(
    post,
    path = "/index/thumbnails/rebuild",
    tag = "indexing",
    request_body = Option<ThumbnailsRebuildRequest>,
    responses(
        (status = 200, body = index_jobs::IndexJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_thumbnails_rebuild(
    State(state): State<AppState>,
    // Body is optional: no body (or no library_id) means "all libraries".
    payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
    let library_id = payload.as_ref().and_then(|p| p.0.library_id);
    let job_id = Uuid::new_v4();
    // Insert the job in 'pending' state and read the created row back in a
    // single round trip via RETURNING.
    // NOTE(review): nothing in this handler starts the actual work —
    // presumably a background poller picks up 'pending' jobs; confirm.
    let row = sqlx::query(
        r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
    )
    .bind(job_id)
    .bind(library_id)
    .fetch_one(&state.pool)
    .await
    .map_err(|e| ApiError::internal(e.to_string()))?;
    Ok(Json(index_jobs::map_row(row)))
}
/// POST /index/thumbnails/regenerate — create a job and regenerate all thumbnails in scope (clears then regenerates).
///
/// Persists a new `index_jobs` row in the 'pending' state and returns it so
/// the caller can track progress.
#[utoipa::path(
    post,
    path = "/index/thumbnails/regenerate",
    tag = "indexing",
    request_body = Option<ThumbnailsRebuildRequest>,
    responses(
        (status = 200, body = index_jobs::IndexJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_thumbnails_regenerate(
    State(state): State<AppState>,
    payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
    // An absent body or absent library_id means "regenerate everywhere".
    let scope = payload.as_ref().and_then(|body| body.0.library_id);
    let new_job_id = Uuid::new_v4();

    let insert = sqlx::query(
        r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_regenerate', 'pending')
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
    )
    .bind(new_job_id)
    .bind(scope);

    let inserted_row = insert
        .fetch_one(&state.pool)
        .await
        .map_err(|e| ApiError::internal(e.to_string()))?;

    Ok(Json(index_jobs::map_row(inserted_row)))
}
/// POST /index/jobs/:id/thumbnails/checkup — start thumbnail generation for books missing thumbnails (called by indexer at end of build).
///
/// Spawns `run_checkup` as a detached background task and returns
/// `202 Accepted` immediately; progress is reported on the `index_jobs` row.
pub async fn start_checkup(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<StatusCode, ApiError> {
    // `state` is already owned by this handler, so it can move straight into
    // the spawned task — the previous `state.clone()` was redundant.
    tokio::spawn(async move { run_checkup(state, job_id).await });
    Ok(StatusCode::ACCEPTED)
}

View File

@@ -0,0 +1,43 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * GET /api/books/[bookId]/thumbnail — server-side proxy for book thumbnails.
 *
 * Forwards the request to the backend API using the bootstrap token and
 * streams the image body back with long-lived immutable caching headers.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  const { bookId } = await params;
  const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
  const apiUrl = `${apiBaseUrl}/books/${bookId}/thumbnail`;

  const token = process.env.API_BOOTSTRAP_TOKEN;
  if (!token) {
    return new NextResponse("API token not configured", { status: 500 });
  }

  try {
    const upstream = await fetch(apiUrl, {
      headers: { Authorization: `Bearer ${token}` },
    });

    if (!upstream.ok) {
      // Mirror the backend's status so the browser sees the real failure.
      return new NextResponse(`Failed to fetch thumbnail: ${upstream.status}`, {
        status: upstream.status
      });
    }

    const body = await upstream.arrayBuffer();
    const mime = upstream.headers.get("content-type") || "image/webp";
    return new NextResponse(body, {
      headers: {
        "Content-Type": mime,
        // Thumbnails are content-addressed per book; safe to cache for a year.
        "Cache-Control": "public, max-age=31536000, immutable",
      },
    });
  } catch (error) {
    console.error("Error fetching thumbnail:", error);
    return new NextResponse("Failed to fetch thumbnail", { status: 500 });
  }
}

View File

@@ -33,26 +33,38 @@ export async function GET(
},
});
if (response.ok) {
if (response.ok && isActive) {
const data = await response.json();
const dataStr = JSON.stringify(data);
// Only send if data changed
if (dataStr !== lastData) {
if (dataStr !== lastData && isActive) {
lastData = dataStr;
controller.enqueue(
new TextEncoder().encode(`data: ${dataStr}\n\n`)
);
try {
controller.enqueue(
new TextEncoder().encode(`data: ${dataStr}\n\n`)
);
} catch (err) {
// Controller closed, ignore
isActive = false;
return;
}
// Stop polling if job is complete
if (data.status === "success" || data.status === "failed" || data.status === "cancelled") {
isActive = false;
controller.close();
try {
controller.close();
} catch (err) {
// Already closed, ignore
}
}
}
}
} catch (error) {
console.error("SSE fetch error:", error);
if (isActive) {
console.error("SSE fetch error:", error);
}
}
};

View File

@@ -28,20 +28,27 @@ export async function GET(request: NextRequest) {
},
});
if (response.ok) {
if (response.ok && isActive) {
const data = await response.json();
const dataStr = JSON.stringify(data);
// Send if data changed
if (dataStr !== lastData) {
if (dataStr !== lastData && isActive) {
lastData = dataStr;
controller.enqueue(
new TextEncoder().encode(`data: ${dataStr}\n\n`)
);
try {
controller.enqueue(
new TextEncoder().encode(`data: ${dataStr}\n\n`)
);
} catch (err) {
// Controller closed, ignore
isActive = false;
}
}
}
} catch (error) {
console.error("SSE fetch error:", error);
if (isActive) {
console.error("SSE fetch error:", error);
}
}
};

View File

@@ -0,0 +1,59 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * GET /api/settings/[key] — proxy a single setting read to the backend API.
 *
 * Authenticates with the server-side bootstrap token; responses are never
 * cached so the backoffice always sees current values.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ key: string }> }
) {
  try {
    const { key } = await params;
    const baseUrl = process.env.API_BASE_URL || "http://api:8080";
    const token = process.env.API_BOOTSTRAP_TOKEN;
    // Fail fast on missing configuration instead of sending "Bearer undefined"
    // upstream, which would surface as a misleading 401/403.
    if (!token) {
      return NextResponse.json({ error: "API token not configured" }, { status: 500 });
    }
    const response = await fetch(`${baseUrl}/settings/${key}`, {
      headers: {
        Authorization: `Bearer ${token}`,
      },
      cache: "no-store"
    });
    if (!response.ok) {
      return NextResponse.json({ error: "Failed to fetch setting" }, { status: response.status });
    }
    const data = await response.json();
    return NextResponse.json(data);
  } catch (error) {
    // Log the cause server-side; the client only gets a generic 500.
    console.error("Error fetching setting:", error);
    return NextResponse.json({ error: "Internal server error" }, { status: 500 });
  }
}
/**
 * POST /api/settings/[key] — proxy a setting update to the backend API.
 *
 * Forwards the JSON body as-is with the bootstrap token; a malformed request
 * body is caught below and reported as a 500.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ key: string }> }
) {
  try {
    const { key } = await params;
    const baseUrl = process.env.API_BASE_URL || "http://api:8080";
    const token = process.env.API_BOOTSTRAP_TOKEN;
    // Fail fast on missing configuration instead of sending "Bearer undefined"
    // upstream, which would surface as a misleading 401/403.
    if (!token) {
      return NextResponse.json({ error: "API token not configured" }, { status: 500 });
    }
    const body = await request.json();
    const response = await fetch(`${baseUrl}/settings/${key}`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(body),
      cache: "no-store"
    });
    if (!response.ok) {
      return NextResponse.json({ error: "Failed to update setting" }, { status: response.status });
    }
    const data = await response.json();
    return NextResponse.json(data);
  } catch (error) {
    // Log the cause server-side; the client only gets a generic 500.
    console.error("Error updating setting:", error);
    return NextResponse.json({ error: "Internal server error" }, { status: 500 });
  }
}

View File

@@ -0,0 +1,25 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * POST /api/settings/cache/clear — proxy a cache-clear request to the backend.
 *
 * Requires the server-side bootstrap token; never cached.
 */
export async function POST(request: NextRequest) {
  try {
    const baseUrl = process.env.API_BASE_URL || "http://api:8080";
    const token = process.env.API_BOOTSTRAP_TOKEN;
    // Fail fast on missing configuration instead of sending "Bearer undefined"
    // upstream, which would surface as a misleading 401/403.
    if (!token) {
      return NextResponse.json({ error: "API token not configured" }, { status: 500 });
    }
    const response = await fetch(`${baseUrl}/settings/cache/clear`, {
      method: "POST",
      headers: {
        Authorization: `Bearer ${token}`,
      },
      cache: "no-store"
    });
    if (!response.ok) {
      return NextResponse.json({ error: "Failed to clear cache" }, { status: response.status });
    }
    const data = await response.json();
    return NextResponse.json(data);
  } catch (error) {
    // Log the cause server-side; the client only gets a generic 500.
    console.error("Error clearing cache:", error);
    return NextResponse.json({ error: "Internal server error" }, { status: 500 });
  }
}

View File

@@ -0,0 +1,24 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * GET /api/settings/cache/stats — proxy cache statistics from the backend.
 *
 * Requires the server-side bootstrap token; never cached so stats are fresh.
 */
export async function GET(request: NextRequest) {
  try {
    const baseUrl = process.env.API_BASE_URL || "http://api:8080";
    const token = process.env.API_BOOTSTRAP_TOKEN;
    // Fail fast on missing configuration instead of sending "Bearer undefined"
    // upstream, which would surface as a misleading 401/403.
    if (!token) {
      return NextResponse.json({ error: "API token not configured" }, { status: 500 });
    }
    const response = await fetch(`${baseUrl}/settings/cache/stats`, {
      headers: {
        Authorization: `Bearer ${token}`,
      },
      cache: "no-store"
    });
    if (!response.ok) {
      return NextResponse.json({ error: "Failed to fetch cache stats" }, { status: response.status });
    }
    const data = await response.json();
    return NextResponse.json(data);
  } catch (error) {
    // Log the cause server-side; the client only gets a generic 500.
    console.error("Error fetching cache stats:", error);
    return NextResponse.json({ error: "Internal server error" }, { status: 500 });
  }
}

View File

@@ -0,0 +1,24 @@
import { NextRequest, NextResponse } from "next/server";
/**
 * GET /api/settings — proxy the full settings list from the backend API.
 *
 * Requires the server-side bootstrap token; never cached.
 */
export async function GET(request: NextRequest) {
  try {
    const baseUrl = process.env.API_BASE_URL || "http://api:8080";
    const token = process.env.API_BOOTSTRAP_TOKEN;
    // Fail fast on missing configuration instead of sending "Bearer undefined"
    // upstream, which would surface as a misleading 401/403.
    if (!token) {
      return NextResponse.json({ error: "API token not configured" }, { status: 500 });
    }
    const response = await fetch(`${baseUrl}/settings`, {
      headers: {
        Authorization: `Bearer ${token}`,
      },
      cache: "no-store"
    });
    if (!response.ok) {
      return NextResponse.json({ error: "Failed to fetch settings" }, { status: response.status });
    }
    const data = await response.json();
    return NextResponse.json(data);
  } catch (error) {
    // Log the cause server-side; the client only gets a generic 500.
    console.error("Error fetching settings:", error);
    return NextResponse.json({ error: "Internal server error" }, { status: 500 });
  }
}

View File

@@ -38,7 +38,7 @@ function BookImage({ src, alt }: { src: string; alt: string }) {
}
export function BookCard({ book }: BookCardProps) {
const coverUrl = book.coverUrl || `/api/books/${book.id}/pages/1?format=webp&width=200`;
const coverUrl = book.coverUrl || `/api/books/${book.id}/thumbnail`;
return (
<Link

View File

@@ -87,6 +87,8 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
const percent = progress.progress_percent ?? 0;
const processed = progress.processed_files ?? 0;
const total = progress.total_files ?? 0;
const isThumbnailsPhase = progress.status === "generating_thumbnails";
const unitLabel = isThumbnailsPhase ? "thumbnails" : "files";
return (
<div className="p-4 bg-card rounded-lg border border-border">
@@ -100,7 +102,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
<ProgressBar value={percent} showLabel size="lg" className="mb-3" />
<div className="flex flex-wrap items-center gap-x-4 gap-y-1 text-sm text-muted-foreground mb-3">
<span>{processed} / {total} files</span>
<span>{processed} / {total} {unitLabel}</span>
{progress.current_file && (
<span className="truncate max-w-md" title={progress.current_file}>
Current: {progress.current_file.length > 40
@@ -110,7 +112,7 @@ export function JobProgress({ jobId, onComplete }: JobProgressProps) {
)}
</div>
{progress.stats_json && (
{progress.stats_json && !isThumbnailsPhase && (
<div className="flex flex-wrap gap-3 text-xs">
<Badge variant="primary">Scanned: {progress.stats_json.scanned_files}</Badge>
<Badge variant="success">Indexed: {progress.stats_json.indexed_files}</Badge>

View File

@@ -33,9 +33,8 @@ interface JobRowProps {
}
export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, formatDuration }: JobRowProps) {
const [showProgress, setShowProgress] = useState(
highlighted || job.status === "running" || job.status === "pending"
);
const isActive = job.status === "running" || job.status === "pending" || job.status === "generating_thumbnails";
const [showProgress, setShowProgress] = useState(highlighted || isActive);
const handleComplete = () => {
setShowProgress(false);
@@ -53,12 +52,32 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
const removed = job.stats_json?.removed_files ?? 0;
const errors = job.stats_json?.errors ?? 0;
// Format files display
const filesDisplay = job.status === "running" && job.total_files
? `${job.processed_files || 0}/${job.total_files}`
: scanned > 0
? `${scanned} scanned`
: "-";
const isThumbnailPhase = job.status === "generating_thumbnails";
const isThumbnailJob = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
const hasThumbnailPhase = isThumbnailPhase || isThumbnailJob;
// Files column: index-phase stats only
const filesDisplay =
job.status === "running" && !isThumbnailPhase
? job.total_files != null
? `${job.processed_files ?? 0}/${job.total_files}`
: scanned > 0
? `${scanned} scanned`
: "-"
: job.status === "success" && (indexed > 0 || removed > 0 || errors > 0)
? null // rendered below as ✓ / / ⚠
: scanned > 0
? `${scanned} scanned`
: "—";
// Thumbnails column
const thumbInProgress = hasThumbnailPhase && (job.status === "running" || isThumbnailPhase);
const thumbDisplay =
thumbInProgress && job.total_files != null
? `${job.processed_files ?? 0}/${job.total_files}`
: job.status === "success" && job.total_files != null && hasThumbnailPhase
? `${job.total_files}`
: "—";
return (
<>
@@ -86,7 +105,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
!
</span>
)}
{(job.status === "running" || job.status === "pending") && (
{isActive && (
<button
className="text-xs text-primary hover:text-primary/80 hover:underline"
onClick={() => setShowProgress(!showProgress)}
@@ -98,21 +117,26 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
</td>
<td className="px-4 py-3">
<div className="flex flex-col gap-1">
<span className="text-sm text-foreground">{filesDisplay}</span>
{job.status === "running" && job.total_files && (
<MiniProgressBar
value={job.processed_files || 0}
max={job.total_files}
className="w-24"
/>
)}
{job.status === "success" && (
{filesDisplay !== null ? (
<span className="text-sm text-foreground">{filesDisplay}</span>
) : (
<div className="flex items-center gap-2 text-xs">
<span className="text-success"> {indexed}</span>
{removed > 0 && <span className="text-warning"> {removed}</span>}
{errors > 0 && <span className="text-error"> {errors}</span>}
</div>
)}
{job.status === "running" && !isThumbnailPhase && job.total_files != null && (
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
)}
</div>
</td>
<td className="px-4 py-3">
<div className="flex flex-col gap-1">
<span className="text-sm text-foreground">{thumbDisplay}</span>
{thumbInProgress && job.total_files != null && (
<MiniProgressBar value={job.processed_files ?? 0} max={job.total_files} className="w-24" />
)}
</div>
</td>
<td className="px-4 py-3 text-sm text-muted-foreground">
@@ -129,7 +153,7 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
>
View
</Link>
{(job.status === "pending" || job.status === "running") && (
{(job.status === "pending" || job.status === "running" || job.status === "generating_thumbnails") && (
<Button
variant="danger"
size="sm"
@@ -141,9 +165,9 @@ export function JobRow({ job, libraryName, highlighted, onCancel, formatDate, fo
</div>
</td>
</tr>
{showProgress && (job.status === "running" || job.status === "pending") && (
{showProgress && isActive && (
<tr>
<td colSpan={8} className="px-4 py-3 bg-muted/50">
<td colSpan={9} className="px-4 py-3 bg-muted/50">
<JobProgress
jobId={job.id}
onComplete={handleComplete}

View File

@@ -78,7 +78,7 @@ export function JobsIndicator() {
return () => document.removeEventListener("mousedown", handleClickOutside);
}, []);
const runningJobs = activeJobs.filter(j => j.status === "running");
const runningJobs = activeJobs.filter(j => j.status === "running" || j.status === "generating_thumbnails");
const pendingJobs = activeJobs.filter(j => j.status === "pending");
const totalCount = activeJobs.length;
@@ -210,19 +210,19 @@ export function JobsIndicator() {
>
<div className="flex items-start gap-3">
<div className="mt-0.5">
{job.status === "running" && <span className="animate-spin inline-block"></span>}
{(job.status === "running" || job.status === "generating_thumbnails") && <span className="animate-spin inline-block"></span>}
{job.status === "pending" && <span></span>}
</div>
<div className="flex-1 min-w-0">
<div className="flex items-center gap-2 mb-1">
<code className="text-xs px-1.5 py-0.5 bg-muted rounded font-mono">{job.id.slice(0, 8)}</code>
<Badge variant={job.type === 'rebuild' ? 'primary' : 'secondary'} className="text-[10px]">
{job.type}
<Badge variant={job.type === 'rebuild' ? 'primary' : job.type === 'thumbnail_regenerate' ? 'warning' : 'secondary'} className="text-[10px]">
{job.type === 'thumbnail_rebuild' ? 'Thumbnails' : job.type === 'thumbnail_regenerate' ? 'Regenerate' : job.type}
</Badge>
</div>
{job.status === "running" && job.progress_percent !== null && (
{(job.status === "running" || job.status === "generating_thumbnails") && job.progress_percent != null && (
<div className="flex items-center gap-2 mt-2">
<MiniProgressBar value={job.progress_percent} />
<span className="text-xs font-medium text-muted-foreground">{job.progress_percent}%</span>

View File

@@ -111,6 +111,7 @@ export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListPro
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Type</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Status</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Files</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Thumbnails</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Duration</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Created</th>
<th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">Actions</th>

View File

@@ -60,6 +60,7 @@ export function Badge({ children, variant = "default", className = "" }: BadgePr
// Status badge for jobs/tasks
const statusVariants: Record<string, BadgeVariant> = {
running: "in-progress",
generating_thumbnails: "in-progress",
success: "completed",
completed: "completed",
failed: "error",
@@ -68,20 +69,33 @@ const statusVariants: Record<string, BadgeVariant> = {
unread: "unread",
};
const statusLabels: Record<string, string> = {
generating_thumbnails: "Thumbnails",
};
interface StatusBadgeProps {
status: string;
className?: string;
}
export function StatusBadge({ status, className = "" }: StatusBadgeProps) {
const variant = statusVariants[status.toLowerCase()] || "default";
return <Badge variant={variant} className={className}>{status}</Badge>;
const key = status.toLowerCase();
const variant = statusVariants[key] || "default";
const label = statusLabels[key] ?? status;
return <Badge variant={variant} className={className}>{label}</Badge>;
}
// Job type badge
const jobTypeVariants: Record<string, BadgeVariant> = {
rebuild: "primary",
full_rebuild: "warning",
thumbnail_rebuild: "secondary",
thumbnail_regenerate: "warning",
};
const jobTypeLabels: Record<string, string> = {
thumbnail_rebuild: "Thumbnails",
thumbnail_regenerate: "Regenerate",
};
interface JobTypeBadgeProps {
@@ -90,8 +104,10 @@ interface JobTypeBadgeProps {
}
export function JobTypeBadge({ type, className = "" }: JobTypeBadgeProps) {
const variant = jobTypeVariants[type.toLowerCase()] || "default";
return <Badge variant={variant} className={className}>{type}</Badge>;
const key = type.toLowerCase();
const variant = jobTypeVariants[key] || "default";
const label = jobTypeLabels[key] ?? type;
return <Badge variant={variant} className={className}>{label}</Badge>;
}
// Progress badge (shows percentage)

View File

@@ -1,94 +1,123 @@
type IconName = "dashboard" | "books" | "libraries" | "jobs" | "tokens" | "series";
type IconName =
| "dashboard"
| "books"
| "libraries"
| "jobs"
| "tokens"
| "series"
| "settings"
| "image"
| "cache"
| "performance"
| "folder"
| "folderOpen"
| "search"
| "plus"
| "edit"
| "trash"
| "check"
| "x"
| "chevronLeft"
| "chevronRight"
| "chevronUp"
| "chevronDown"
| "arrowLeft"
| "arrowRight"
| "refresh"
| "sun"
| "moon"
| "externalLink"
| "key"
| "play"
| "stop"
| "spinner"
| "warning";
interface PageIconProps {
type IconSize = "sm" | "md" | "lg" | "xl";
interface IconProps {
name: IconName;
size?: IconSize;
className?: string;
}
const icons: Record<IconName, React.ReactNode> = {
dashboard: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 12l2-2m0 0l7-7 7 7M5 10v10a1 1 0 001 1h3m10-11l2 2m-2-2v10a1 1 0 01-1 1h-3m-6 0a1 1 0 001-1v-4a1 1 0 011-1h2a1 1 0 011 1v4a1 1 0 001 1m-6 0h6" />
</svg>
),
books: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
</svg>
),
libraries: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
</svg>
),
jobs: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" />
</svg>
),
tokens: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z" />
</svg>
),
series: (
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
</svg>
),
const sizeClasses: Record<IconSize, string> = {
sm: "w-4 h-4",
md: "w-5 h-5",
lg: "w-6 h-6",
xl: "w-8 h-8",
};
const colors: Record<IconName, string> = {
const icons: Record<IconName, string> = {
dashboard: "M3 12l2-2m0 0l7-7 7 7M5 10v10a1 1 0 001 1h3m10-11l2 2m-2-2v10a1 1 0 01-1 1h-3m-6 0a1 1 0 001-1v-4a1 1 0 011-1h2a1 1 0 011 1v4a1 1 0 001 1m-6 0h6",
books: "M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253",
libraries: "M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z",
jobs: "M13 10V3L4 14h7v7l9-11h-7z",
tokens: "M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z",
series: "M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10",
settings: "M10.325 4.317c.426-1.756 2.924-1.756 3.35 0a1.724 1.724 0 002.573 1.066c1.543-.94 3.31.826 2.37 2.37a1.724 1.724 0 001.065 2.572c1.756.426 1.756 2.924 0 3.35a1.724 1.724 0 00-1.066 2.573c.94 1.543-.826 3.31-2.37 2.37a1.724 1.724 0 00-2.572 1.065c-.426 1.756-2.924 1.756-3.35 0a1.724 1.724 0 00-2.573-1.066c-1.543.94-3.31-.826-2.37-2.37a1.724 1.724 0 00-1.065-2.572c-1.756-.426-1.756-2.924 0-3.35a1.724 1.724 0 001.066-2.573c-.94-1.543.826-3.31 2.37-2.37.996.608 2.296.07 2.572-1.065z M15 12a3 3 0 11-6 0 3 3 0 016 0z",
image: "M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z",
cache: "M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10",
performance: "M13 10V3L4 14h7v7l9-11h-7z",
folder: "M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z",
folderOpen: "M5 19a2 2 0 01-2-2V7a2 2 0 012-2h4l2 2h4a2 2 0 012 2v1M5 19h14a2 2 0 002-2v-5a2 2 0 00-2-2H9a2 2 0 00-2 2v5a2 2 0 01-2 2z",
search: "M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z",
plus: "M12 4v16m8-8H4",
edit: "M11 5H6a2 2 0 00-2 2v11a2 2 0 002 2h11a2 2 0 002-2v-5m-1.414-9.414a2 2 0 112.828 2.828L11.828 15H9v-2.828l8.586-8.586z",
trash: "M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16",
check: "M5 13l4 4L19 7",
x: "M6 18L18 6M6 6l12 12",
chevronLeft: "M15 19l-7-7 7-7",
chevronRight: "M9 5l7 7-7 7",
chevronUp: "M5 15l7-7 7 7",
chevronDown: "M19 9l-7 7-7-7",
arrowLeft: "M10 19l-7-7m0 0l7-7m-7 7h18",
arrowRight: "M14 5l7 7m0 0l-7 7m7-7H3",
refresh: "M4 4v5h.582m15.582 0A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15",
sun: "M12 3v1m0 16v1m9-9h-1M4 12H3m15.364 6.364l-.707-.707M6.343 6.343l-.707-.707m12.728 0l-.707.707M6.343 17.657l-.707.707M16 12a4 4 0 11-8 0 4 4 0 018 0z",
moon: "M20.354 15.354A9 9 0 018.646 3.646 9.003 9.003 0 0012 21a9.003 9.003 0 008.354-5.646z",
externalLink: "M10 6H6a2 2 0 00-2 2v10a2 2 0 002 2h10a2 2 0 002-2v-4M14 4h6m0 0v6m0-6L10 14",
key: "M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z",
play: "M14.752 11.168l-3.197-2.132A1 1 0 0010 9.87v4.263a1 1 0 001.555.832l3.197-2.132a1 1 0 000-1.664z M21 12a9 9 0 11-18 0 9 9 0 0118 0z",
stop: "M21 12a9 9 0 11-18 0 9 9 0 0118 0z M9 10a1 1 0 011-1h4a1 1 0 011 1v4a1 1 0 01-1 1h-4a1 1 0 01-1-1v-4z",
spinner: "M4 4v5h.582m15.582 0A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15",
warning: "M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z",
};
const colorClasses: Partial<Record<IconName, string>> = {
dashboard: "text-primary",
books: "text-success",
libraries: "text-primary",
jobs: "text-warning",
tokens: "text-error",
series: "text-primary",
settings: "text-muted-foreground",
image: "text-primary",
cache: "text-warning",
performance: "text-success",
};
export function PageIcon({ name, className = "" }: PageIconProps) {
export function Icon({ name, size = "md", className = "" }: IconProps) {
const sizeClass = sizeClasses[size];
const colorClass = colorClasses[name];
return (
<span className={`${colors[name]} ${className}`}>
{icons[name]}
</span>
<svg
className={`${sizeClass} ${colorClass || ""} ${className}`}
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
>
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d={icons[name]} />
</svg>
);
}
// Nav icons (smaller)
export function NavIcon({ name, className = "" }: { name: IconName; className?: string }) {
const navIcons: Record<IconName, React.ReactNode> = {
dashboard: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 12l2-2m0 0l7-7 7 7M5 10v10a1 1 0 001 1h3m10-11l2 2m-2-2v10a1 1 0 01-1 1h-3m-6 0a1 1 0 001-1v-4a1 1 0 011-1h2a1 1 0 011 1v4a1 1 0 001 1m-6 0h6" />
</svg>
),
books: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
</svg>
),
libraries: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
</svg>
),
jobs: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" />
</svg>
),
tokens: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z" />
</svg>
),
series: (
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
</svg>
),
};
return <span className={className}>{navIcons[name]}</span>;
// Backwards compatibility aliases
export function PageIcon({ name, className = "" }: { name: IconName; className?: string }) {
return <Icon name={name} size="xl" className={className} />;
}
export function NavIcon({ name, className = "" }: { name: IconName; className?: string }) {
return <Icon name={name} size="sm" className={className} />;
}

View File

@@ -17,5 +17,5 @@ export {
FormField, FormLabel, FormInput, FormSelect, FormRow,
FormSection, FormError, FormDescription
} from "./Form";
export { PageIcon, NavIcon } from "./Icon";
export { PageIcon, NavIcon, Icon } from "./Icon";
export { CursorPagination, OffsetPagination } from "./Pagination";

View File

@@ -171,19 +171,19 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
</Card>
{/* Progress Card */}
{(job.status === "running" || job.status === "success" || job.status === "failed") && (
{(job.status === "running" || job.status === "generating_thumbnails" || job.status === "success" || job.status === "failed") && (
<Card>
<CardHeader>
<CardTitle>Progress</CardTitle>
<CardTitle>{job.status === "generating_thumbnails" ? "Thumbnails" : "Progress"}</CardTitle>
</CardHeader>
<CardContent>
{job.total_files && job.total_files > 0 && (
{job.total_files != null && job.total_files > 0 && (
<>
<ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
<div className="grid grid-cols-3 gap-4">
<StatBox value={job.processed_files || 0} label="Processed" variant="primary" />
<StatBox value={job.total_files} label="Total" />
<StatBox value={job.total_files - (job.processed_files || 0)} label="Remaining" variant="warning" />
<StatBox value={job.processed_files ?? 0} label="Processed" variant="primary" />
<StatBox value={job.total_files} label={job.status === "generating_thumbnails" ? "Total thumbnails" : "Total"} />
<StatBox value={job.total_files - (job.processed_files ?? 0)} label="Remaining" variant="warning" />
</div>
</>
)}

View File

@@ -1,6 +1,6 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listJobs, fetchLibraries, rebuildIndex, IndexJobDto, LibraryDto } from "../../lib/api";
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, IndexJobDto, LibraryDto } from "../../lib/api";
import { JobsList } from "../components/JobsList";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormSelect, FormRow } from "../components/ui";
@@ -31,6 +31,22 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerThumbnailsRebuild(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerThumbnailsRegenerate(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await regenerateThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
return (
<>
<div className="mb-6">
@@ -45,7 +61,7 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
<Card className="mb-6">
<CardHeader>
<CardTitle>Queue New Job</CardTitle>
<CardDescription>Select a library to rebuild or perform a full rebuild</CardDescription>
<CardDescription>Rebuild index, full rebuild, generate missing thumbnails, or regenerate all thumbnails</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<form action={triggerRebuild}>
@@ -89,6 +105,48 @@ export default async function JobsPage({ searchParams }: { searchParams: Promise
</Button>
</FormRow>
</form>
<form action={triggerThumbnailsRebuild}>
<FormRow>
<FormField className="flex-1">
<FormSelect name="library_id" defaultValue="">
<option value="">All libraries</option>
{libraries.map((lib) => (
<option key={lib.id} value={lib.id}>
{lib.name}
</option>
))}
</FormSelect>
</FormField>
<Button type="submit" variant="secondary">
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
</svg>
Generate thumbnails
</Button>
</FormRow>
</form>
<form action={triggerThumbnailsRegenerate}>
<FormRow>
<FormField className="flex-1">
<FormSelect name="library_id" defaultValue="">
<option value="">All libraries</option>
{libraries.map((lib) => (
<option key={lib.id} value={lib.id}>
{lib.name}
</option>
))}
</FormSelect>
</FormField>
<Button type="submit" variant="warning">
<svg className="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
</svg>
Regenerate thumbnails
</Button>
</FormRow>
</form>
</CardContent>
</Card>

View File

@@ -6,7 +6,7 @@ import "./globals.css";
import { ThemeProvider } from "./theme-provider";
import { ThemeToggle } from "./theme-toggle";
import { JobsIndicator } from "./components/JobsIndicator";
import { NavIcon } from "./components/ui";
import { NavIcon, Icon } from "./components/ui";
export const metadata: Metadata = {
title: "StripStream Backoffice",
@@ -14,9 +14,9 @@ export const metadata: Metadata = {
};
type NavItem = {
href: "/" | "/books" | "/libraries" | "/jobs" | "/tokens";
href: "/" | "/books" | "/libraries" | "/jobs" | "/tokens" | "/settings";
label: string;
icon: "dashboard" | "books" | "libraries" | "jobs" | "tokens";
icon: "dashboard" | "books" | "libraries" | "jobs" | "tokens" | "settings";
};
const navItems: NavItem[] = [
@@ -71,6 +71,13 @@ export default function RootLayout({ children }: { children: ReactNode }) {
{/* Actions */}
<div className="flex items-center gap-1 pl-4 ml-2 border-l border-border/60">
<JobsIndicator />
<Link
href="/settings"
className="p-2 rounded-lg text-muted-foreground hover:text-foreground hover:bg-accent transition-colors"
title="Settings"
>
<Icon name="settings" size="md" />
</Link>
<ThemeToggle />
</div>
</div>

View File

@@ -1,4 +1,5 @@
import { fetchLibraries, fetchSeries, getBookCoverUrl, LibraryDto, SeriesDto } from "../../../../lib/api";
import { fetchLibraries, fetchSeries, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "../../../../lib/api";
import { CursorPagination } from "../../../components/ui";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";
@@ -7,26 +8,36 @@ import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader";
export const dynamic = "force-dynamic";
export default async function LibrarySeriesPage({
params
params,
searchParams
}: {
params: Promise<{ id: string }>;
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
const { id } = await params;
const searchParamsAwaited = await searchParams;
const cursor = typeof searchParamsAwaited.cursor === "string" ? searchParamsAwaited.cursor : undefined;
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
const [library, series] = await Promise.all([
const [library, seriesPage] = await Promise.all([
fetchLibraries().then(libs => libs.find(l => l.id === id)),
fetchSeries(id).catch(() => [] as SeriesDto[])
fetchSeries(id, cursor, limit).catch(() => ({ items: [] as SeriesDto[], next_cursor: null }) as SeriesPageDto)
]);
if (!library) {
notFound();
}
const series = seriesPage.items;
const nextCursor = seriesPage.next_cursor;
const hasNextPage = !!nextCursor;
const hasPrevPage = !!cursor;
return (
<div className="space-y-6">
<LibrarySubPageHeader
library={library}
title={`Series (${series.length})`}
title="Series"
icon={
<svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
@@ -36,35 +47,45 @@ export default async function LibrarySeriesPage({
/>
{series.length > 0 ? (
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 gap-6">
{series.map((s) => (
<Link
key={s.name}
href={`/libraries/${id}/books?series=${encodeURIComponent(s.name)}`}
className="group"
>
<div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200">
<div className="aspect-[2/3] relative bg-muted/50">
<Image
src={getBookCoverUrl(s.first_book_id)}
alt={`Cover of ${s.name}`}
fill
className="object-cover"
unoptimized
/>
<>
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 gap-6">
{series.map((s) => (
<Link
key={s.name}
href={`/libraries/${id}/books?series=${encodeURIComponent(s.name)}`}
className="group"
>
<div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200">
<div className="aspect-[2/3] relative bg-muted/50">
<Image
src={getBookCoverUrl(s.first_book_id)}
alt={`Cover of ${s.name}`}
fill
className="object-cover"
unoptimized
/>
</div>
<div className="p-3">
<h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
{s.name === "unclassified" ? "Unclassified" : s.name}
</h3>
<p className="text-xs text-muted-foreground mt-1">
{s.book_count} book{s.book_count !== 1 ? 's' : ''}
</p>
</div>
</div>
<div className="p-3">
<h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
{s.name === "unclassified" ? "Unclassified" : s.name}
</h3>
<p className="text-xs text-muted-foreground mt-1">
{s.book_count} book{s.book_count !== 1 ? 's' : ''}
</p>
</div>
</div>
</Link>
))}
</div>
</Link>
))}
</div>
<CursorPagination
hasNextPage={hasNextPage}
hasPrevPage={hasPrevPage}
pageSize={limit}
currentCount={series.length}
nextCursor={nextCursor}
/>
</>
) : (
<div className="text-center py-12 text-muted-foreground">
<p>No series found in this library</p>

View File

@@ -32,8 +32,8 @@ export default async function LibrariesPage() {
const seriesCounts = await Promise.all(
libraries.map(async (lib) => {
try {
const series = await fetchSeries(lib.id);
return { id: lib.id, count: series.length };
const seriesPage = await fetchSeries(lib.id);
return { id: lib.id, count: seriesPage.items.length };
} catch {
return { id: lib.id, count: 0 };
}

View File

@@ -0,0 +1,434 @@
"use client";
import { useState } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui";
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats } from "../../lib/api";
// Props for the client-side settings page. All three values are fetched
// server-side and passed down as initial state so the page renders with
// data on first paint.
interface SettingsPageProps {
initialSettings: Settings;
initialCacheStats: CacheStats;
initialThumbnailStats: ThumbnailStats;
}
/**
 * Client-side settings page.
 *
 * Renders four cards: Image Processing, Cache, Performance Limits, and
 * Thumbnails. Every control writes through `handleUpdateSetting`, which
 * POSTs the whole sub-section object to the local `/api/settings/:key`
 * proxy route. Selects save immediately on change; numeric/text inputs
 * save on blur so the user can finish typing first.
 */
export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats }: SettingsPageProps) {
  // Older servers may not return a `thumbnail` section yet; fall back to defaults.
  const [settings, setSettings] = useState<Settings>({
    ...initialSettings,
    thumbnail: initialSettings.thumbnail || { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
  });
  const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats);
  // Thumbnail stats are display-only on this page; no setter is needed.
  const [thumbnailStats] = useState<ThumbnailStats>(initialThumbnailStats);
  const [isClearing, setIsClearing] = useState(false);
  const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null);
  // NOTE(review): `isSaving` is currently written but never rendered;
  // kept so inputs can later be disabled while a save is in flight.
  const [isSaving, setIsSaving] = useState(false);
  const [saveMessage, setSaveMessage] = useState<string | null>(null);

  // Persist one settings section (e.g. "cache") through the local API proxy
  // and surface a transient success/failure banner.
  async function handleUpdateSetting(key: string, value: unknown) {
    setIsSaving(true);
    setSaveMessage(null);
    try {
      const response = await fetch(`/api/settings/${key}`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ value })
      });
      if (response.ok) {
        setSaveMessage("Settings saved successfully");
        // Auto-dismiss the confirmation banner after a few seconds.
        setTimeout(() => setSaveMessage(null), 3000);
      } else {
        setSaveMessage("Failed to save settings");
      }
    } catch {
      // Network-level failure (proxy unreachable); the error object carries
      // no extra user-facing detail, so it is intentionally discarded.
      setSaveMessage("Error saving settings");
    } finally {
      setIsSaving(false);
    }
  }

  // Clear the processed-image cache, then refresh the displayed stats.
  async function handleClearCache() {
    setIsClearing(true);
    setClearResult(null);
    try {
      const response = await fetch("/api/settings/cache/clear", { method: "POST" });
      const result = await response.json();
      setClearResult(result);
      // Refresh cache stats
      const statsResponse = await fetch("/api/settings/cache/stats");
      if (statsResponse.ok) {
        const stats = await statsResponse.json();
        setCacheStats(stats);
      }
    } catch {
      setClearResult({ success: false, message: "Failed to clear cache" });
    } finally {
      setIsClearing(false);
    }
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <Icon name="settings" size="xl" />
          Settings
        </h1>
      </div>
      {saveMessage && (
        <Card className="mb-6 border-success/50 bg-success/5">
          <CardContent className="pt-6">
            <p className="text-success">{saveMessage}</p>
          </CardContent>
        </Card>
      )}
      {/* Image Processing Settings */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Icon name="image" size="md" />
            Image Processing
          </CardTitle>
          <CardDescription>Configure how images are processed and compressed</CardDescription>
        </CardHeader>
        <CardContent>
          <div className="space-y-4">
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Output Format</label>
                <FormSelect
                  value={settings.image_processing.format}
                  onChange={(e) => {
                    const newSettings = { ...settings, image_processing: { ...settings.image_processing, format: e.target.value } };
                    setSettings(newSettings);
                    handleUpdateSetting("image_processing", newSettings.image_processing);
                  }}
                >
                  <option value="webp">WebP (Recommended)</option>
                  <option value="jpeg">JPEG</option>
                  <option value="png">PNG</option>
                </FormSelect>
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Quality (1-100)</label>
                <FormInput
                  type="number"
                  min={1}
                  max={100}
                  value={settings.image_processing.quality}
                  onChange={(e) => {
                    const quality = parseInt(e.target.value) || 85;
                    const newSettings = { ...settings, image_processing: { ...settings.image_processing, quality } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)}
                />
              </FormField>
            </FormRow>
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Resize Filter</label>
                <FormSelect
                  value={settings.image_processing.filter}
                  onChange={(e) => {
                    const newSettings = { ...settings, image_processing: { ...settings.image_processing, filter: e.target.value } };
                    setSettings(newSettings);
                    handleUpdateSetting("image_processing", newSettings.image_processing);
                  }}
                >
                  <option value="lanczos3">Lanczos3 (Best Quality)</option>
                  <option value="triangle">Triangle (Faster)</option>
                  <option value="nearest">Nearest (Fastest)</option>
                </FormSelect>
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Max Width (px)</label>
                <FormInput
                  type="number"
                  min={100}
                  max={2160}
                  value={settings.image_processing.max_width}
                  onChange={(e) => {
                    const max_width = parseInt(e.target.value) || 2160;
                    const newSettings = { ...settings, image_processing: { ...settings.image_processing, max_width } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)}
                />
              </FormField>
            </FormRow>
          </div>
        </CardContent>
      </Card>
      {/* Cache Settings */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Icon name="cache" size="md" />
            Cache
          </CardTitle>
          <CardDescription>Manage the image cache and storage</CardDescription>
        </CardHeader>
        <CardContent>
          <div className="space-y-4">
            <div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
              <div>
                <p className="text-sm text-muted-foreground">Cache Size</p>
                <p className="text-2xl font-semibold">{cacheStats.total_size_mb.toFixed(2)} MB</p>
              </div>
              <div>
                <p className="text-sm text-muted-foreground">Files</p>
                <p className="text-2xl font-semibold">{cacheStats.file_count}</p>
              </div>
              <div>
                <p className="text-sm text-muted-foreground">Directory</p>
                <p className="text-sm font-mono truncate" title={cacheStats.directory}>{cacheStats.directory}</p>
              </div>
            </div>
            {clearResult && (
              <div className={`p-3 rounded-lg ${clearResult.success ? 'bg-success/10 text-success' : 'bg-destructive/10 text-destructive'}`}>
                {clearResult.message}
              </div>
            )}
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Cache Directory</label>
                <FormInput
                  value={settings.cache.directory}
                  onChange={(e) => {
                    const newSettings = { ...settings, cache: { ...settings.cache, directory: e.target.value } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("cache", settings.cache)}
                />
              </FormField>
              <FormField className="w-32">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Max Size (MB)</label>
                <FormInput
                  type="number"
                  value={settings.cache.max_size_mb}
                  onChange={(e) => {
                    const max_size_mb = parseInt(e.target.value) || 10000;
                    const newSettings = { ...settings, cache: { ...settings.cache, max_size_mb } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("cache", settings.cache)}
                />
              </FormField>
            </FormRow>
            <Button
              onClick={handleClearCache}
              disabled={isClearing}
              variant="destructive"
            >
              {isClearing ? (
                <>
                  <Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
                  Clearing...
                </>
              ) : (
                <>
                  <Icon name="trash" size="sm" className="mr-2" />
                  Clear Cache
                </>
              )}
            </Button>
          </div>
        </CardContent>
      </Card>
      {/* Limits Settings */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Icon name="performance" size="md" />
            Performance Limits
          </CardTitle>
          <CardDescription>Configure API performance and rate limiting</CardDescription>
        </CardHeader>
        <CardContent>
          <div className="space-y-4">
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Concurrent Renders</label>
                <FormInput
                  type="number"
                  min={1}
                  max={20}
                  value={settings.limits.concurrent_renders}
                  onChange={(e) => {
                    const concurrent_renders = parseInt(e.target.value) || 4;
                    const newSettings = { ...settings, limits: { ...settings.limits, concurrent_renders } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("limits", settings.limits)}
                />
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Timeout (seconds)</label>
                <FormInput
                  type="number"
                  min={5}
                  max={60}
                  value={settings.limits.timeout_seconds}
                  onChange={(e) => {
                    const timeout_seconds = parseInt(e.target.value) || 12;
                    const newSettings = { ...settings, limits: { ...settings.limits, timeout_seconds } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("limits", settings.limits)}
                />
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Rate Limit (req/s)</label>
                <FormInput
                  type="number"
                  min={10}
                  max={1000}
                  value={settings.limits.rate_limit_per_second}
                  onChange={(e) => {
                    const rate_limit_per_second = parseInt(e.target.value) || 120;
                    const newSettings = { ...settings, limits: { ...settings.limits, rate_limit_per_second } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("limits", settings.limits)}
                />
              </FormField>
            </FormRow>
            <p className="text-sm text-muted-foreground">
              Note: Changes to limits require a server restart to take effect.
            </p>
          </div>
        </CardContent>
      </Card>
      {/* Thumbnail Settings */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle className="flex items-center gap-2">
            <Icon name="image" size="md" />
            Thumbnails
          </CardTitle>
          <CardDescription>Configure thumbnail generation during indexing</CardDescription>
        </CardHeader>
        <CardContent>
          <div className="space-y-4">
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Enable Thumbnails</label>
                <FormSelect
                  value={settings.thumbnail.enabled ? "true" : "false"}
                  onChange={(e) => {
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, enabled: e.target.value === "true" } };
                    setSettings(newSettings);
                    handleUpdateSetting("thumbnail", newSettings.thumbnail);
                  }}
                >
                  <option value="true">Enabled</option>
                  <option value="false">Disabled</option>
                </FormSelect>
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Output Format</label>
                <FormSelect
                  value={settings.thumbnail.format}
                  onChange={(e) => {
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, format: e.target.value } };
                    setSettings(newSettings);
                    handleUpdateSetting("thumbnail", newSettings.thumbnail);
                  }}
                >
                  <option value="webp">WebP (Recommended)</option>
                  <option value="jpeg">JPEG</option>
                  <option value="png">PNG</option>
                </FormSelect>
              </FormField>
            </FormRow>
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Width (px)</label>
                <FormInput
                  type="number"
                  min={50}
                  max={600}
                  value={settings.thumbnail.width}
                  onChange={(e) => {
                    const width = parseInt(e.target.value) || 300;
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, width } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
                />
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Height (px)</label>
                <FormInput
                  type="number"
                  min={50}
                  max={800}
                  value={settings.thumbnail.height}
                  onChange={(e) => {
                    const height = parseInt(e.target.value) || 400;
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, height } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
                />
              </FormField>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Quality (1-100)</label>
                <FormInput
                  type="number"
                  min={1}
                  max={100}
                  value={settings.thumbnail.quality}
                  onChange={(e) => {
                    const quality = parseInt(e.target.value) || 80;
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, quality } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
                />
              </FormField>
            </FormRow>
            <FormRow>
              <FormField className="flex-1">
                <label className="text-sm font-medium text-muted-foreground mb-1 block">Thumbnail Directory</label>
                <FormInput
                  value={settings.thumbnail.directory}
                  onChange={(e) => {
                    const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, directory: e.target.value } };
                    setSettings(newSettings);
                  }}
                  onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
                />
              </FormField>
            </FormRow>
            <div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
              <div>
                <p className="text-sm text-muted-foreground">Total Size</p>
                <p className="text-2xl font-semibold">{thumbnailStats.total_size_mb.toFixed(2)} MB</p>
              </div>
              <div>
                <p className="text-sm text-muted-foreground">Files</p>
                <p className="text-2xl font-semibold">{thumbnailStats.file_count}</p>
              </div>
              <div>
                <p className="text-sm text-muted-foreground">Directory</p>
                <p className="text-sm font-mono truncate" title={thumbnailStats.directory}>{thumbnailStats.directory}</p>
              </div>
            </div>
            <p className="text-sm text-muted-foreground">
              Note: Thumbnail settings are used during indexing. Existing thumbnails will not be regenerated automatically.
            </p>
          </div>
        </CardContent>
      </Card>
    </>
  );
}

View File

@@ -0,0 +1,27 @@
import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
import SettingsPage from "./SettingsPage";
export const dynamic = "force-dynamic";
/**
 * Server component wrapper for the settings page.
 *
 * Fetches the settings document plus cache and thumbnail statistics on the
 * server and hands them to the client component as initial state. Each call
 * falls back to sane defaults so the page still renders when the API is
 * unreachable.
 */
export default async function SettingsPageWrapper() {
  // The three requests are independent; fetch them concurrently instead of
  // awaiting each one in sequence.
  const [settings, cacheStats, thumbnailStats] = await Promise.all([
    getSettings().catch(() => ({
      image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
      cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
      limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 },
      thumbnail: { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
    })),
    getCacheStats().catch(() => ({
      total_size_mb: 0,
      file_count: 0,
      directory: "/tmp/stripstream-image-cache"
    })),
    getThumbnailStats().catch(() => ({
      total_size_mb: 0,
      file_count: 0,
      directory: "/data/thumbnails"
    }))
  ]);
  return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
}

View File

@@ -98,7 +98,10 @@ function config() {
return { baseUrl: baseUrl.replace(/\/$/, ""), token };
}
export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
export async function apiFetch<T>(
path: string,
init?: RequestInit,
): Promise<T> {
const { baseUrl, token } = config();
const headers = new Headers(init?.headers || {});
headers.set("Authorization", `Bearer ${token}`);
@@ -109,7 +112,7 @@ export async function apiFetch<T>(path: string, init?: RequestInit): Promise<T>
const res = await fetch(`${baseUrl}${path}`, {
...init,
headers,
cache: "no-store"
cache: "no-store",
});
if (!res.ok) {
@@ -130,7 +133,7 @@ export async function fetchLibraries() {
export async function createLibrary(name: string, rootPath: string) {
return apiFetch<LibraryDto>("/libraries", {
method: "POST",
body: JSON.stringify({ name, root_path: rootPath })
body: JSON.stringify({ name, root_path: rootPath }),
});
}
@@ -143,12 +146,21 @@ export async function scanLibrary(libraryId: string, full?: boolean) {
if (full) body.full = true;
return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, {
method: "POST",
body: JSON.stringify(body)
body: JSON.stringify(body),
});
}
export async function updateLibraryMonitoring(libraryId: string, monitorEnabled: boolean, scanMode: string, watcherEnabled?: boolean) {
const body: { monitor_enabled: boolean; scan_mode: string; watcher_enabled?: boolean } = {
export async function updateLibraryMonitoring(
libraryId: string,
monitorEnabled: boolean,
scanMode: string,
watcherEnabled?: boolean,
) {
const body: {
monitor_enabled: boolean;
scan_mode: string;
watcher_enabled?: boolean;
} = {
monitor_enabled: monitorEnabled,
scan_mode: scanMode,
};
@@ -157,7 +169,7 @@ export async function updateLibraryMonitoring(libraryId: string, monitorEnabled:
}
return apiFetch<LibraryDto>(`/libraries/${libraryId}/monitoring`, {
method: "PATCH",
body: JSON.stringify(body)
body: JSON.stringify(body),
});
}
@@ -171,7 +183,25 @@ export async function rebuildIndex(libraryId?: string, full?: boolean) {
if (full) body.full = true;
return apiFetch<IndexJobDto>("/index/rebuild", {
method: "POST",
body: JSON.stringify(body)
body: JSON.stringify(body),
});
}
/**
 * POST to one of the thumbnail job endpoints. When `libraryId` is given the
 * job is scoped to that library; omitting it targets every library (the
 * `library_id` key is left out of the body entirely).
 */
async function queueThumbnailJob(endpoint: string, libraryId?: string) {
  const body: { library_id?: string } = {};
  if (libraryId) body.library_id = libraryId;
  return apiFetch<IndexJobDto>(endpoint, {
    method: "POST",
    body: JSON.stringify(body),
  });
}

/** Queue a job that generates thumbnails where they are missing. */
export async function rebuildThumbnails(libraryId?: string) {
  return queueThumbnailJob("/index/thumbnails/rebuild", libraryId);
}

/** Queue a job that regenerates all thumbnails, replacing existing ones. */
export async function regenerateThumbnails(libraryId?: string) {
  return queueThumbnailJob("/index/thumbnails/regenerate", libraryId);
}
@@ -191,7 +221,7 @@ export async function listTokens() {
export async function createToken(name: string, scope: string) {
return apiFetch<{ token: string }>("/admin/tokens", {
method: "POST",
body: JSON.stringify({ name, scope })
body: JSON.stringify({ name, scope }),
});
}
@@ -199,31 +229,122 @@ export async function revokeToken(id: string) {
return apiFetch<void>(`/admin/tokens/${id}`, { method: "DELETE" });
}
export async function fetchBooks(libraryId?: string, series?: string, cursor?: string, limit: number = 50): Promise<BooksPageDto> {
export async function fetchBooks(
libraryId?: string,
series?: string,
cursor?: string,
limit: number = 50,
): Promise<BooksPageDto> {
const params = new URLSearchParams();
if (libraryId) params.set("library_id", libraryId);
if (series) params.set("series", series);
if (cursor) params.set("cursor", cursor);
params.set("limit", limit.toString());
return apiFetch<BooksPageDto>(`/books?${params.toString()}`);
}
export async function fetchSeries(libraryId: string): Promise<SeriesDto[]> {
return apiFetch<SeriesDto[]>(`/libraries/${libraryId}/series`);
/** One page of series results plus an opaque cursor for the next page. */
export type SeriesPageDto = {
  items: SeriesDto[];
  next_cursor: string | null;
};

/**
 * Fetch one page of series for a library using cursor pagination.
 * Pass the `next_cursor` from a previous page to continue; omit it for
 * the first page.
 */
export async function fetchSeries(
  libraryId: string,
  cursor?: string,
  limit: number = 50,
): Promise<SeriesPageDto> {
  const query = new URLSearchParams();
  if (cursor) {
    query.set("cursor", cursor);
  }
  query.set("limit", `${limit}`);
  return apiFetch<SeriesPageDto>(`/libraries/${libraryId}/series?${query}`);
}
export async function searchBooks(query: string, libraryId?: string, limit: number = 20): Promise<SearchResponseDto> {
export async function searchBooks(
query: string,
libraryId?: string,
limit: number = 20,
): Promise<SearchResponseDto> {
const params = new URLSearchParams();
params.set("q", query);
if (libraryId) params.set("library_id", libraryId);
params.set("limit", limit.toString());
return apiFetch<SearchResponseDto>(`/search?${params.toString()}`);
}
export function getBookCoverUrl(bookId: string): string {
// Use a local API proxy route to avoid CORS issues:
// the browser cannot reach http://api:8080 (an internal Docker hostname).
return `/api/books/${bookId}/pages/1?format=webp&width=200`;
return `/api/books/${bookId}/thumbnail`;
}
// Server configuration document, mirrored from the API's /settings payload.
export type Settings = {
// On-the-fly page rendering: output encoding and resize behavior.
image_processing: {
format: string;
quality: number;
filter: string;
// Upper bound for rendered page width, in pixels (see the "Max Width (px)" control).
max_width: number;
};
// Processed-image disk cache.
cache: {
enabled: boolean;
directory: string;
// Cache size cap in megabytes.
max_size_mb: number;
};
// API performance / rate-limiting knobs.
limits: {
concurrent_renders: number;
timeout_seconds: number;
rate_limit_per_second: number;
};
// Thumbnail generation performed during indexing.
thumbnail: {
enabled: boolean;
// Target dimensions in pixels.
width: number;
height: number;
// Encoder quality, 1-100.
quality: number;
format: string;
directory: string;
};
};
// Size/usage snapshot of the processed-image cache.
export type CacheStats = {
total_size_mb: number;
file_count: number;
directory: string;
};
// Result of a cache-clear request.
export type ClearCacheResponse = {
success: boolean;
message: string;
};
// Size/usage snapshot of the generated-thumbnail store.
export type ThumbnailStats = {
total_size_mb: number;
file_count: number;
directory: string;
};
/** Fetch the full server settings document. */
export async function getSettings() {
return apiFetch<Settings>("/settings");
}
/** Update a single top-level settings section (e.g. "cache") by key. */
export async function updateSetting(key: string, value: unknown) {
return apiFetch<unknown>(`/settings/${key}`, {
method: "POST",
body: JSON.stringify({ value }),
});
}
/** Fetch size/usage statistics for the processed-image cache. */
export async function getCacheStats() {
return apiFetch<CacheStats>("/settings/cache/stats");
}
/** Delete all entries in the processed-image cache. */
export async function clearCache() {
return apiFetch<ClearCacheResponse>("/settings/cache/clear", {
method: "POST",
});
}
/** Fetch size/usage statistics for generated thumbnails. */
export async function getThumbnailStats() {
return apiFetch<ThumbnailStats>("/settings/thumbnail/stats");
}

View File

@@ -1,6 +1,6 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
import "./.next/dev/types/routes.d.ts";
import "./.next/types/routes.d.ts";
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.

View File

@@ -3,9 +3,9 @@
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev -p 8082",
"dev": "next dev -p 7082",
"build": "next build",
"start": "next start -p 8082"
"start": "next start -p 7082"
},
"dependencies": {
"next": "^16.1.6",

View File

@@ -10,6 +10,8 @@ axum.workspace = true
chrono.workspace = true
notify = "6.1"
parsers = { path = "../../crates/parsers" }
rand.workspace = true
rayon.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true

File diff suppressed because it is too large Load Diff

View File

@@ -12,10 +12,12 @@ pub struct ApiConfig {
impl ApiConfig {
pub fn from_env() -> Result<Self> {
Ok(Self {
listen_addr: std::env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
listen_addr: std::env::var("API_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY")
.context("MEILI_MASTER_KEY is required")?,
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required")?,
})
@@ -29,20 +31,76 @@ pub struct IndexerConfig {
pub meili_url: String,
pub meili_master_key: String,
pub scan_interval_seconds: u64,
pub thumbnail_config: ThumbnailConfig,
/// API base URL for thumbnail checkup at end of build (e.g. http://api:8080)
pub api_base_url: String,
/// Token to call API (e.g. API_BOOTSTRAP_TOKEN)
pub api_bootstrap_token: String,
}
/// Settings controlling thumbnail generation during indexing.
/// Populated from `THUMBNAIL_*` environment variables in
/// `IndexerConfig::from_env`, with the same fallbacks as `Default`.
#[derive(Debug, Clone)]
pub struct ThumbnailConfig {
/// Whether thumbnails are generated at all.
pub enabled: bool,
/// Target thumbnail width in pixels.
pub width: u32,
/// Target thumbnail height in pixels.
pub height: u32,
/// Encoder quality, 1-100.
pub quality: u8,
/// Output image format (e.g. "webp").
pub format: String,
/// Directory where thumbnail files are written.
pub directory: String,
}
impl Default for ThumbnailConfig {
/// Defaults mirror the environment-variable fallbacks in
/// `IndexerConfig::from_env`: 300x400 WebP at quality 80, written
/// under /data/thumbnails, enabled.
fn default() -> Self {
Self {
enabled: true,
width: 300,
height: 400,
quality: 80,
format: "webp".to_string(),
directory: "/data/thumbnails".to_string(),
}
}
}
impl IndexerConfig {
pub fn from_env() -> Result<Self> {
let thumbnail_config = ThumbnailConfig {
enabled: std::env::var("THUMBNAIL_ENABLED")
.ok()
.and_then(|v| v.parse::<bool>().ok())
.unwrap_or(true),
width: std::env::var("THUMBNAIL_WIDTH")
.ok()
.and_then(|v| v.parse::<u32>().ok())
.unwrap_or(300),
height: std::env::var("THUMBNAIL_HEIGHT")
.ok()
.and_then(|v| v.parse::<u32>().ok())
.unwrap_or(400),
quality: std::env::var("THUMBNAIL_QUALITY")
.ok()
.and_then(|v| v.parse::<u8>().ok())
.unwrap_or(80),
format: std::env::var("THUMBNAIL_FORMAT").unwrap_or_else(|_| "webp".to_string()),
directory: std::env::var("THUMBNAIL_DIRECTORY")
.unwrap_or_else(|_| "/data/thumbnails".to_string()),
};
Ok(Self {
listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8081".to_string()),
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
meili_url: std::env::var("MEILI_URL").context("MEILI_URL is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY").context("MEILI_MASTER_KEY is required")?,
meili_master_key: std::env::var("MEILI_MASTER_KEY")
.context("MEILI_MASTER_KEY is required")?,
scan_interval_seconds: std::env::var("INDEXER_SCAN_INTERVAL_SECONDS")
.ok()
.and_then(|v| v.parse::<u64>().ok())
.unwrap_or(5),
thumbnail_config,
api_base_url: std::env::var("API_BASE_URL")
.unwrap_or_else(|_| "http://api:8080".to_string()),
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required for thumbnail checkup")?,
})
}
}
@@ -59,8 +117,10 @@ impl AdminUiConfig {
Ok(Self {
listen_addr: std::env::var("ADMIN_UI_LISTEN_ADDR")
.unwrap_or_else(|_| "0.0.0.0:8082".to_string()),
api_base_url: std::env::var("API_BASE_URL").unwrap_or_else(|_| "http://api:8080".to_string()),
api_token: std::env::var("API_BOOTSTRAP_TOKEN").context("API_BOOTSTRAP_TOKEN is required")?,
api_base_url: std::env::var("API_BASE_URL")
.unwrap_or_else(|_| "http://api:8080".to_string()),
api_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required")?,
})
}
}

View File

@@ -8,4 +8,6 @@ license.workspace = true
anyhow.workspace = true
lopdf = "0.35"
regex = "1"
uuid.workspace = true
walkdir.workspace = true
zip = { version = "2.2", default-features = false, features = ["deflate"] }

View File

@@ -1,5 +1,9 @@
use anyhow::{Context, Result};
use std::io::Read;
use std::path::Path;
use std::process::Command;
use uuid::Uuid;
use walkdir::WalkDir;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BookFormat {
@@ -54,16 +58,47 @@ pub fn parse_metadata(
// Determine series from parent folder relative to library root
let series = path.parent().and_then(|parent| {
// Get the relative path from library root to parent
let relative = parent.strip_prefix(library_root).ok()?;
// If relative path is not empty, use first component as series
let first_component = relative.components().next()?;
let series_name = first_component.as_os_str().to_string_lossy().to_string();
// Only if series_name is not empty
// Normalize paths for comparison (handle different separators, etc.)
let parent_str = parent.to_string_lossy().to_string();
let root_str = library_root.to_string_lossy().to_string();
// Try to find the library root in the parent path
let relative = if let Some(idx) = parent_str.find(&root_str) {
// Found root in parent, extract what comes after
let after_root = &parent_str[idx + root_str.len()..];
Path::new(after_root)
} else if let Some(relative) = parent.strip_prefix(library_root).ok() {
// Standard approach works
relative
} else {
// Log for diagnostic on server
eprintln!(
"[PARSER] Cannot determine series: parent '{}' doesn't start with root '{}'",
parent.display(),
library_root.display()
);
return None;
};
// Remove leading separators
let relative_str = relative.to_string_lossy().to_string();
let relative_clean = relative_str.trim_start_matches(|c| c == '/' || c == '\\');
if relative_clean.is_empty() {
return None;
}
// Get first component as series
let first_sep = relative_clean.find(|c| c == '/' || c == '\\');
let series_name = match first_sep {
Some(idx) => &relative_clean[..idx],
None => relative_clean,
};
if series_name.is_empty() {
None
} else {
Some(series_name)
Some(series_name.to_string())
}
});
@@ -209,3 +244,105 @@ fn is_image_name(name: &str) -> bool {
|| name.ends_with(".webp")
|| name.ends_with(".avif")
}
/// Extract the raw bytes of the first page image of a book.
///
/// Dispatches on `format`: CBZ archives are read in-process via the `zip`
/// crate, CBR archives are unpacked with the external `unar` binary, and
/// PDFs are rasterized with the external `pdftoppm` tool.
///
/// Returns the undecoded image bytes as stored in the source (PNG/JPEG/
/// WebP/... for archives, PNG for PDFs); callers handle any re-encoding
/// or resizing for thumbnail generation.
pub fn extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>> {
    match format {
        BookFormat::Cbz => extract_cbz_first_page(path),
        BookFormat::Cbr => extract_cbr_first_page(path),
        BookFormat::Pdf => extract_pdf_first_page(path),
    }
}
/// Read the first page image out of a CBZ (zip) archive.
///
/// "First" means the lexicographically smallest entry name among the
/// entries whose lowercased name carries a known image extension.
fn extract_cbz_first_page(path: &Path) -> Result<Vec<u8>> {
    let file = std::fs::File::open(path)
        .with_context(|| format!("cannot open cbz: {}", path.display()))?;
    let mut archive = zip::ZipArchive::new(file).context("invalid cbz archive")?;

    // Single pass over the central directory, tracking the smallest
    // image entry name seen so far (equivalent to sort + first).
    let mut candidate: Option<String> = None;
    for idx in 0..archive.len() {
        let entry = archive.by_index(idx).context("cannot read cbz entry")?;
        let lowered = entry.name().to_ascii_lowercase();
        if is_image_name(&lowered) {
            let original = entry.name().to_string();
            match &candidate {
                Some(best) if *best <= original => {}
                _ => candidate = Some(original),
            }
        }
    }

    let target = candidate.context("no images found in cbz")?;
    let mut entry = archive
        .by_name(&target)
        .context("cannot read first image")?;
    let mut data = Vec::new();
    entry.read_to_end(&mut data)?;
    Ok(data)
}
fn extract_cbr_first_page(path: &Path) -> Result<Vec<u8>> {
let tmp_dir = std::env::temp_dir().join(format!("stripstream-cbr-thumb-{}", Uuid::new_v4()));
std::fs::create_dir_all(&tmp_dir).context("cannot create temp dir")?;
// Use env command like the API does
let output = std::process::Command::new("env")
.args(["LC_ALL=en_US.UTF-8", "LANG=en_US.UTF-8", "unar", "-o"])
.arg(&tmp_dir)
.arg(path)
.output()
.context("unar failed")?;
if !output.status.success() {
let _ = std::fs::remove_dir_all(&tmp_dir);
return Err(anyhow::anyhow!(
"unar extract failed: {:?}",
String::from_utf8_lossy(&output.stderr)
));
}
// Use WalkDir for recursive search (CBR can have subdirectories)
let mut image_files: Vec<_> = WalkDir::new(&tmp_dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|e| {
let name = e.file_name().to_string_lossy().to_lowercase();
is_image_name(&name)
})
.collect();
image_files.sort_by_key(|e| e.path().to_string_lossy().to_lowercase());
let first_image = image_files.first().context("no images found in cbr")?;
let data = std::fs::read(first_image.path())?;
let _ = std::fs::remove_dir_all(&tmp_dir);
Ok(data)
}
fn extract_pdf_first_page(path: &Path) -> Result<Vec<u8>> {
let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-thumb-{}", Uuid::new_v4()));
std::fs::create_dir_all(&tmp_dir)?;
let output_prefix = tmp_dir.join("page");
let output = Command::new("pdftoppm")
.args([
"-f",
"1",
"-singlefile",
"-png",
"-scale-to",
"800",
path.to_str().unwrap(),
output_prefix.to_str().unwrap(),
])
.output()
.context("pdftoppm failed")?;
if !output.status.success() {
let _ = std::fs::remove_dir_all(&tmp_dir);
return Err(anyhow::anyhow!("pdftoppm failed"));
}
let image_path = output_prefix.with_extension("png");
let data = std::fs::read(&image_path)?;
let _ = std::fs::remove_dir_all(&tmp_dir);
Ok(data)
}

View File

@@ -57,6 +57,7 @@ services:
- "7080:8080"
volumes:
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
depends_on:
migrate:
condition: service_completed_successfully
@@ -80,6 +81,7 @@ services:
- "7081:8081"
volumes:
- ${LIBRARIES_HOST_PATH:-../libraries}:/libraries
- ${THUMBNAILS_HOST_PATH:-../data/thumbnails}:/data/thumbnails
depends_on:
migrate:
condition: service_completed_successfully

View File

@@ -0,0 +1,6 @@
-- Tracks the last successful push of the books index to Meilisearch.
-- Singleton table: id is pinned to 1 and serves only as the row identity.
CREATE TABLE IF NOT EXISTS sync_metadata (
    id INTEGER PRIMARY KEY,
    last_meili_sync TIMESTAMPTZ
);

-- Seed the singleton row. Explicit conflict target (the PK) so only a
-- primary-key clash is silently ignored, keeping re-runs idempotent
-- without masking unrelated constraint violations.
INSERT INTO sync_metadata (id, last_meili_sync)
VALUES (1, NULL)
ON CONFLICT (id) DO NOTHING;

View File

@@ -0,0 +1,11 @@
-- Key/value store for runtime-tunable application settings (JSON bodies).
CREATE TABLE IF NOT EXISTS app_settings (
    key TEXT PRIMARY KEY,
    value JSONB NOT NULL,
    updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP
);

-- Seed defaults. Explicit conflict target (the PK) keeps re-runs
-- idempotent and never overwrites operator-edited values.
INSERT INTO app_settings (key, value) VALUES
    -- Output encoding applied to served page images.
    ('image_processing', '{"format": "webp", "quality": 85, "filter": "lanczos3", "max_width": 2160}'),
    -- Processed-image disk cache; max_size_mb caps total usage.
    ('cache', '{"enabled": true, "directory": "/tmp/stripstream-image-cache", "max_size_mb": 10000}'),
    -- Concurrency, timeout, and rate limits for the render pipeline.
    ('limits', '{"concurrent_renders": 4, "timeout_seconds": 12, "rate_limit_per_second": 120}')
ON CONFLICT (key) DO NOTHING;

View File

@@ -0,0 +1,5 @@
-- Books gain a cached thumbnail location (NULL until one is generated).
ALTER TABLE books ADD COLUMN IF NOT EXISTS thumbnail_path TEXT;

-- (Re)seed the thumbnail settings. This migration intentionally resets
-- the row to the defaults. EXCLUDED.value reuses the inserted literal
-- instead of duplicating the JSON in the update branch, and updated_at
-- is bumped so the forced overwrite is visible in the audit column.
INSERT INTO app_settings (key, value) VALUES
    ('thumbnail', '{"enabled": true, "width": 300, "height": 400, "quality": 80, "format": "webp", "directory": "/data/thumbnails"}')
ON CONFLICT (key) DO UPDATE
SET value = EXCLUDED.value,
    updated_at = CURRENT_TIMESTAMP;

View File

@@ -0,0 +1,6 @@
-- Allow index_jobs to report a dedicated thumbnail-generation phase that
-- runs after indexing. Postgres cannot widen a CHECK constraint in place,
-- so it is dropped and recreated with the extended value list inside one
-- (atomic) ALTER TABLE statement.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_status_check,
    ADD CONSTRAINT index_jobs_status_check CHECK (
        status IN (
            'pending',
            'running',
            'generating_thumbnails',
            'success',
            'failed'
        )
    );

View File

@@ -0,0 +1,6 @@
-- Register the manual thumbnail job types so the admin UI can enqueue
-- 'thumbnail_rebuild' / 'thumbnail_regenerate' jobs. The CHECK is dropped
-- and recreated with the extended list inside one atomic ALTER TABLE.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_type_check,
    ADD CONSTRAINT index_jobs_type_check CHECK (
        type IN (
            'scan',
            'rebuild',
            'full_rebuild',
            'thumbnail_rebuild',
            'thumbnail_regenerate'
        )
    );