From 292c61566c6a2ae2f22a0fed3a8d9b8985bff16b Mon Sep 17 00:00:00 2001 From: Froidefond Julien Date: Sat, 7 Mar 2026 09:12:06 +0100 Subject: [PATCH] feat: add image optimization and settings page - Add persistent disk cache for processed images - Optimize image processing with short-circuit and quality settings - Add WebP lossy encoding with configurable quality - Add settings API endpoints (GET/POST /settings, cache management) - Add database table for app configuration - Add /settings page in backoffice for image/cache/limits config - Add cache stats and clear functionality - Add cursor-based pagination to the series listing (API and backoffice) - Update navigation with settings link --- Cargo.lock | 39 +++ apps/api/Cargo.toml | 1 + apps/api/src/books.rs | 44 ++- apps/api/src/main.rs | 2 + apps/api/src/pages.rs | 117 +++++-- apps/api/src/settings.rs | 260 +++++++++++++++ .../app/api/settings/[key]/route.ts | 59 ++++ .../app/api/settings/cache/clear/route.ts | 25 ++ .../app/api/settings/cache/stats/route.ts | 24 ++ apps/backoffice/app/api/settings/route.ts | 24 ++ apps/backoffice/app/components/ui/Icon.tsx | 15 +- apps/backoffice/app/layout.tsx | 5 +- .../app/libraries/[id]/series/page.tsx | 87 +++-- apps/backoffice/app/libraries/page.tsx | 4 +- apps/backoffice/app/settings/SettingsPage.tsx | 303 ++++++++++++++++++ apps/backoffice/app/settings/page.tsx | 20 ++ apps/backoffice/lib/api.ts | 62 +++- apps/backoffice/next-env.d.ts | 2 +- infra/migrations/0008_add_settings.sql | 11 + 19 files changed, 1038 insertions(+), 66 deletions(-) create mode 100644 apps/api/src/settings.rs create mode 100644 apps/backoffice/app/api/settings/[key]/route.ts create mode 100644 apps/backoffice/app/api/settings/cache/clear/route.ts create mode 100644 apps/backoffice/app/api/settings/cache/stats/route.ts create mode 100644 apps/backoffice/app/api/settings/route.ts create mode 100644 apps/backoffice/app/settings/SettingsPage.tsx create mode 100644 apps/backoffice/app/settings/page.tsx create mode 100644 infra/migrations/0008_add_settings.sql diff --git 
a/Cargo.lock b/Cargo.lock index 337a8c0..309023f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -78,6 +78,7 @@ dependencies = [ "utoipa", "utoipa-swagger-ui", "uuid", + "webp", "zip 2.4.2", ] @@ -317,6 +318,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aebf35691d1bfb0ac386a69bac2fde4dd276fb618cf8bf4f5318fe285e821bb2" dependencies = [ "find-msvc-tools", + "jobserver", + "libc", "shlex", ] @@ -799,6 +802,12 @@ dependencies = [ "wasip3", ] +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + [[package]] name = "hashbrown" version = "0.15.5" @@ -1216,6 +1225,16 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + [[package]] name = "js-sys" version = "0.3.91" @@ -1295,6 +1314,16 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "libwebp-sys" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54cd30df7c7165ce74a456e4ca9732c603e8dc5e60784558c1c6dc047f876733" +dependencies = [ + "cc", + "glob", +] + [[package]] name = "litemap" version = "0.8.1" @@ -3153,6 +3182,16 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c071456adef4aca59bf6a583c46b90ff5eb0b4f758fc347cea81290288f37ce1" +dependencies = [ + "image", + "libwebp-sys", +] + [[package]] name = "webpki-roots" version = "0.26.11" diff --git a/apps/api/Cargo.toml b/apps/api/Cargo.toml index f341bfe..60bd46a 100644 --- 
a/apps/api/Cargo.toml +++ b/apps/api/Cargo.toml @@ -31,3 +31,4 @@ uuid.workspace = true zip = { version = "2.2", default-features = false, features = ["deflate"] } utoipa.workspace = true utoipa-swagger-ui = { workspace = true, features = ["axum"] } +webp = "0.3" diff --git a/apps/api/src/books.rs b/apps/api/src/books.rs index 0194f83..e7b635b 100644 --- a/apps/api/src/books.rs +++ b/apps/api/src/books.rs @@ -224,16 +224,33 @@ pub struct SeriesItem { pub first_book_id: Uuid, } -/// List all series in a library +#[derive(Serialize, ToSchema)] +pub struct SeriesPage { + pub items: Vec, + #[schema(value_type = Option)] + pub next_cursor: Option, +} + +#[derive(Deserialize, ToSchema)] +pub struct ListSeriesQuery { + #[schema(value_type = Option)] + pub cursor: Option, + #[schema(value_type = Option, example = 50)] + pub limit: Option, +} + +/// List all series in a library with pagination #[utoipa::path( get, path = "/libraries/{library_id}/series", tag = "books", params( ("library_id" = String, Path, description = "Library UUID"), + ("cursor" = Option, Query, description = "Cursor for pagination (series name)"), + ("limit" = Option, Query, description = "Max items to return (max 200)"), ), responses( - (status = 200, body = Vec), + (status = 200, body = SeriesPage), (status = 401, description = "Unauthorized"), ), security(("Bearer" = [])) @@ -241,7 +258,10 @@ pub struct SeriesItem { pub async fn list_series( State(state): State, Path(library_id): Path, -) -> Result>, ApiError> { + Query(query): Query, +) -> Result, ApiError> { + let limit = query.limit.unwrap_or(50).clamp(1, 200); + let rows = sqlx::query( r#" WITH sorted_books AS ( @@ -272,6 +292,7 @@ pub async fn list_series( sb.id as first_book_id FROM series_counts sc JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1 + WHERE ($2::text IS NULL OR sc.name > $2) ORDER BY -- Natural sort: extract text part before numbers REGEXP_REPLACE(LOWER(sc.name), '[0-9]+', '', 'g'), @@ -281,14 +302,18 @@ pub async fn 
list_series( 0 ), sc.name ASC + LIMIT $3 "#, ) .bind(library_id) + .bind(query.cursor.as_deref()) + .bind(limit + 1) .fetch_all(&state.pool) .await?; - let series: Vec = rows + let mut items: Vec = rows .iter() + .take(limit as usize) .map(|row| SeriesItem { name: row.get("name"), book_count: row.get("book_count"), @@ -296,5 +321,14 @@ pub async fn list_series( }) .collect(); - Ok(Json(series)) + let next_cursor = if rows.len() > limit as usize { + items.last().map(|s| s.name.clone()) + } else { + None + }; + + Ok(Json(SeriesPage { + items: std::mem::take(&mut items), + next_cursor, + })) } diff --git a/apps/api/src/main.rs b/apps/api/src/main.rs index f4ca1d3..85885b9 100644 --- a/apps/api/src/main.rs +++ b/apps/api/src/main.rs @@ -6,6 +6,7 @@ mod libraries; mod openapi; mod pages; mod search; +mod settings; mod tokens; use std::{ @@ -107,6 +108,7 @@ async fn main() -> anyhow::Result<()> { .route("/folders", get(index_jobs::list_folders)) .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token)) .route("/admin/tokens/:id", delete(tokens::revoke_token)) + .merge(settings::settings_routes()) .route_layer(middleware::from_fn_with_state( state.clone(), auth::require_admin, diff --git a/apps/api/src/pages.rs b/apps/api/src/pages.rs index 8f0fc15..18143d1 100644 --- a/apps/api/src/pages.rs +++ b/apps/api/src/pages.rs @@ -1,6 +1,6 @@ use std::{ - io::Read, - path::Path, + io::{Read, Write}, + path::{Path, PathBuf}, sync::{atomic::Ordering, Arc}, time::Duration, }; @@ -11,7 +11,7 @@ use axum::{ http::{header, HeaderMap, HeaderValue, StatusCode}, response::{IntoResponse, Response}, }; -use image::{codecs::jpeg::JpegEncoder, codecs::png::PngEncoder, codecs::webp::WebPEncoder, ColorType, ImageEncoder}; +use image::{codecs::jpeg::JpegEncoder, codecs::png::PngEncoder, ColorType, ImageEncoder, ImageFormat}; use serde::Deserialize; use utoipa::ToSchema; use sha2::{Digest, Sha256}; @@ -29,6 +29,43 @@ fn remap_libraries_path(path: &str) -> String { 
path.to_string() } +fn get_image_cache_dir() -> PathBuf { + std::env::var("IMAGE_CACHE_DIR") + .map(PathBuf::from) + .unwrap_or_else(|_| PathBuf::from("/tmp/stripstream-image-cache")) +} + +fn get_cache_key(abs_path: &str, page: u32, format: &str, quality: u8, width: u32) -> String { + let mut hasher = Sha256::new(); + hasher.update(abs_path.as_bytes()); + hasher.update(page.to_le_bytes()); + hasher.update(format.as_bytes()); + hasher.update(quality.to_le_bytes()); + hasher.update(width.to_le_bytes()); + format!("{:x}", hasher.finalize()) +} + +fn get_cache_path(cache_key: &str, format: &OutputFormat) -> PathBuf { + let cache_dir = get_image_cache_dir(); + let prefix = &cache_key[..2]; + let ext = format.extension(); + cache_dir.join(prefix).join(format!("{}.{}", cache_key, ext)) +} + +fn read_from_disk_cache(cache_path: &Path) -> Option> { + std::fs::read(cache_path).ok() +} + +fn write_to_disk_cache(cache_path: &Path, data: &[u8]) -> Result<(), std::io::Error> { + if let Some(parent) = cache_path.parent() { + std::fs::create_dir_all(parent)?; + } + let mut file = std::fs::File::create(cache_path)?; + file.write_all(data)?; + file.sync_data()?; + Ok(()) +} + #[derive(Deserialize, ToSchema)] pub struct PageQuery { #[schema(value_type = Option, example = "webp")] @@ -109,10 +146,11 @@ pub async fn get_page( return Err(ApiError::bad_request("width must be <= 2160")); } - let cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension()); - if let Some(cached) = state.page_cache.lock().await.get(&cache_key).cloned() { + let memory_cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension()); + + if let Some(cached) = state.page_cache.lock().await.get(&memory_cache_key).cloned() { state.metrics.page_cache_hits.fetch_add(1, Ordering::Relaxed); - return Ok(image_response(cached, format.content_type())); + return Ok(image_response(cached, format.content_type(), None)); } state.metrics.page_cache_misses.fetch_add(1, Ordering::Relaxed); @@ 
-131,10 +169,18 @@ pub async fn get_page( let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?; let abs_path: String = row.get("abs_path"); - // Remap /libraries to LIBRARIES_ROOT_PATH for local development let abs_path = remap_libraries_path(&abs_path); let input_format: String = row.get("format"); + let disk_cache_key = get_cache_key(&abs_path, n, format.extension(), quality, width); + let cache_path = get_cache_path(&disk_cache_key, &format); + + if let Some(cached_bytes) = read_from_disk_cache(&cache_path) { + let bytes = Arc::new(cached_bytes); + state.page_cache.lock().await.put(memory_cache_key, bytes.clone()); + return Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key))); + } + let _permit = state .page_render_limit .clone() @@ -142,27 +188,39 @@ pub async fn get_page( .await .map_err(|_| ApiError::internal("render limiter unavailable"))?; + let abs_path_clone = abs_path.clone(); + let format_clone = format; let bytes = tokio::time::timeout( Duration::from_secs(12), - tokio::task::spawn_blocking(move || render_page(&abs_path, &input_format, n, &format, quality, width)), + tokio::task::spawn_blocking(move || { + render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width) + }), ) .await .map_err(|_| ApiError::internal("page rendering timeout"))? 
.map_err(|e| ApiError::internal(format!("render task failed: {e}")))??; - let bytes = Arc::new(bytes); - state.page_cache.lock().await.put(cache_key, bytes.clone()); + let _ = write_to_disk_cache(&cache_path, &bytes); - Ok(image_response(bytes, format.content_type())) + let bytes = Arc::new(bytes); + state.page_cache.lock().await.put(memory_cache_key, bytes.clone()); + + Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key))) } -fn image_response(bytes: Arc>, content_type: &str) -> Response { +fn image_response(bytes: Arc>, content_type: &str, etag_suffix: Option<&str>) -> Response { let mut headers = HeaderMap::new(); headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream"))); - headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=300")); - let mut hasher = Sha256::new(); - hasher.update(&*bytes); - let etag = format!("\"{:x}\"", hasher.finalize()); + headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable")); + + let etag = if let Some(suffix) = etag_suffix { + format!("\"{}\"", suffix) + } else { + let mut hasher = Sha256::new(); + hasher.update(&*bytes); + format!("\"{:x}\"", hasher.finalize()) + }; + if let Ok(v) = HeaderValue::from_str(&etag) { headers.insert(header::ETAG, v); } @@ -271,6 +329,13 @@ fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result Result, ApiError> { + let source_format = image::guess_format(input).ok(); + let needs_transcode = source_format.map(|f| !format_matches(&f, out_format)).unwrap_or(true); + + if width == 0 && !needs_transcode { + return Ok(input.to_vec()); + } + let mut image = image::load_from_memory(input).map_err(|e| ApiError::internal(format!("invalid source image: {e}")))?; if width > 0 { image = image.resize(width, u32::MAX, image::imageops::FilterType::Lanczos3); @@ -293,15 +358,27 @@ fn transcode_image(input: &[u8], out_format: 
&OutputFormat, quality: u8, width: .map_err(|e| ApiError::internal(format!("png encode failed: {e}")))?; } OutputFormat::Webp => { - let encoder = WebPEncoder::new_lossless(&mut out); - encoder - .write_image(&rgba, w, h, ColorType::Rgba8.into()) - .map_err(|e| ApiError::internal(format!("webp encode failed: {e}")))?; + let rgb_data: Vec = rgba + .pixels() + .flat_map(|p| [p[0], p[1], p[2]]) + .collect(); + let webp_data = webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h) + .encode(f32::max(quality as f32, 85.0)); + out.extend_from_slice(&webp_data); } } Ok(out) } +fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool { + match (source, target) { + (ImageFormat::Jpeg, OutputFormat::Jpeg) => true, + (ImageFormat::Png, OutputFormat::Png) => true, + (ImageFormat::WebP, OutputFormat::Webp) => true, + _ => false, + } +} + fn is_image_name(name: &str) -> bool { name.ends_with(".jpg") || name.ends_with(".jpeg") diff --git a/apps/api/src/settings.rs b/apps/api/src/settings.rs new file mode 100644 index 0000000..151b7e4 --- /dev/null +++ b/apps/api/src/settings.rs @@ -0,0 +1,260 @@ +use axum::{ + extract::{Query, State}, + response::IntoResponse, + routing::{get, post}, + Json, Router, +}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use sqlx::Row; + +use crate::{error::ApiError, AppState}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ImageProcessingSettings { + pub format: String, + pub quality: u8, + pub filter: String, + pub max_width: u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CacheSettings { + pub enabled: bool, + pub directory: String, + pub max_size_mb: u32, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct LimitsSettings { + pub concurrent_renders: u8, + pub timeout_seconds: u8, + pub rate_limit_per_second: u16, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AppSettings { + pub image_processing: ImageProcessingSettings, + pub cache: 
CacheSettings, + pub limits: LimitsSettings, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct UpdateSettingRequest { + pub value: Value, +} + +pub fn settings_routes() -> Router { + Router::new() + .route("/settings", get(get_settings)) + .route("/settings/:key", get(get_setting).post(update_setting)) + .route("/settings/cache/clear", post(clear_cache)) + .route("/settings/cache/stats", get(get_cache_stats)) +} + +async fn get_settings(State(state): State) -> Result, ApiError> { + let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#) + .fetch_all(&state.pool) + .await?; + + let mut settings = serde_json::Map::new(); + for row in rows { + let key: String = row.get("key"); + let value: Value = row.get("value"); + settings.insert(key, value); + } + + Ok(Json(Value::Object(settings))) +} + +async fn get_setting( + State(state): State, + axum::extract::Path(key): axum::extract::Path, +) -> Result, ApiError> { + let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = $1"#) + .bind(&key) + .fetch_optional(&state.pool) + .await?; + + match row { + Some(row) => { + let value: Value = row.get("value"); + Ok(Json(value)) + } + None => Err(ApiError::not_found(format!("setting '{}' not found", key))), + } +} + +async fn update_setting( + State(state): State, + axum::extract::Path(key): axum::extract::Path, + Json(body): Json, +) -> Result, ApiError> { + let row = sqlx::query( + r#" + INSERT INTO app_settings (key, value, updated_at) + VALUES ($1, $2, CURRENT_TIMESTAMP) + ON CONFLICT (key) + DO UPDATE SET value = $2, updated_at = CURRENT_TIMESTAMP + RETURNING value + "#, + ) + .bind(&key) + .bind(&body.value) + .fetch_one(&state.pool) + .await?; + + let value: Value = row.get("value"); + Ok(Json(value)) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ClearCacheResponse { + pub success: bool, + pub message: String, +} + +async fn clear_cache(State(_state): State) -> Result, ApiError> { + let cache_dir = 
std::env::var("IMAGE_CACHE_DIR") + .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); + + let result = tokio::task::spawn_blocking(move || { + if std::path::Path::new(&cache_dir).exists() { + match std::fs::remove_dir_all(&cache_dir) { + Ok(_) => ClearCacheResponse { + success: true, + message: format!("Cache directory '{}' cleared successfully", cache_dir), + }, + Err(e) => ClearCacheResponse { + success: false, + message: format!("Failed to clear cache: {}", e), + }, + } + } else { + ClearCacheResponse { + success: true, + message: format!("Cache directory '{}' does not exist, nothing to clear", cache_dir), + } + } + }) + .await + .map_err(|e| ApiError::internal(format!("cache clear failed: {}", e)))?; + + Ok(Json(result)) +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CacheStats { + pub total_size_mb: f64, + pub file_count: u64, + pub directory: String, +} + +async fn get_cache_stats(State(_state): State) -> Result, ApiError> { + let cache_dir = std::env::var("IMAGE_CACHE_DIR") + .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); + + let cache_dir_clone = cache_dir.clone(); + let stats = tokio::task::spawn_blocking(move || { + let path = std::path::Path::new(&cache_dir_clone); + if !path.exists() { + return CacheStats { + total_size_mb: 0.0, + file_count: 0, + directory: cache_dir_clone, + }; + } + + let mut total_size: u64 = 0; + let mut file_count: u64 = 0; + + fn visit_dirs( + dir: &std::path::Path, + total_size: &mut u64, + file_count: &mut u64, + ) -> std::io::Result<()> { + if dir.is_dir() { + for entry in std::fs::read_dir(dir)? 
{ + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + visit_dirs(&path, total_size, file_count)?; + } else { + *total_size += entry.metadata()?.len(); + *file_count += 1; + } + } + } + Ok(()) + } + + let _ = visit_dirs(path, &mut total_size, &mut file_count); + + CacheStats { + total_size_mb: total_size as f64 / 1024.0 / 1024.0, + file_count, + directory: cache_dir_clone, + } + }) + .await + .map_err(|e| ApiError::internal(format!("cache stats failed: {}", e)))?; + + Ok(Json(stats)) +} + +pub async fn get_settings_from_db( + pool: &sqlx::PgPool, +) -> Result { + let settings = get_settings_from_db_raw(pool).await?; + + let image_processing = settings + .get("image_processing") + .and_then(|v| serde_json::from_value(v.clone()).ok()) + .unwrap_or_else(|| ImageProcessingSettings { + format: "webp".to_string(), + quality: 85, + filter: "lanczos3".to_string(), + max_width: 2160, + }); + + let cache = settings + .get("cache") + .and_then(|v| serde_json::from_value(v.clone()).ok()) + .unwrap_or_else(|| CacheSettings { + enabled: true, + directory: "/tmp/stripstream-image-cache".to_string(), + max_size_mb: 10000, + }); + + let limits = settings + .get("limits") + .and_then(|v| serde_json::from_value(v.clone()).ok()) + .unwrap_or_else(|| LimitsSettings { + concurrent_renders: 4, + timeout_seconds: 12, + rate_limit_per_second: 120, + }); + + Ok(AppSettings { + image_processing, + cache, + limits, + }) +} + +async fn get_settings_from_db_raw( + pool: &sqlx::PgPool, +) -> Result, ApiError> { + let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#) + .fetch_all(pool) + .await?; + + let mut settings = std::collections::HashMap::new(); + for row in rows { + let key: String = row.get("key"); + let value: Value = row.get("value"); + settings.insert(key, value); + } + + Ok(settings) +} diff --git a/apps/backoffice/app/api/settings/[key]/route.ts b/apps/backoffice/app/api/settings/[key]/route.ts new file mode 100644 index 0000000..d98cd8f --- /dev/null 
+++ b/apps/backoffice/app/api/settings/[key]/route.ts @@ -0,0 +1,59 @@ +import { NextRequest, NextResponse } from "next/server"; + +export async function GET( + request: NextRequest, + { params }: { params: Promise<{ key: string }> } +) { + try { + const { key } = await params; + const baseUrl = process.env.API_BASE_URL || "http://api:8080"; + const token = process.env.API_BOOTSTRAP_TOKEN; + + const response = await fetch(`${baseUrl}/settings/${key}`, { + headers: { + Authorization: `Bearer ${token}`, + }, + cache: "no-store" + }); + + if (!response.ok) { + return NextResponse.json({ error: "Failed to fetch setting" }, { status: response.status }); + } + + const data = await response.json(); + return NextResponse.json(data); + } catch (error) { + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} + +export async function POST( + request: NextRequest, + { params }: { params: Promise<{ key: string }> } +) { + try { + const { key } = await params; + const baseUrl = process.env.API_BASE_URL || "http://api:8080"; + const token = process.env.API_BOOTSTRAP_TOKEN; + const body = await request.json(); + + const response = await fetch(`${baseUrl}/settings/${key}`, { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + "Content-Type": "application/json", + }, + body: JSON.stringify(body), + cache: "no-store" + }); + + if (!response.ok) { + return NextResponse.json({ error: "Failed to update setting" }, { status: response.status }); + } + + const data = await response.json(); + return NextResponse.json(data); + } catch (error) { + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/api/settings/cache/clear/route.ts b/apps/backoffice/app/api/settings/cache/clear/route.ts new file mode 100644 index 0000000..8a554c4 --- /dev/null +++ b/apps/backoffice/app/api/settings/cache/clear/route.ts @@ -0,0 +1,25 @@ +import { NextRequest, NextResponse } from "next/server"; + 
+export async function POST(request: NextRequest) { + try { + const baseUrl = process.env.API_BASE_URL || "http://api:8080"; + const token = process.env.API_BOOTSTRAP_TOKEN; + + const response = await fetch(`${baseUrl}/settings/cache/clear`, { + method: "POST", + headers: { + Authorization: `Bearer ${token}`, + }, + cache: "no-store" + }); + + if (!response.ok) { + return NextResponse.json({ error: "Failed to clear cache" }, { status: response.status }); + } + + const data = await response.json(); + return NextResponse.json(data); + } catch (error) { + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/api/settings/cache/stats/route.ts b/apps/backoffice/app/api/settings/cache/stats/route.ts new file mode 100644 index 0000000..b73c7b7 --- /dev/null +++ b/apps/backoffice/app/api/settings/cache/stats/route.ts @@ -0,0 +1,24 @@ +import { NextRequest, NextResponse } from "next/server"; + +export async function GET(request: NextRequest) { + try { + const baseUrl = process.env.API_BASE_URL || "http://api:8080"; + const token = process.env.API_BOOTSTRAP_TOKEN; + + const response = await fetch(`${baseUrl}/settings/cache/stats`, { + headers: { + Authorization: `Bearer ${token}`, + }, + cache: "no-store" + }); + + if (!response.ok) { + return NextResponse.json({ error: "Failed to fetch cache stats" }, { status: response.status }); + } + + const data = await response.json(); + return NextResponse.json(data); + } catch (error) { + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/api/settings/route.ts b/apps/backoffice/app/api/settings/route.ts new file mode 100644 index 0000000..1e48dae --- /dev/null +++ b/apps/backoffice/app/api/settings/route.ts @@ -0,0 +1,24 @@ +import { NextRequest, NextResponse } from "next/server"; + +export async function GET(request: NextRequest) { + try { + const baseUrl = process.env.API_BASE_URL || 
"http://api:8080"; + const token = process.env.API_BOOTSTRAP_TOKEN; + + const response = await fetch(`${baseUrl}/settings`, { + headers: { + Authorization: `Bearer ${token}`, + }, + cache: "no-store" + }); + + if (!response.ok) { + return NextResponse.json({ error: "Failed to fetch settings" }, { status: response.status }); + } + + const data = await response.json(); + return NextResponse.json(data); + } catch (error) { + return NextResponse.json({ error: "Internal server error" }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/components/ui/Icon.tsx b/apps/backoffice/app/components/ui/Icon.tsx index cc29ae9..6e3912f 100644 --- a/apps/backoffice/app/components/ui/Icon.tsx +++ b/apps/backoffice/app/components/ui/Icon.tsx @@ -1,4 +1,4 @@ -type IconName = "dashboard" | "books" | "libraries" | "jobs" | "tokens" | "series"; +type IconName = "dashboard" | "books" | "libraries" | "jobs" | "tokens" | "series" | "settings"; interface PageIconProps { name: IconName; @@ -36,6 +36,12 @@ const icons: Record = { ), + settings: ( + + + + + ), }; const colors: Record = { @@ -45,6 +51,7 @@ const colors: Record = { jobs: "text-warning", tokens: "text-error", series: "text-primary", + settings: "text-muted-foreground", }; export function PageIcon({ name, className = "" }: PageIconProps) { @@ -88,6 +95,12 @@ export function NavIcon({ name, className = "" }: { name: IconName; className?: ), + settings: ( + + + + + ), }; return {navIcons[name]}; diff --git a/apps/backoffice/app/layout.tsx b/apps/backoffice/app/layout.tsx index 270b32a..7e5dde2 100644 --- a/apps/backoffice/app/layout.tsx +++ b/apps/backoffice/app/layout.tsx @@ -14,9 +14,9 @@ export const metadata: Metadata = { }; type NavItem = { - href: "/" | "/books" | "/libraries" | "/jobs" | "/tokens"; + href: "/" | "/books" | "/libraries" | "/jobs" | "/tokens" | "/settings"; label: string; - icon: "dashboard" | "books" | "libraries" | "jobs" | "tokens"; + icon: "dashboard" | "books" | "libraries" | "jobs" | "tokens" | 
"settings"; }; const navItems: NavItem[] = [ @@ -25,6 +25,7 @@ const navItems: NavItem[] = [ { href: "/libraries", label: "Libraries", icon: "libraries" }, { href: "/jobs", label: "Jobs", icon: "jobs" }, { href: "/tokens", label: "Tokens", icon: "tokens" }, + { href: "/settings", label: "Settings", icon: "settings" }, ]; export default function RootLayout({ children }: { children: ReactNode }) { diff --git a/apps/backoffice/app/libraries/[id]/series/page.tsx b/apps/backoffice/app/libraries/[id]/series/page.tsx index 5e638da..2775f2d 100644 --- a/apps/backoffice/app/libraries/[id]/series/page.tsx +++ b/apps/backoffice/app/libraries/[id]/series/page.tsx @@ -1,4 +1,5 @@ -import { fetchLibraries, fetchSeries, getBookCoverUrl, LibraryDto, SeriesDto } from "../../../../lib/api"; +import { fetchLibraries, fetchSeries, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "../../../../lib/api"; +import { CursorPagination } from "../../../components/ui"; import Image from "next/image"; import Link from "next/link"; import { notFound } from "next/navigation"; @@ -7,26 +8,36 @@ import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader"; export const dynamic = "force-dynamic"; export default async function LibrarySeriesPage({ - params + params, + searchParams }: { params: Promise<{ id: string }>; + searchParams: Promise<{ [key: string]: string | string[] | undefined }>; }) { const { id } = await params; + const searchParamsAwaited = await searchParams; + const cursor = typeof searchParamsAwaited.cursor === "string" ? searchParamsAwaited.cursor : undefined; + const limit = typeof searchParamsAwaited.limit === "string" ? 
parseInt(searchParamsAwaited.limit) : 20; - const [library, series] = await Promise.all([ + const [library, seriesPage] = await Promise.all([ fetchLibraries().then(libs => libs.find(l => l.id === id)), - fetchSeries(id).catch(() => [] as SeriesDto[]) + fetchSeries(id, cursor, limit).catch(() => ({ items: [] as SeriesDto[], next_cursor: null }) as SeriesPageDto) ]); if (!library) { notFound(); } + const series = seriesPage.items; + const nextCursor = seriesPage.next_cursor; + const hasNextPage = !!nextCursor; + const hasPrevPage = !!cursor; + return (
@@ -36,35 +47,45 @@ export default async function LibrarySeriesPage({ /> {series.length > 0 ? ( -
- {series.map((s) => ( - -
-
- {`Cover + <> +
+ {series.map((s) => ( + +
+
+ {`Cover +
+
+

+ {s.name === "unclassified" ? "Unclassified" : s.name} +

+

+ {s.book_count} book{s.book_count !== 1 ? 's' : ''} +

+
-
-

- {s.name === "unclassified" ? "Unclassified" : s.name} -

-

- {s.book_count} book{s.book_count !== 1 ? 's' : ''} -

-
-
- - ))} -
+ + ))} +
+ + + ) : (

No series found in this library

diff --git a/apps/backoffice/app/libraries/page.tsx b/apps/backoffice/app/libraries/page.tsx index d0f2322..6151e0a 100644 --- a/apps/backoffice/app/libraries/page.tsx +++ b/apps/backoffice/app/libraries/page.tsx @@ -32,8 +32,8 @@ export default async function LibrariesPage() { const seriesCounts = await Promise.all( libraries.map(async (lib) => { try { - const series = await fetchSeries(lib.id); - return { id: lib.id, count: series.length }; + const seriesPage = await fetchSeries(lib.id); + return { id: lib.id, count: seriesPage.items.length }; } catch { return { id: lib.id, count: 0 }; } diff --git a/apps/backoffice/app/settings/SettingsPage.tsx b/apps/backoffice/app/settings/SettingsPage.tsx new file mode 100644 index 0000000..b0edaf4 --- /dev/null +++ b/apps/backoffice/app/settings/SettingsPage.tsx @@ -0,0 +1,303 @@ +"use client"; + +import { useState } from "react"; +import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow } from "../components/ui"; +import { Settings, CacheStats, ClearCacheResponse } from "../../lib/api"; + +interface SettingsPageProps { + initialSettings: Settings; + initialCacheStats: CacheStats; +} + +export default function SettingsPage({ initialSettings, initialCacheStats }: SettingsPageProps) { + const [settings, setSettings] = useState(initialSettings); + const [cacheStats, setCacheStats] = useState(initialCacheStats); + const [isClearing, setIsClearing] = useState(false); + const [clearResult, setClearResult] = useState(null); + const [isSaving, setIsSaving] = useState(false); + const [saveMessage, setSaveMessage] = useState(null); + + async function handleUpdateSetting(key: string, value: unknown) { + setIsSaving(true); + setSaveMessage(null); + try { + const response = await fetch(`/api/settings/${key}`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ value }) + }); + if (response.ok) { + setSaveMessage("Settings saved 
successfully"); + setTimeout(() => setSaveMessage(null), 3000); + } else { + setSaveMessage("Failed to save settings"); + } + } catch (error) { + setSaveMessage("Error saving settings"); + } finally { + setIsSaving(false); + } + } + + async function handleClearCache() { + setIsClearing(true); + setClearResult(null); + try { + const response = await fetch("/api/settings/cache/clear", { method: "POST" }); + const result = await response.json(); + setClearResult(result); + // Refresh cache stats + const statsResponse = await fetch("/api/settings/cache/stats"); + if (statsResponse.ok) { + const stats = await statsResponse.json(); + setCacheStats(stats); + } + } catch (error) { + setClearResult({ success: false, message: "Failed to clear cache" }); + } finally { + setIsClearing(false); + } + } + + return ( + <> +
+

+ + + + + Settings +

+
+ + {saveMessage && ( + + +

{saveMessage}

+
+
+ )} + + {/* Image Processing Settings */} + + + Image Processing + Configure how images are processed and compressed + + +
+ + + + { + const newSettings = { ...settings, image_processing: { ...settings.image_processing, format: e.target.value } }; + setSettings(newSettings); + handleUpdateSetting("image_processing", newSettings.image_processing); + }} + > + + + + + + + + { + const quality = parseInt(e.target.value) || 85; + const newSettings = { ...settings, image_processing: { ...settings.image_processing, quality } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)} + /> + + + + + + { + const newSettings = { ...settings, image_processing: { ...settings.image_processing, filter: e.target.value } }; + setSettings(newSettings); + handleUpdateSetting("image_processing", newSettings.image_processing); + }} + > + + + + + + + + { + const max_width = parseInt(e.target.value) || 2160; + const newSettings = { ...settings, image_processing: { ...settings.image_processing, max_width } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)} + /> + + +
+
+
+ + {/* Cache Settings */} + + + Cache + Manage the image cache and storage + + +
+
+
+

Cache Size

+

{cacheStats.total_size_mb.toFixed(2)} MB

+
+
+

Files

+

{cacheStats.file_count}

+
+
+

Directory

+

{cacheStats.directory}

+
+
+ + {clearResult && ( +
+ {clearResult.message} +
+ )} + + + + + { + const newSettings = { ...settings, cache: { ...settings.cache, directory: e.target.value } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("cache", settings.cache)} + /> + + + + { + const max_size_mb = parseInt(e.target.value) || 10000; + const newSettings = { ...settings, cache: { ...settings.cache, max_size_mb } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("cache", settings.cache)} + /> + + + + +
+
+
+ + {/* Limits Settings */} + + + Performance Limits + Configure API performance and rate limiting + + +
+ + + + { + const concurrent_renders = parseInt(e.target.value) || 4; + const newSettings = { ...settings, limits: { ...settings.limits, concurrent_renders } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("limits", settings.limits)} + /> + + + + { + const timeout_seconds = parseInt(e.target.value) || 12; + const newSettings = { ...settings, limits: { ...settings.limits, timeout_seconds } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("limits", settings.limits)} + /> + + + + { + const rate_limit_per_second = parseInt(e.target.value) || 120; + const newSettings = { ...settings, limits: { ...settings.limits, rate_limit_per_second } }; + setSettings(newSettings); + }} + onBlur={() => handleUpdateSetting("limits", settings.limits)} + /> + + +

+ Note: Changes to limits require a server restart to take effect. +

+
+
+
+ + ); +} diff --git a/apps/backoffice/app/settings/page.tsx b/apps/backoffice/app/settings/page.tsx new file mode 100644 index 0000000..e12f649 --- /dev/null +++ b/apps/backoffice/app/settings/page.tsx @@ -0,0 +1,20 @@ +import { getSettings, getCacheStats } from "../../lib/api"; +import SettingsPage from "./SettingsPage"; + +export const dynamic = "force-dynamic"; + +export default async function SettingsPageWrapper() { + const settings = await getSettings().catch(() => ({ + image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 }, + cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 }, + limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 } + })); + + const cacheStats = await getCacheStats().catch(() => ({ + total_size_mb: 0, + file_count: 0, + directory: "/tmp/stripstream-image-cache" + })); + + return ; +} diff --git a/apps/backoffice/lib/api.ts b/apps/backoffice/lib/api.ts index b4f6c48..e680e74 100644 --- a/apps/backoffice/lib/api.ts +++ b/apps/backoffice/lib/api.ts @@ -209,8 +209,17 @@ export async function fetchBooks(libraryId?: string, series?: string, cursor?: s return apiFetch(`/books?${params.toString()}`); } -export async function fetchSeries(libraryId: string): Promise { - return apiFetch(`/libraries/${libraryId}/series`); +export type SeriesPageDto = { + items: SeriesDto[]; + next_cursor: string | null; +}; + +export async function fetchSeries(libraryId: string, cursor?: string, limit: number = 50): Promise { + const params = new URLSearchParams(); + if (cursor) params.set("cursor", cursor); + params.set("limit", limit.toString()); + + return apiFetch(`/libraries/${libraryId}/series?${params.toString()}`); } export async function searchBooks(query: string, libraryId?: string, limit: number = 20): Promise { @@ -227,3 +236,52 @@ export function getBookCoverUrl(bookId: string): string { // Le navigateur ne peut pas accéder à http://api:8080 (hostname Docker 
interne) return `/api/books/${bookId}/pages/1?format=webp&width=200`; } + +export type Settings = { + image_processing: { + format: string; + quality: number; + filter: string; + max_width: number; + }; + cache: { + enabled: boolean; + directory: string; + max_size_mb: number; + }; + limits: { + concurrent_renders: number; + timeout_seconds: number; + rate_limit_per_second: number; + }; +}; + +export type CacheStats = { + total_size_mb: number; + file_count: number; + directory: string; +}; + +export type ClearCacheResponse = { + success: boolean; + message: string; +}; + +export async function getSettings() { + return apiFetch("/settings"); +} + +export async function updateSetting(key: string, value: unknown) { + return apiFetch(`/settings/${key}`, { + method: "POST", + body: JSON.stringify({ value }) + }); +} + +export async function getCacheStats() { + return apiFetch("/settings/cache/stats"); +} + +export async function clearCache() { + return apiFetch("/settings/cache/clear", { method: "POST" }); +} diff --git a/apps/backoffice/next-env.d.ts b/apps/backoffice/next-env.d.ts index c4b7818..9edff1c 100644 --- a/apps/backoffice/next-env.d.ts +++ b/apps/backoffice/next-env.d.ts @@ -1,6 +1,6 @@ /// /// -import "./.next/dev/types/routes.d.ts"; +import "./.next/types/routes.d.ts"; // NOTE: This file should not be edited // see https://nextjs.org/docs/app/api-reference/config/typescript for more information. 
diff --git a/infra/migrations/0008_add_settings.sql b/infra/migrations/0008_add_settings.sql new file mode 100644 index 0000000..65b6a69 --- /dev/null +++ b/infra/migrations/0008_add_settings.sql @@ -0,0 +1,11 @@ +CREATE TABLE IF NOT EXISTS app_settings ( + key TEXT PRIMARY KEY, + value JSONB NOT NULL, + updated_at TIMESTAMPTZ DEFAULT CURRENT_TIMESTAMP +); + +INSERT INTO app_settings (key, value) VALUES + ('image_processing', '{"format": "webp", "quality": 85, "filter": "lanczos3", "max_width": 2160}'), + ('cache', '{"enabled": true, "directory": "/tmp/stripstream-image-cache", "max_size_mb": 10000}'), + ('limits', '{"concurrent_renders": 4, "timeout_seconds": 12, "rate_limit_per_second": 120}') +ON CONFLICT DO NOTHING;