add page streaming, admin ui flows, and runtime hardening

This commit is contained in:
2026-03-05 15:26:47 +01:00
parent 6eaf2ba5dc
commit 20f9af6cba
14 changed files with 957 additions and 33 deletions

View File

@@ -7,7 +7,11 @@ license.workspace = true
[dependencies]
anyhow.workspace = true
axum.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
stripstream-core = { path = "../../crates/core" }
tokio.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
uuid.workspace = true

View File

@@ -1,6 +1,79 @@
use axum::{response::Html, routing::get, Router};
use axum::{
extract::{Form, State},
response::{Html, Redirect},
routing::{get, post},
Router,
};
use reqwest::Client;
use serde::Deserialize;
use stripstream_core::config::AdminUiConfig;
use tracing::info;
use uuid::Uuid;
/// Shared state handed to every admin-ui handler.
#[derive(Clone)]
struct AppState {
    /// Base URL of the stripstream API, used as prefix for every call.
    api_base_url: String,
    /// Bearer token sent on every API request.
    api_token: String,
    /// Reused reqwest client (connection pooling).
    client: Client,
}
/// Library row as returned by `GET /libraries` on the API.
#[derive(Deserialize)]
struct LibraryDto {
    id: Uuid,
    name: String,
    root_path: String,
    enabled: bool,
}

/// Index job row as returned by `GET /index/status`.
#[derive(Deserialize)]
struct IndexJobDto {
    id: Uuid,
    /// `type` is a Rust keyword, hence the serde rename.
    #[serde(rename = "type")]
    kind: String,
    status: String,
    created_at: String,
}

/// API token row as returned by `GET /admin/tokens` (no secret included).
#[derive(Deserialize)]
struct TokenDto {
    id: Uuid,
    name: String,
    scope: String,
    /// Leading characters of the token, shown for identification only.
    prefix: String,
    /// Present once the token has been revoked.
    revoked_at: Option<String>,
}

/// Response body of `POST /admin/tokens`: the one-time full token secret.
#[derive(Deserialize)]
struct CreatedToken {
    token: String,
}

/// Form payload for `POST /libraries/add`.
#[derive(Deserialize)]
struct AddLibraryForm {
    name: String,
    root_path: String,
}

/// Form payload for `POST /libraries/delete`.
#[derive(Deserialize)]
struct DeleteLibraryForm {
    id: Uuid,
}

/// Form payload for `POST /jobs/rebuild`; an empty string means "all libraries".
#[derive(Deserialize)]
struct RebuildForm {
    library_id: String,
}

/// Form payload for `POST /tokens/create`.
#[derive(Deserialize)]
struct CreateTokenForm {
    name: String,
    scope: String,
}

/// Form payload for `POST /tokens/revoke`.
#[derive(Deserialize)]
struct RevokeTokenForm {
    id: Uuid,
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
@@ -10,10 +83,25 @@ async fn main() -> anyhow::Result<()> {
)
.init();
let config = AdminUiConfig::from_env();
let config = AdminUiConfig::from_env()?;
let state = AppState {
api_base_url: config.api_base_url,
api_token: config.api_token,
client: Client::new(),
};
let app = Router::new()
.route("/health", get(health))
.route("/", get(index));
.route("/", get(index))
.route("/libraries", get(libraries_page))
.route("/libraries/add", post(add_library))
.route("/libraries/delete", post(delete_library))
.route("/jobs", get(jobs_page))
.route("/jobs/rebuild", post(rebuild_jobs))
.route("/tokens", get(tokens_page))
.route("/tokens/create", post(create_token))
.route("/tokens/revoke", post(revoke_token))
.with_state(state);
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "admin ui listening");
@@ -25,8 +113,225 @@ async fn health() -> &'static str {
"ok"
}
async fn index() -> Html<&'static str> {
Html(
"<html><body><h1>Stripstream Admin</h1><p>UI skeleton ready. Next: libraries, jobs, tokens screens.</p></body></html>",
/// Dashboard landing page with links to the three admin sections.
async fn index() -> Html<String> {
    let body = "<h1>Stripstream Admin</h1><p>Gestion des librairies, jobs d'indexation et tokens API.</p><p><a href='/libraries'>Libraries</a> | <a href='/jobs'>Jobs</a> | <a href='/tokens'>Tokens</a></p>";
    Html(layout("Dashboard", body))
}
/// GET /libraries: renders the library list plus add/delete forms.
async fn libraries_page(State(state): State<AppState>) -> Html<String> {
    // API failures degrade to an empty table instead of an error page.
    let libraries = fetch_libraries(&state).await.unwrap_or_default();
    let mut rows = String::new();
    for lib in libraries {
        // Name and path are escaped; the id is a Uuid and safe as-is.
        rows.push_str(&format!(
            "<tr><td>{}</td><td><code>{}</code></td><td>{}</td><td><form method='post' action='/libraries/delete'><input type='hidden' name='id' value='{}'/><button type='submit'>Delete</button></form></td></tr>",
            html_escape(&lib.name),
            html_escape(&lib.root_path),
            if lib.enabled { "yes" } else { "no" },
            lib.id
        ));
    }
    let content = format!(
        "<h1>Libraries</h1>
<form method='post' action='/libraries/add'>
<input name='name' placeholder='Name' required />
<input name='root_path' placeholder='/libraries/demo' required />
<button type='submit'>Add</button>
</form>
<table border='1' cellpadding='6'>
<tr><th>Name</th><th>Root Path</th><th>Enabled</th><th>Actions</th></tr>
{}
</table>",
        rows
    );
    Html(layout("Libraries", &content))
}
async fn add_library(State(state): State<AppState>, Form(form): Form<AddLibraryForm>) -> Redirect {
let _ = state
.client
.post(format!("{}/libraries", state.api_base_url))
.bearer_auth(&state.api_token)
.json(&serde_json::json!({"name": form.name, "root_path": form.root_path}))
.send()
.await;
Redirect::to("/libraries")
}
async fn delete_library(State(state): State<AppState>, Form(form): Form<DeleteLibraryForm>) -> Redirect {
let _ = state
.client
.delete(format!("{}/libraries/{}", state.api_base_url, form.id))
.bearer_auth(&state.api_token)
.send()
.await;
Redirect::to("/libraries")
}
/// GET /jobs: lists index jobs and offers a rebuild form.
async fn jobs_page(State(state): State<AppState>) -> Html<String> {
    // API failures degrade to an empty table instead of an error page.
    let jobs = fetch_jobs(&state).await.unwrap_or_default();
    let mut rows = String::new();
    for job in jobs {
        rows.push_str(&format!(
            "<tr><td><code>{}</code></td><td>{}</td><td>{}</td><td>{}</td></tr>",
            job.id,
            html_escape(&job.kind),
            html_escape(&job.status),
            html_escape(&job.created_at)
        ));
    }
    let content = format!(
        "<h1>Index Jobs</h1>
<form method='post' action='/jobs/rebuild'>
<input name='library_id' placeholder='optional library UUID' />
<button type='submit'>Queue Rebuild</button>
</form>
<table border='1' cellpadding='6'>
<tr><th>ID</th><th>Type</th><th>Status</th><th>Created</th></tr>
{}
</table>",
        rows
    );
    Html(layout("Jobs", &content))
}
async fn rebuild_jobs(State(state): State<AppState>, Form(form): Form<RebuildForm>) -> Redirect {
let body = if form.library_id.trim().is_empty() {
serde_json::json!({})
} else {
serde_json::json!({"library_id": form.library_id.trim()})
};
let _ = state
.client
.post(format!("{}/index/rebuild", state.api_base_url))
.bearer_auth(&state.api_token)
.json(&body)
.send()
.await;
Redirect::to("/jobs")
}
/// GET /tokens: lists API tokens (prefix only, never the secret) with
/// create and revoke forms.
async fn tokens_page(State(state): State<AppState>) -> Html<String> {
    // API failures degrade to an empty table instead of an error page.
    let tokens = fetch_tokens(&state).await.unwrap_or_default();
    let mut rows = String::new();
    for token in tokens {
        rows.push_str(&format!(
            "<tr><td>{}</td><td>{}</td><td><code>{}</code></td><td>{}</td><td><form method='post' action='/tokens/revoke'><input type='hidden' name='id' value='{}'/><button type='submit'>Revoke</button></form></td></tr>",
            html_escape(&token.name),
            html_escape(&token.scope),
            html_escape(&token.prefix),
            if token.revoked_at.is_some() { "yes" } else { "no" },
            token.id
        ));
    }
    let content = format!(
        "<h1>API Tokens</h1>
<form method='post' action='/tokens/create'>
<input name='name' placeholder='token name' required />
<select name='scope'>
<option value='read'>read</option>
<option value='admin'>admin</option>
</select>
<button type='submit'>Create Token</button>
</form>
<table border='1' cellpadding='6'>
<tr><th>Name</th><th>Scope</th><th>Prefix</th><th>Revoked</th><th>Actions</th></tr>
{}
</table>",
        rows
    );
    Html(layout("Tokens", &content))
}
/// POST /tokens/create: asks the API to mint a token and renders the secret
/// exactly once; the API never returns it again.
async fn create_token(State(state): State<AppState>, Form(form): Form<CreateTokenForm>) -> Html<String> {
    let response = state
        .client
        .post(format!("{}/admin/tokens", state.api_base_url))
        .bearer_auth(&state.api_token)
        .json(&serde_json::json!({"name": form.name, "scope": form.scope}))
        .send()
        .await;
    match response {
        Ok(resp) if resp.status().is_success() => {
            let created: Result<CreatedToken, _> = resp.json().await;
            match created {
                // Success: show the one-time secret, HTML-escaped.
                Ok(token) => Html(layout(
                    "Token Created",
                    &format!(
                        "<h1>Token Created</h1><p>Copie ce token maintenant (il ne sera plus affiche):</p><pre>{}</pre><p><a href='/tokens'>Back to tokens</a></p>",
                        html_escape(&token.token)
                    ),
                )),
                // Token was created server-side, but the body did not decode.
                Err(_) => Html(layout("Error", "<p>Token created but response parse failed.</p><p><a href='/tokens'>Back</a></p>")),
            }
        }
        // Network error or non-2xx status from the API.
        _ => Html(layout("Error", "<p>Token creation failed.</p><p><a href='/tokens'>Back</a></p>")),
    }
}
async fn revoke_token(State(state): State<AppState>, Form(form): Form<RevokeTokenForm>) -> Redirect {
let _ = state
.client
.delete(format!("{}/admin/tokens/{}", state.api_base_url, form.id))
.bearer_auth(&state.api_token)
.send()
.await;
Redirect::to("/tokens")
}
/// Fetches all libraries from the API (`GET /libraries`).
async fn fetch_libraries(state: &AppState) -> Result<Vec<LibraryDto>, reqwest::Error> {
    let url = format!("{}/libraries", state.api_base_url);
    let response = state
        .client
        .get(url)
        .bearer_auth(&state.api_token)
        .send()
        .await?;
    response.json().await
}
/// Fetches all index jobs from the API (`GET /index/status`).
async fn fetch_jobs(state: &AppState) -> Result<Vec<IndexJobDto>, reqwest::Error> {
    let url = format!("{}/index/status", state.api_base_url);
    let response = state
        .client
        .get(url)
        .bearer_auth(&state.api_token)
        .send()
        .await?;
    response.json().await
}
/// Fetches all API tokens from the API (`GET /admin/tokens`).
async fn fetch_tokens(state: &AppState) -> Result<Vec<TokenDto>, reqwest::Error> {
    let url = format!("{}/admin/tokens", state.api_base_url);
    let response = state
        .client
        .get(url)
        .bearer_auth(&state.api_token)
        .send()
        .await?;
    response.json().await
}
/// Wraps pre-built page markup in the shared HTML chrome (head, inline
/// styles, nav bar). The title is escaped; `content` is trusted markup
/// already assembled by the page handlers.
fn layout(title: &str, content: &str) -> String {
    format!(
        "<!doctype html><html><head><meta charset='utf-8'><meta name='viewport' content='width=device-width,initial-scale=1'><title>{}</title><style>body{{font-family:ui-sans-serif,system-ui;margin:2rem;line-height:1.4}}table{{width:100%;border-collapse:collapse;margin-top:1rem}}th,td{{text-align:left}}input,select,button{{padding:.45rem;margin:.25rem}}</style></head><body><nav><a href='/'>Home</a> | <a href='/libraries'>Libraries</a> | <a href='/jobs'>Jobs</a> | <a href='/tokens'>Tokens</a></nav><hr />{}</body></html>",
        html_escape(title),
        content
    )
}
/// Escapes the HTML-significant characters so untrusted values can be
/// embedded safely in markup. `&` is replaced first so the entities
/// introduced by the later replacements are not double-escaped.
///
/// The original omitted `'`, but the page templates build single-quoted
/// attributes (e.g. `value='...'`), so a single quote in user data could
/// break out of an attribute; `&#39;` closes that hole.
fn html_escape(value: &str) -> String {
    value
        .replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
        .replace('"', "&quot;")
        .replace('\'', "&#39;")
}

View File

@@ -10,14 +10,18 @@ argon2.workspace = true
axum.workspace = true
base64.workspace = true
chrono.workspace = true
image.workspace = true
lru.workspace = true
stripstream-core = { path = "../../crates/core" }
rand.workspace = true
reqwest.workspace = true
serde.workspace = true
serde_json.workspace = true
sha2.workspace = true
sqlx.workspace = true
tokio.workspace = true
tower.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
uuid.workspace = true
zip = { version = "2.2", default-features = false, features = ["deflate"] }

View File

@@ -16,7 +16,7 @@ COPY crates/parsers/src crates/parsers/src
RUN cargo build --release -p api
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free poppler-utils && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/api /usr/local/bin/api
EXPOSE 8080
CMD ["/usr/local/bin/api"]

View File

@@ -3,14 +3,29 @@ mod books;
mod error;
mod index_jobs;
mod libraries;
mod pages;
mod search;
mod tokens;
use std::sync::Arc;
use std::{
num::NonZeroUsize,
sync::{
atomic::{AtomicU64, Ordering},
Arc,
},
time::{Duration, Instant},
};
use axum::{middleware, routing::{delete, get}, Router};
use axum::{
middleware,
response::IntoResponse,
routing::{delete, get},
Json, Router,
};
use lru::LruCache;
use stripstream_core::config::ApiConfig;
use sqlx::postgres::PgPoolOptions;
use tokio::sync::{Mutex, Semaphore};
use tracing::info;
#[derive(Clone)]
@@ -19,6 +34,31 @@ struct AppState {
bootstrap_token: Arc<str>,
meili_url: Arc<str>,
meili_master_key: Arc<str>,
page_cache: Arc<Mutex<LruCache<String, Arc<Vec<u8>>>>>,
page_render_limit: Arc<Semaphore>,
metrics: Arc<Metrics>,
read_rate_limit: Arc<Mutex<ReadRateLimit>>,
}
/// Process-wide counters exposed as plain text on GET /metrics.
struct Metrics {
    /// Total HTTP requests seen by the request_counter middleware.
    requests_total: AtomicU64,
    /// Page renders served from the in-memory LRU cache.
    page_cache_hits: AtomicU64,
    /// Page requests that had to be rendered from the source file.
    page_cache_misses: AtomicU64,
}
/// Mutable state of the fixed-window read rate limiter.
/// NOTE(review): the window is global, not per-client — one noisy client
/// can exhaust the budget for everyone; confirm that is intended.
struct ReadRateLimit {
    /// Start of the current window.
    window_started_at: Instant,
    /// Requests already admitted in the current window.
    requests_in_window: u32,
}
impl Metrics {
    /// Creates a metrics instance with every counter at zero.
    fn new() -> Self {
        Self {
            requests_total: AtomicU64::new(0),
            page_cache_hits: AtomicU64::new(0),
            page_cache_misses: AtomicU64::new(0),
        }
    }
}
#[tokio::main]
@@ -40,6 +80,13 @@ async fn main() -> anyhow::Result<()> {
bootstrap_token: Arc::from(config.api_bootstrap_token),
meili_url: Arc::from(config.meili_url),
meili_master_key: Arc::from(config.meili_master_key),
page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
page_render_limit: Arc::new(Semaphore::new(4)),
metrics: Arc::new(Metrics::new()),
read_rate_limit: Arc::new(Mutex::new(ReadRateLimit {
window_started_at: Instant::now(),
requests_in_window: 0,
})),
};
let admin_routes = Router::new()
@@ -49,18 +96,29 @@ async fn main() -> anyhow::Result<()> {
.route("/index/status", get(index_jobs::list_index_jobs))
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
.route("/admin/tokens/:id", delete(tokens::revoke_token))
.layer(middleware::from_fn_with_state(state.clone(), auth::require_admin));
.route_layer(middleware::from_fn_with_state(
state.clone(),
auth::require_admin,
));
let read_routes = Router::new()
.route("/books", get(books::list_books))
.route("/books/:id", get(books::get_book))
.route("/books/:id/pages/:n", get(pages::get_page))
.route("/search", get(search::search_books))
.layer(middleware::from_fn_with_state(state.clone(), auth::require_read));
.route_layer(middleware::from_fn_with_state(state.clone(), read_rate_limit))
.route_layer(middleware::from_fn_with_state(
state.clone(),
auth::require_read,
));
let app = Router::new()
.route("/health", get(health))
.route("/ready", get(ready))
.route("/metrics", get(metrics))
.merge(admin_routes)
.merge(read_routes)
.layer(middleware::from_fn_with_state(state.clone(), request_counter))
.with_state(state);
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
@@ -72,3 +130,50 @@ async fn main() -> anyhow::Result<()> {
/// Liveness probe: always "ok" while the process is running.
async fn health() -> &'static str {
    "ok"
}
/// Readiness probe: verifies the database is reachable via a trivial query;
/// any sqlx error propagates and is rendered by ApiError.
async fn ready(axum::extract::State(state): axum::extract::State<AppState>) -> Result<Json<serde_json::Value>, error::ApiError> {
    sqlx::query("SELECT 1").execute(&state.pool).await?;
    Ok(Json(serde_json::json!({"status": "ready"})))
}
/// GET /metrics: renders the counters in a plain `name value` text format,
/// one per line.
async fn metrics(axum::extract::State(state): axum::extract::State<AppState>) -> String {
    let counters = &state.metrics;
    let mut body = String::new();
    body.push_str(&format!(
        "requests_total {}\n",
        counters.requests_total.load(Ordering::Relaxed)
    ));
    body.push_str(&format!(
        "page_cache_hits {}\n",
        counters.page_cache_hits.load(Ordering::Relaxed)
    ));
    body.push_str(&format!(
        "page_cache_misses {}\n",
        counters.page_cache_misses.load(Ordering::Relaxed)
    ));
    body
}
/// Middleware: increments the global request counter, then forwards the
/// request unchanged.
async fn request_counter(
    axum::extract::State(state): axum::extract::State<AppState>,
    req: axum::extract::Request,
    next: axum::middleware::Next,
) -> axum::response::Response {
    // Relaxed suffices: the counter is a statistic, no ordering is required.
    state.metrics.requests_total.fetch_add(1, Ordering::Relaxed);
    next.run(req).await
}
/// Middleware: fixed-window rate limit for the read endpoints — at most
/// `MAX_READS_PER_WINDOW` requests per `WINDOW`, counted globally across
/// all clients. Over-limit requests get 429 without reaching the handler.
async fn read_rate_limit(
    axum::extract::State(state): axum::extract::State<AppState>,
    req: axum::extract::Request,
    next: axum::middleware::Next,
) -> axum::response::Response {
    // Named constants instead of the original magic numbers (1s / 120).
    const WINDOW: Duration = Duration::from_secs(1);
    const MAX_READS_PER_WINDOW: u32 = 120;
    let mut limiter = state.read_rate_limit.lock().await;
    // Start a fresh window once the current one has elapsed.
    if limiter.window_started_at.elapsed() >= WINDOW {
        limiter.window_started_at = Instant::now();
        limiter.requests_in_window = 0;
    }
    if limiter.requests_in_window >= MAX_READS_PER_WINDOW {
        return (
            axum::http::StatusCode::TOO_MANY_REQUESTS,
            "rate limit exceeded",
        )
            .into_response();
    }
    limiter.requests_in_window += 1;
    // Release the lock before running the handler so concurrent requests
    // are not serialized behind it.
    drop(limiter);
    next.run(req).await
}

281
apps/api/src/pages.rs Normal file
View File

@@ -0,0 +1,281 @@
use std::{
io::Read,
path::Path,
sync::{atomic::Ordering, Arc},
time::Duration,
};
use axum::{
body::Body,
extract::{Path as AxumPath, Query, State},
http::{header, HeaderMap, HeaderValue, StatusCode},
response::{IntoResponse, Response},
};
use image::{codecs::jpeg::JpegEncoder, codecs::png::PngEncoder, codecs::webp::WebPEncoder, ColorType, ImageEncoder};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use sqlx::Row;
use uuid::Uuid;
use crate::{error::ApiError, AppState};
/// Query parameters accepted by GET /books/:id/pages/:n.
#[derive(Deserialize)]
pub struct PageQuery {
    /// Output encoding: "webp" (default), "jpeg"/"jpg", or "png".
    pub format: Option<String>,
    /// Encoder quality, clamped to 1..=100; defaults to 80.
    pub quality: Option<u8>,
    /// Target width in pixels; absent/0 keeps the source size; max 2160.
    pub width: Option<u32>,
}
/// Image encodings the page endpoint can produce.
#[derive(Clone, Copy)]
enum OutputFormat {
    Jpeg,
    Png,
    Webp,
}
impl OutputFormat {
    /// Parses the `format` query value; a missing value defaults to webp.
    /// Rejects anything outside the three supported encodings.
    fn parse(value: Option<&str>) -> Result<Self, ApiError> {
        let requested = value.unwrap_or("webp");
        match requested {
            "webp" => Ok(Self::Webp),
            "jpeg" | "jpg" => Ok(Self::Jpeg),
            "png" => Ok(Self::Png),
            _ => Err(ApiError::bad_request("format must be webp|jpeg|png")),
        }
    }
    /// MIME type to send in the Content-Type header.
    fn content_type(&self) -> &'static str {
        match self {
            Self::Webp => "image/webp",
            Self::Jpeg => "image/jpeg",
            Self::Png => "image/png",
        }
    }
    /// Short extension used when building cache keys.
    fn extension(&self) -> &'static str {
        match self {
            Self::Webp => "webp",
            Self::Jpeg => "jpg",
            Self::Png => "png",
        }
    }
}
/// GET /books/:id/pages/:n — returns one page of a book transcoded to the
/// requested format, with an in-memory LRU cache in front of rendering and
/// a semaphore bounding concurrent renders.
pub async fn get_page(
    State(state): State<AppState>,
    AxumPath((book_id, n)): AxumPath<(Uuid, u32)>,
    Query(query): Query<PageQuery>,
) -> Result<Response, ApiError> {
    // Pages are 1-based; 0 would underflow the index math downstream.
    if n == 0 {
        return Err(ApiError::bad_request("page index starts at 1"));
    }
    let format = OutputFormat::parse(query.format.as_deref())?;
    let quality = query.quality.unwrap_or(80).clamp(1, 100);
    let width = query.width.unwrap_or(0);
    if width > 2160 {
        return Err(ApiError::bad_request("width must be <= 2160"));
    }
    // The cache key covers every parameter that affects the rendered bytes.
    let cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension());
    if let Some(cached) = state.page_cache.lock().await.get(&cache_key).cloned() {
        state.metrics.page_cache_hits.fetch_add(1, Ordering::Relaxed);
        return Ok(image_response(cached, format.content_type()));
    }
    state.metrics.page_cache_misses.fetch_add(1, Ordering::Relaxed);
    // Most recently updated file wins when a book has several files.
    let row = sqlx::query(
        r#"
SELECT abs_path, format
FROM book_files
WHERE book_id = $1
ORDER BY updated_at DESC
LIMIT 1
"#,
    )
    .bind(book_id)
    .fetch_optional(&state.pool)
    .await?;
    let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
    let abs_path: String = row.get("abs_path");
    let input_format: String = row.get("format");
    // Bounds the number of simultaneous blocking renders.
    let _permit = state
        .page_render_limit
        .clone()
        .acquire_owned()
        .await
        .map_err(|_| ApiError::internal("render limiter unavailable"))?;
    // NOTE(review): when the 12s timeout fires, the spawn_blocking task keeps
    // running in the background while this handler returns (and drops its
    // permit) — confirm that is acceptable under sustained load.
    let bytes = tokio::time::timeout(
        Duration::from_secs(12),
        tokio::task::spawn_blocking(move || render_page(&abs_path, &input_format, n, &format, quality, width)),
    )
    .await
    .map_err(|_| ApiError::internal("page rendering timeout"))?
    .map_err(|e| ApiError::internal(format!("render task failed: {e}")))??;
    let bytes = Arc::new(bytes);
    state.page_cache.lock().await.put(cache_key, bytes.clone());
    Ok(image_response(bytes, format.content_type()))
}
/// Builds the HTTP response for rendered page bytes: content type, a
/// 5-minute public cache header, and a strong ETag from a SHA-256 of the body.
fn image_response(bytes: Arc<Vec<u8>>, content_type: &str) -> Response {
    let mut headers = HeaderMap::new();
    // Fall back to octet-stream if the type string is somehow invalid.
    headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=300"));
    let mut hasher = Sha256::new();
    hasher.update(&*bytes);
    let etag = format!("\"{:x}\"", hasher.finalize());
    if let Ok(v) = HeaderValue::from_str(&etag) {
        headers.insert(header::ETAG, v);
    }
    // NOTE(review): the ETag is emitted but If-None-Match is never checked,
    // so clients never receive 304 — consider conditional-request handling.
    // The cached buffer is copied once here to hand an owned body to axum.
    (StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
}
/// Extracts page `page_number` (1-based) from the source file at `abs_path`
/// and re-encodes it into the requested output format. Runs on a blocking
/// thread (archive I/O plus external commands).
fn render_page(
    abs_path: &str,
    input_format: &str,
    page_number: u32,
    out_format: &OutputFormat,
    quality: u8,
    width: u32,
) -> Result<Vec<u8>, ApiError> {
    // Dispatch on the stored file format to obtain the raw page image.
    let source = match input_format {
        "cbz" => extract_cbz_page(abs_path, page_number),
        "cbr" => extract_cbr_page(abs_path, page_number),
        "pdf" => render_pdf_page(abs_path, page_number, width),
        _ => Err(ApiError::bad_request("unsupported source format")),
    }?;
    transcode_image(&source, out_format, quality, width)
}
/// Pulls the `page_number`-th (1-based) image entry out of a CBZ (zip)
/// archive: entries are matched by extension, sorted by name to fix the
/// reading order, then the selected entry is read fully into memory.
fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
    let file = std::fs::File::open(abs_path).map_err(|e| ApiError::internal(format!("cannot open cbz: {e}")))?;
    let mut archive = zip::ZipArchive::new(file).map_err(|e| ApiError::internal(format!("invalid cbz: {e}")))?;
    let mut image_names: Vec<String> = Vec::new();
    for i in 0..archive.len() {
        let entry = archive.by_index(i).map_err(|e| ApiError::internal(format!("cbz entry read failed: {e}")))?;
        // Match on a lowercased copy but keep the original name for lookup.
        let name = entry.name().to_ascii_lowercase();
        if is_image_name(&name) {
            image_names.push(entry.name().to_string());
        }
    }
    // NOTE(review): lexicographic order — "page10" sorts before "page2";
    // fine for zero-padded archives, natural sort would be more robust.
    image_names.sort_unstable();
    // checked_sub guards the 1-based index against page_number == 0 (would
    // panic on underflow in debug builds); the handler rejects 0, but this
    // helper should not rely on that.
    let index = (page_number as usize)
        .checked_sub(1)
        .ok_or_else(|| ApiError::bad_request("page index starts at 1"))?;
    let selected = image_names.get(index).ok_or_else(|| ApiError::not_found("page out of range"))?;
    let mut entry = archive.by_name(selected).map_err(|e| ApiError::internal(format!("cbz page read failed: {e}")))?;
    let mut buf = Vec::new();
    entry.read_to_end(&mut buf).map_err(|e| ApiError::internal(format!("cbz page load failed: {e}")))?;
    Ok(buf)
}
/// Pulls the `page_number`-th (1-based) image out of a CBR (rar) archive by
/// shelling out to `unrar`: one call to list entry names, one to pipe the
/// selected entry to stdout.
///
/// NOTE(review): this needs an `unrar` binary on PATH; the Docker image
/// installs the `unrar-free` package — confirm it provides a compatible
/// `unrar` executable supporting `lb` and `p -inul`.
fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
    // `lb` = bare listing, one entry name per line on stdout.
    let list_output = std::process::Command::new("unrar")
        .arg("lb")
        .arg(abs_path)
        .output()
        .map_err(|e| ApiError::internal(format!("unrar list failed: {e}")))?;
    if !list_output.status.success() {
        return Err(ApiError::internal("unrar could not list archive"));
    }
    let mut entries: Vec<String> = String::from_utf8_lossy(&list_output.stdout)
        .lines()
        .filter(|line| is_image_name(&line.to_ascii_lowercase()))
        .map(|s| s.to_string())
        .collect();
    // Lexicographic reading order, consistent with the CBZ path.
    entries.sort_unstable();
    // checked_sub guards the 1-based index against page_number == 0 (would
    // panic on underflow in debug builds) instead of relying on the handler.
    let index = (page_number as usize)
        .checked_sub(1)
        .ok_or_else(|| ApiError::bad_request("page index starts at 1"))?;
    let selected = entries.get(index).ok_or_else(|| ApiError::not_found("page out of range"))?;
    // `p -inul` prints the entry to stdout with all messages suppressed.
    let page_output = std::process::Command::new("unrar")
        .arg("p")
        .arg("-inul")
        .arg(abs_path)
        .arg(selected)
        .output()
        .map_err(|e| ApiError::internal(format!("unrar extract failed: {e}")))?;
    if !page_output.status.success() {
        return Err(ApiError::internal("unrar could not extract page"));
    }
    Ok(page_output.stdout)
}
/// Renders one PDF page to PNG via the external `pdftoppm` tool into a
/// per-request temp directory, reads the result back, and cleans up.
fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
    let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-{}", Uuid::new_v4()));
    std::fs::create_dir_all(&tmp_dir).map_err(|e| ApiError::internal(format!("cannot create temp dir: {e}")))?;
    let output_prefix = tmp_dir.join("page");
    let mut cmd = std::process::Command::new("pdftoppm");
    // -f/-l bound processing to the single requested page (the original
    // omitted -l, letting pdftoppm work past the target page); -singlefile
    // drops the page-number suffix from the output file name.
    cmd.arg("-f")
        .arg(page_number.to_string())
        .arg("-l")
        .arg(page_number.to_string())
        .arg("-singlefile")
        .arg("-png");
    if width > 0 {
        // -scale-to-x with -scale-to-y -1 keeps the aspect ratio.
        cmd.arg("-scale-to-x").arg(width.to_string()).arg("-scale-to-y").arg("-1");
    }
    cmd.arg(abs_path).arg(&output_prefix);
    // Run and read inside a closure so the temp dir is removed on every
    // path — the original leaked it when reading the output file failed.
    let result = (|| {
        let output = cmd
            .output()
            .map_err(|e| ApiError::internal(format!("pdf render failed: {e}")))?;
        if !output.status.success() {
            return Err(ApiError::internal("pdf render command failed"));
        }
        let image_path = output_prefix.with_extension("png");
        std::fs::read(&image_path).map_err(|e| ApiError::internal(format!("render output missing: {e}")))
    })();
    let _ = std::fs::remove_dir_all(&tmp_dir);
    result
}
/// Decodes the raw page image, optionally downscales it to `width`, and
/// re-encodes it in the requested output format.
fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width: u32) -> Result<Vec<u8>, ApiError> {
    let mut image = image::load_from_memory(input).map_err(|e| ApiError::internal(format!("invalid source image: {e}")))?;
    if width > 0 {
        // resize() preserves aspect ratio; u32::MAX makes width the only bound.
        image = image.resize(width, u32::MAX, image::imageops::FilterType::Lanczos3);
    }
    let mut out = Vec::new();
    match out_format {
        OutputFormat::Jpeg => {
            // JPEG has no alpha channel and the `image` JPEG encoder rejects
            // Rgba8 input, so the original failed every ?format=jpeg request.
            // Convert to Rgb8 before encoding.
            let rgb = image.to_rgb8();
            let (w, h) = rgb.dimensions();
            let mut encoder = JpegEncoder::new_with_quality(&mut out, quality);
            encoder
                .encode(&rgb, w, h, ColorType::Rgb8.into())
                .map_err(|e| ApiError::internal(format!("jpeg encode failed: {e}")))?;
        }
        OutputFormat::Png => {
            let rgba = image.to_rgba8();
            let (w, h) = rgba.dimensions();
            let encoder = PngEncoder::new(&mut out);
            encoder
                .write_image(&rgba, w, h, ColorType::Rgba8.into())
                .map_err(|e| ApiError::internal(format!("png encode failed: {e}")))?;
        }
        OutputFormat::Webp => {
            let rgba = image.to_rgba8();
            let (w, h) = rgba.dimensions();
            let encoder = WebPEncoder::new_lossless(&mut out);
            encoder
                .write_image(&rgba, w, h, ColorType::Rgba8.into())
                .map_err(|e| ApiError::internal(format!("webp encode failed: {e}")))?;
        }
    }
    Ok(out)
}
/// True when `name` (expected lowercased by callers) carries one of the
/// raster-image extensions served as comic pages.
fn is_image_name(name: &str) -> bool {
    const IMAGE_EXTENSIONS: [&str; 5] = [".jpg", ".jpeg", ".png", ".webp", ".avif"];
    IMAGE_EXTENSIONS.iter().any(|ext| name.ends_with(ext))
}
/// True when `value` is an absolute filesystem path on this platform.
/// Currently unused, kept for upcoming path validation.
#[allow(dead_code)]
fn _is_absolute_path(value: &str) -> bool {
    let candidate = Path::new(value);
    candidate.is_absolute()
}

View File

@@ -1,6 +1,7 @@
use anyhow::Context;
use axum::{routing::get, Router};
use axum::{extract::State, routing::get, Json, Router};
use chrono::{DateTime, Utc};
use axum::http::StatusCode;
use parsers::{detect_format, parse_metadata, BookFormat};
use serde::Serialize;
use sha2::{Digest, Sha256};
@@ -48,7 +49,10 @@ async fn main() -> anyhow::Result<()> {
tokio::spawn(run_worker(state.clone(), config.scan_interval_seconds));
let app = Router::new().route("/health", get(health));
let app = Router::new()
.route("/health", get(health))
.route("/ready", get(ready))
.with_state(state.clone());
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "indexer listening");
@@ -60,6 +64,14 @@ async fn health() -> &'static str {
"ok"
}
/// Readiness probe for the indexer: runs a trivial query against the
/// database and maps any failure to 503 Service Unavailable.
async fn ready(State(state): State<AppState>) -> Result<Json<serde_json::Value>, StatusCode> {
    sqlx::query("SELECT 1")
        .execute(&state.pool)
        .await
        .map_err(|_| StatusCode::SERVICE_UNAVAILABLE)?;
    Ok(Json(serde_json::json!({"status": "ready"})))
}
async fn run_worker(state: AppState, interval_seconds: u64) {
let wait = Duration::from_secs(interval_seconds.max(1));
loop {