diff --git a/apps/api/src/pages.rs b/apps/api/src/pages.rs index 4ad0e9a..1ab8302 100644 --- a/apps/api/src/pages.rs +++ b/apps/api/src/pages.rs @@ -363,6 +363,11 @@ fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiErro .map(|s| s.to_string()) .collect(); entries.sort(); + + // Debug: show first few entries + if entries.len() > 0 { + debug!("First 5 entries in CBR {}: {:?}", abs_path, &entries[..entries.len().min(5)]); + } debug!("Found {} images in CBR {}", entries.len(), abs_path); let index = page_number as usize - 1; @@ -375,6 +380,7 @@ fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiErro let page_output = std::process::Command::new("unrar") .arg("p") .arg("-inul") + .arg("-y") .arg(abs_path) .arg(selected) .output() @@ -387,8 +393,45 @@ fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiErro error!("unrar could not extract page {} from {}: {}", selected, abs_path, stderr); return Err(ApiError::internal("unrar could not extract page")); } - debug!("Successfully extracted {} bytes from CBR page {}", page_output.stdout.len(), page_number); - Ok(page_output.stdout) + + let extracted_data = &page_output.stdout; + debug!("Extracted {} bytes from CBR page {}", extracted_data.len(), page_number); + + // Verify it's actually an image by checking magic bytes + if extracted_data.len() < 1000 { + // Show first few bytes for debugging + let preview: Vec<u8> = extracted_data.iter().take(32).copied().collect(); + let hex_preview: String = preview.iter().map(|b| format!("{:02x}", b)).collect(); + error!("Extracted data too small ({} bytes) for page {} from {} - first bytes: {}", + extracted_data.len(), page_number, abs_path, hex_preview); + return Err(ApiError::internal("extracted data too small - not a valid image")); + } + + // Check magic bytes to verify it's an image + let is_valid_image = extracted_data.len() > 4 && ( + // JPEG + extracted_data.starts_with(&[0xFF, 0xD8, 0xFF]) || + // PNG + 
extracted_data.starts_with(&[0x89, 0x50, 0x4E, 0x47]) || + // GIF + extracted_data.starts_with(b"GIF87a") || extracted_data.starts_with(b"GIF89a") || + // WebP + extracted_data.starts_with(b"RIFF") && extracted_data.len() > 12 && &extracted_data[8..12] == b"WEBP" || + // TIFF + extracted_data.starts_with(&[0x49, 0x49, 0x2A, 0x00]) || extracted_data.starts_with(&[0x4D, 0x4D, 0x00, 0x2A]) + ); + + if !is_valid_image { + // Show first few bytes for debugging + let preview: Vec<u8> = extracted_data.iter().take(32).copied().collect(); + let hex_preview: String = preview.iter().map(|b| format!("{:02x}", b)).collect(); + error!("Extracted data for page {} from {} is not a valid image format. First bytes: {} (size: {})", + page_number, abs_path, hex_preview, extracted_data.len()); + return Err(ApiError::internal("extracted data is not a valid image")); + } + + debug!("Successfully extracted {} bytes from CBR page {}", extracted_data.len(), page_number); + Ok(extracted_data.to_vec()) } fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> { diff --git a/apps/api/src/settings.rs b/apps/api/src/settings.rs index 151b7e4..c1d8100 100644 --- a/apps/api/src/settings.rs +++ b/apps/api/src/settings.rs @@ -1,6 +1,5 @@ use axum::{ - extract::{Query, State}, - response::IntoResponse, + extract::State, routing::{get, post}, Json, Router, }; @@ -10,40 +9,24 @@ use sqlx::Row; use crate::{error::ApiError, AppState}; -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ImageProcessingSettings { - pub format: String, - pub quality: u8, - pub filter: String, - pub max_width: u32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CacheSettings { - pub enabled: bool, - pub directory: String, - pub max_size_mb: u32, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct LimitsSettings { - pub concurrent_renders: u8, - pub timeout_seconds: u8, - pub rate_limit_per_second: u16, -} - -#[derive(Debug, Clone, Serialize, Deserialize)] 
-pub struct AppSettings { - pub image_processing: ImageProcessingSettings, - pub cache: CacheSettings, - pub limits: LimitsSettings, -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct UpdateSettingRequest { pub value: Value, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ClearCacheResponse { + pub success: bool, + pub message: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CacheStats { + pub total_size_mb: f64, + pub file_count: u64, + pub directory: String, +} + pub fn settings_routes() -> Router<AppState> { Router::new() .route("/settings", get(get_settings)) @@ -108,12 +91,6 @@ async fn update_setting( Ok(Json(value)) } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct ClearCacheResponse { - pub success: bool, - pub message: String, -} - async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> { let cache_dir = std::env::var("IMAGE_CACHE_DIR") .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); @@ -143,13 +120,6 @@ async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheR -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CacheStats { - pub total_size_mb: f64, - pub file_count: u64, - pub directory: String, -} - async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> { let cache_dir = std::env::var("IMAGE_CACHE_DIR") .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()); @@ -201,60 +171,3 @@ async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheSt -pub async fn get_app_settings( - pool: &sqlx::PgPool, -) -> Result<AppSettings, ApiError> { - let settings = get_settings_from_db_raw(pool).await?; - - let image_processing = settings - .get("image_processing") - .and_then(|v| serde_json::from_value(v.clone()).ok()) - .unwrap_or_else(|| ImageProcessingSettings { - format: "webp".to_string(), - quality: 85, - filter: "lanczos3".to_string(), - max_width: 2160, - }); - - let cache = settings - .get("cache") - .and_then(|v| serde_json::from_value(v.clone()).ok()) - .unwrap_or_else(|| CacheSettings { - enabled: true, - directory: "/tmp/stripstream-image-cache".to_string(), - max_size_mb: 10000, - }); - - let limits = settings - .get("limits") - .and_then(|v| serde_json::from_value(v.clone()).ok()) - .unwrap_or_else(|| LimitsSettings { - 
concurrent_renders: 4, - timeout_seconds: 12, - rate_limit_per_second: 120, - }); - - Ok(AppSettings { - image_processing, - cache, - limits, - }) -} - -async fn get_settings_from_db_raw( - pool: &sqlx::PgPool, -) -> Result<std::collections::HashMap<String, Value>, ApiError> { - let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#) - .fetch_all(pool) - .await?; - - let mut settings = std::collections::HashMap::new(); - for row in rows { - let key: String = row.get("key"); - let value: Value = row.get("value"); - settings.insert(key, value); - } - - Ok(settings) -}