add indexing jobs, parsers, and search APIs
@@ -32,6 +32,18 @@ pub async fn require_admin(
     Ok(next.run(req).await)
 }
 
+pub async fn require_read(
+    State(state): State<AppState>,
+    mut req: Request,
+    next: Next,
+) -> Result<Response, ApiError> {
+    let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
+    let scope = authenticate(&state, token).await?;
+
+    req.extensions_mut().insert(scope);
+    Ok(next.run(req).await)
+}
+
 fn bearer_token(req: &Request) -> Option<&str> {
     req.headers()
         .get(AUTHORIZATION)
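
The hunk cuts off inside `bearer_token`. For context, a minimal sketch of how such a helper might finish; the `to_str`/`strip_prefix` chain below is an assumption for illustration and is not part of this commit:

fn bearer_token(req: &Request) -> Option<&str> {
    req.headers()
        .get(AUTHORIZATION)
        // HeaderValue::to_str rejects non-UTF-8 header values.
        .and_then(|value| value.to_str().ok())
        // Accept only "Bearer <token>" and return the token part.
        .and_then(|value| value.strip_prefix("Bearer "))
        .map(str::trim)
        .filter(|token| !token.is_empty())
}
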
apps/api/src/books.rs (new file, 143 lines)
@@ -0,0 +1,143 @@
use axum::{extract::{Path, Query, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;

use crate::{error::ApiError, AppState};

#[derive(Deserialize)]
pub struct ListBooksQuery {
    pub library_id: Option<Uuid>,
    pub kind: Option<String>,
    pub cursor: Option<Uuid>,
    pub limit: Option<i64>,
}

#[derive(Serialize)]
pub struct BookItem {
    pub id: Uuid,
    pub library_id: Uuid,
    pub kind: String,
    pub title: String,
    pub author: Option<String>,
    pub series: Option<String>,
    pub volume: Option<String>,
    pub language: Option<String>,
    pub page_count: Option<i32>,
    pub updated_at: DateTime<Utc>,
}

#[derive(Serialize)]
pub struct BooksPage {
    pub items: Vec<BookItem>,
    pub next_cursor: Option<Uuid>,
}

#[derive(Serialize)]
pub struct BookDetails {
    pub id: Uuid,
    pub library_id: Uuid,
    pub kind: String,
    pub title: String,
    pub author: Option<String>,
    pub series: Option<String>,
    pub volume: Option<String>,
    pub language: Option<String>,
    pub page_count: Option<i32>,
    pub file_path: Option<String>,
    pub file_format: Option<String>,
    pub file_parse_status: Option<String>,
}

pub async fn list_books(
    State(state): State<AppState>,
    Query(query): Query<ListBooksQuery>,
) -> Result<Json<BooksPage>, ApiError> {
    let limit = query.limit.unwrap_or(50).clamp(1, 200);
    let rows = sqlx::query(
        r#"
        SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at
        FROM books
        WHERE ($1::uuid IS NULL OR library_id = $1)
          AND ($2::text IS NULL OR kind = $2)
          AND ($3::uuid IS NULL OR id > $3)
        ORDER BY id ASC
        LIMIT $4
        "#,
    )
    .bind(query.library_id)
    .bind(query.kind.as_deref())
    .bind(query.cursor)
    .bind(limit + 1)
    .fetch_all(&state.pool)
    .await?;

    let items: Vec<BookItem> = rows
        .iter()
        .take(limit as usize)
        .map(|row| BookItem {
            id: row.get("id"),
            library_id: row.get("library_id"),
            kind: row.get("kind"),
            title: row.get("title"),
            author: row.get("author"),
            series: row.get("series"),
            volume: row.get("volume"),
            language: row.get("language"),
            page_count: row.get("page_count"),
            updated_at: row.get("updated_at"),
        })
        .collect();

    let next_cursor = if rows.len() > limit as usize {
        items.last().map(|b| b.id)
    } else {
        None
    };

    Ok(Json(BooksPage { items, next_cursor }))
}

pub async fn get_book(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<BookDetails>, ApiError> {
    let row = sqlx::query(
        r#"
        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count,
               bf.abs_path, bf.format, bf.parse_status
        FROM books b
        LEFT JOIN LATERAL (
            SELECT abs_path, format, parse_status
            FROM book_files
            WHERE book_id = b.id
            ORDER BY updated_at DESC
            LIMIT 1
        ) bf ON TRUE
        WHERE b.id = $1
        "#,
    )
    .bind(id)
    .fetch_optional(&state.pool)
    .await?;

    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
    Ok(Json(BookDetails {
        id: row.get("id"),
        library_id: row.get("library_id"),
        kind: row.get("kind"),
        title: row.get("title"),
        author: row.get("author"),
        series: row.get("series"),
        volume: row.get("volume"),
        language: row.get("language"),
        page_count: row.get("page_count"),
        file_path: row.get("abs_path"),
        file_format: row.get("format"),
        file_parse_status: row.get("parse_status"),
    }))
}
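
list_books uses keyset pagination: it fetches limit + 1 rows ordered by id, returns at most limit of them, and exposes the last returned id as next_cursor only when the extra row proved another page exists. A minimal client-side sketch of walking those pages; the reqwest client, base URL, and token handling are assumptions for illustration, not part of this commit:

// Illustration only: page through GET /books until next_cursor runs out.
async fn fetch_all_books(base: &str, token: &str) -> anyhow::Result<Vec<serde_json::Value>> {
    let client = reqwest::Client::new();
    let mut cursor: Option<String> = None;
    let mut all = Vec::new();
    loop {
        let mut request = client
            .get(format!("{base}/books"))
            .bearer_auth(token)
            .query(&[("limit", "200")]);
        if let Some(c) = &cursor {
            request = request.query(&[("cursor", c.as_str())]);
        }
        let page: serde_json::Value = request.send().await?.error_for_status()?.json().await?;
        if let Some(items) = page["items"].as_array() {
            all.extend(items.iter().cloned());
        }
        match page["next_cursor"].as_str() {
            Some(next) => cursor = Some(next.to_owned()),
            None => break, // no extra row was found, so this was the last page
        }
    }
    Ok(all)
}
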
apps/api/src/index_jobs.rs (new file, 74 lines)
@@ -0,0 +1,74 @@
use axum::{extract::State, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;

use crate::{error::ApiError, AppState};

#[derive(Deserialize)]
pub struct RebuildRequest {
    pub library_id: Option<Uuid>,
}

#[derive(Serialize)]
pub struct IndexJobItem {
    pub id: Uuid,
    pub library_id: Option<Uuid>,
    pub r#type: String,
    pub status: String,
    pub started_at: Option<DateTime<Utc>>,
    pub finished_at: Option<DateTime<Utc>>,
    pub stats_json: Option<serde_json::Value>,
    pub error_opt: Option<String>,
    pub created_at: DateTime<Utc>,
}

pub async fn enqueue_rebuild(
    State(state): State<AppState>,
    payload: Option<Json<RebuildRequest>>,
) -> Result<Json<IndexJobItem>, ApiError> {
    let library_id = payload.and_then(|p| p.0.library_id);
    let id = Uuid::new_v4();

    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'rebuild', 'pending')",
    )
    .bind(id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;

    let row = sqlx::query(
        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
    )
    .bind(id)
    .fetch_one(&state.pool)
    .await?;

    Ok(Json(map_row(row)))
}

pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobItem>>, ApiError> {
    let rows = sqlx::query(
        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs ORDER BY created_at DESC LIMIT 100",
    )
    .fetch_all(&state.pool)
    .await?;

    Ok(Json(rows.into_iter().map(map_row).collect()))
}

fn map_row(row: sqlx::postgres::PgRow) -> IndexJobItem {
    IndexJobItem {
        id: row.get("id"),
        library_id: row.get("library_id"),
        r#type: row.get("type"),
        status: row.get("status"),
        started_at: row.get("started_at"),
        finished_at: row.get("finished_at"),
        stats_json: row.get("stats_json"),
        error_opt: row.get("error_opt"),
        created_at: row.get("created_at"),
    }
}
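
enqueue_rebuild above issues two round trips (an INSERT followed by a SELECT of the same row). A minimal sketch of collapsing that into one statement with INSERT ... RETURNING, shown only as an alternative under the same schema assumptions, not what the commit does:

// Alternative sketch of enqueue_rebuild's data access: insert and read back in one round trip.
async fn enqueue_rebuild_single_query(
    pool: &sqlx::PgPool,
    library_id: Option<uuid::Uuid>,
) -> Result<IndexJobItem, sqlx::Error> {
    let id = uuid::Uuid::new_v4();
    let row = sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status)
         VALUES ($1, $2, 'rebuild', 'pending')
         RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at",
    )
    .bind(id)
    .bind(library_id)
    .fetch_one(pool)
    .await?;
    Ok(map_row(row))
}
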
@@ -1,6 +1,9 @@
 mod auth;
+mod books;
 mod error;
+mod index_jobs;
 mod libraries;
+mod search;
 mod tokens;
 
 use std::sync::Arc;
@@ -14,6 +17,8 @@ use tracing::info;
 struct AppState {
     pool: sqlx::PgPool,
     bootstrap_token: Arc<str>,
+    meili_url: Arc<str>,
+    meili_master_key: Arc<str>,
 }
 
 #[tokio::main]
@@ -33,18 +38,29 @@ async fn main() -> anyhow::Result<()> {
     let state = AppState {
         pool,
         bootstrap_token: Arc::from(config.api_bootstrap_token),
+        meili_url: Arc::from(config.meili_url),
+        meili_master_key: Arc::from(config.meili_master_key),
     };
 
-    let protected = Router::new()
+    let admin_routes = Router::new()
         .route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
         .route("/libraries/:id", delete(libraries::delete_library))
+        .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
+        .route("/index/status", get(index_jobs::list_index_jobs))
         .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
         .route("/admin/tokens/:id", delete(tokens::revoke_token))
         .layer(middleware::from_fn_with_state(state.clone(), auth::require_admin));
 
+    let read_routes = Router::new()
+        .route("/books", get(books::list_books))
+        .route("/books/:id", get(books::get_book))
+        .route("/search", get(search::search_books))
+        .layer(middleware::from_fn_with_state(state.clone(), auth::require_read));
+
     let app = Router::new()
         .route("/health", get(health))
-        .merge(protected)
+        .merge(admin_routes)
+        .merge(read_routes)
         .with_state(state);
 
     let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
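
The admin-scoped router above exposes the new indexing endpoints. A minimal sketch of kicking off a rebuild and then listing recent jobs from a client; the reqwest usage, base URL, and admin token are illustrative assumptions, not part of the commit:

// Illustration only: enqueue a rebuild for one library, then list recent jobs.
async fn trigger_rebuild(base: &str, admin_token: &str, library_id: uuid::Uuid) -> anyhow::Result<()> {
    let client = reqwest::Client::new();

    let job: serde_json::Value = client
        .post(format!("{base}/index/rebuild"))
        .bearer_auth(admin_token)
        .json(&serde_json::json!({ "library_id": library_id }))
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    println!("enqueued job {} with status {}", job["id"], job["status"]);

    let jobs: serde_json::Value = client
        .get(format!("{base}/index/status"))
        .bearer_auth(admin_token)
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    println!("{} recent jobs", jobs.as_array().map_or(0, |j| j.len()));
    Ok(())
}
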
apps/api/src/search.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
use axum::{extract::{Query, State}, Json};
use serde::{Deserialize, Serialize};

use crate::{error::ApiError, AppState};

#[derive(Deserialize)]
pub struct SearchQuery {
    pub q: String,
    pub library_id: Option<String>,
    pub r#type: Option<String>,
    pub kind: Option<String>,
    pub limit: Option<usize>,
}

#[derive(Serialize)]
pub struct SearchResponse {
    pub hits: serde_json::Value,
    pub estimated_total_hits: Option<u64>,
    pub processing_time_ms: Option<u64>,
}

pub async fn search_books(
    State(state): State<AppState>,
    Query(query): Query<SearchQuery>,
) -> Result<Json<SearchResponse>, ApiError> {
    if query.q.trim().is_empty() {
        return Err(ApiError::bad_request("q is required"));
    }

    let mut filters: Vec<String> = Vec::new();
    if let Some(library_id) = query.library_id.as_deref() {
        // Filter values are wrapped in single quotes below, so strip both quote
        // characters to keep user input from breaking out of the quoted string.
        filters.push(format!("library_id = '{}'", library_id.replace('\'', "").replace('"', "")));
    }
    let kind_filter = query.r#type.as_deref().or(query.kind.as_deref());
    if let Some(kind) = kind_filter {
        filters.push(format!("kind = '{}'", kind.replace('\'', "").replace('"', "")));
    }

    let body = serde_json::json!({
        "q": query.q,
        "limit": query.limit.unwrap_or(20).clamp(1, 100),
        "filter": if filters.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(filters.join(" AND ")) }
    });

    let client = reqwest::Client::new();
    let url = format!("{}/indexes/books/search", state.meili_url.trim_end_matches('/'));
    let response = client
        .post(url)
        .header("Authorization", format!("Bearer {}", state.meili_master_key))
        .json(&body)
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("meili request failed: {e}")))?;

    if !response.status().is_success() {
        let body = response.text().await.unwrap_or_else(|_| "unknown meili error".to_string());
        if body.contains("index_not_found") {
            return Ok(Json(SearchResponse {
                hits: serde_json::json!([]),
                estimated_total_hits: Some(0),
                processing_time_ms: Some(0),
            }));
        }
        return Err(ApiError::internal(format!("meili error: {body}")));
    }

    let payload: serde_json::Value = response
        .json()
        .await
        .map_err(|e| ApiError::internal(format!("invalid meili response: {e}")))?;

    Ok(Json(SearchResponse {
        hits: payload.get("hits").cloned().unwrap_or_else(|| serde_json::json!([])),
        estimated_total_hits: payload.get("estimatedTotalHits").and_then(|v| v.as_u64()),
        processing_time_ms: payload.get("processingTimeMs").and_then(|v| v.as_u64()),
    }))
}
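
search_books forwards library_id and kind as a Meilisearch filter string, and Meilisearch only accepts filters on attributes that have been declared filterable on the index, so the indexer side needs to set that up once. A minimal sketch, assuming a current Meilisearch version where the per-setting route is PUT /indexes/books/settings/filterable-attributes; the client wiring here is illustrative and not part of this commit:

// Illustration only: declare library_id and kind filterable so the /search filters work.
async fn ensure_filterable(meili_url: &str, master_key: &str) -> anyhow::Result<()> {
    let url = format!(
        "{}/indexes/books/settings/filterable-attributes",
        meili_url.trim_end_matches('/')
    );
    reqwest::Client::new()
        .put(url)
        .header("Authorization", format!("Bearer {master_key}"))
        .json(&serde_json::json!(["library_id", "kind"]))
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
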