bootstrap rust services, auth, and compose stack

This commit is contained in:
2026-03-05 14:51:02 +01:00
parent 1238079454
commit 88db9805b5
25 changed files with 3576 additions and 22 deletions

7
.env.example Normal file
View File

@@ -0,0 +1,7 @@
API_LISTEN_ADDR=0.0.0.0:8080
ADMIN_UI_LISTEN_ADDR=0.0.0.0:8082
INDEXER_LISTEN_ADDR=0.0.0.0:8081
DATABASE_URL=postgres://stripstream:stripstream@postgres:5432/stripstream
MEILI_URL=http://meilisearch:7700
MEILI_MASTER_KEY=change-me
API_BOOTSTRAP_TOKEN=change-me-bootstrap-token

4
.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
target/
.env
.DS_Store
tmp/

2679
Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

30
Cargo.toml Normal file
View File

@@ -0,0 +1,30 @@
[workspace]
members = [
"apps/api",
"apps/indexer",
"apps/admin-ui",
"crates/core",
"crates/parsers",
]
resolver = "2"
[workspace.package]
edition = "2021"
version = "0.1.0"
license = "MIT"
[workspace.dependencies]
anyhow = "1.0"
argon2 = "0.5"
axum = "0.7"
base64 = "0.22"
chrono = { version = "0.4", features = ["serde"] }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "uuid", "chrono"] }
tokio = { version = "1.43", features = ["macros", "rt-multi-thread", "signal"] }
tower = "0.5"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
uuid = { version = "1.12", features = ["serde", "v4"] }

48
PLAN.md
View File

@@ -17,7 +17,7 @@ Construire un serveur ultra performant pour indexer et servir des bibliotheques
- Auth: token bootstrap env + tokens admin en DB (creables/revocables) - Auth: token bootstrap env + tokens admin en DB (creables/revocables)
- Expiration tokens admin: aucune par defaut (revocation manuelle) - Expiration tokens admin: aucune par defaut (revocation manuelle)
- Rendu PDF: a la volee - Rendu PDF: a la volee
- CBR: extraction temporaire disque (`unrar`) + cleanup - CBR: extraction temporaire disque (`unrar-free`, commande `unrar`) + cleanup
- Formats pages: `webp`, `jpeg`, `png` - Formats pages: `webp`, `jpeg`, `png`
--- ---
@@ -25,46 +25,46 @@ Construire un serveur ultra performant pour indexer et servir des bibliotheques
## Backlog executable (ordre recommande) ## Backlog executable (ordre recommande)
### T1 - Bootstrap monorepo Rust ### T1 - Bootstrap monorepo Rust
- [ ] Creer workspace Cargo - [x] Creer workspace Cargo
- [ ] Creer crates/apps: `apps/api`, `apps/indexer`, `apps/admin-ui`, `crates/core`, `crates/parsers` - [x] Creer crates/apps: `apps/api`, `apps/indexer`, `apps/admin-ui`, `crates/core`, `crates/parsers`
- [ ] Config env centralisee + logging de base - [x] Config env centralisee + logging de base
**DoD:** Build des crates OK. **DoD:** Build des crates OK.
### T2 - Infra Docker Compose ### T2 - Infra Docker Compose
- [ ] Definir services `postgres`, `meilisearch`, `api`, `indexer` - [x] Definir services `postgres`, `meilisearch`, `api`, `indexer`
- [ ] Volumes persistants - [x] Volumes persistants
- [ ] Healthchecks - [x] Healthchecks
**DoD:** `docker compose up` demarre tout, services healthy. **DoD:** `docker compose up` demarre tout, services healthy.
### T3 - Schema DB + migrations ### T3 - Schema DB + migrations
- [ ] Tables: `libraries`, `books`, `book_files`, `index_jobs`, `api_tokens` - [x] Tables: `libraries`, `books`, `book_files`, `index_jobs`, `api_tokens`
- [ ] Index/contraintes (uniques, FK) - [x] Index/contraintes (uniques, FK)
- [ ] Scripts de migration - [x] Scripts de migration
**DoD:** Migrations appliquees sans erreur, schema stable. **DoD:** Migrations appliquees sans erreur, schema stable.
### T4 - Auth hybride ### T4 - Auth hybride
- [ ] Middleware `Authorization: Bearer <token>` - [x] Middleware `Authorization: Bearer <token>`
- [ ] Verif `API_BOOTSTRAP_TOKEN` - [x] Verif `API_BOOTSTRAP_TOKEN`
- [ ] Verif tokens DB (hash Argon2id, non revoques/non expires) - [x] Verif tokens DB (hash Argon2id, non revoques/non expires)
- [ ] MAJ `last_used_at` - [x] MAJ `last_used_at`
**DoD:** Acces protege fonctionnel, tokens revoques refuses. **DoD:** Acces protege fonctionnel, tokens revoques refuses.
### T5 - API admin tokens ### T5 - API admin tokens
- [ ] `POST /admin/tokens` (affichage secret une seule fois) - [x] `POST /admin/tokens` (affichage secret une seule fois)
- [ ] `GET /admin/tokens` (sans secret) - [x] `GET /admin/tokens` (sans secret)
- [ ] `DELETE /admin/tokens/:id` (revoke) - [x] `DELETE /admin/tokens/:id` (revoke)
**DoD:** Flux creation/liste/revocation valide. **DoD:** Flux creation/liste/revocation valide.
### T6 - CRUD librairies ### T6 - CRUD librairies
- [ ] `GET /libraries` - [x] `GET /libraries`
- [ ] `POST /libraries` - [x] `POST /libraries`
- [ ] `DELETE /libraries/:id` - [x] `DELETE /libraries/:id`
- [ ] Validation stricte des chemins (anti traversal) - [x] Validation stricte des chemins (anti traversal)
**DoD:** Gestion librairies robuste et securisee. **DoD:** Gestion librairies robuste et securisee.
@@ -189,10 +189,14 @@ Construire un serveur ultra performant pour indexer et servir des bibliotheques
--- ---
## Suivi d'avancement ## Suivi d'avancement
- [ ] Lot 1: Fondations (T1 -> T6) - [x] Lot 1: Fondations (T1 -> T6)
- [ ] Lot 2: Ingestion + Search (T7 -> T13) - [ ] Lot 2: Ingestion + Search (T7 -> T13)
- [ ] Lot 3: Lecture + UI + Hardening (T14 -> T18) - [ ] Lot 3: Lecture + UI + Hardening (T14 -> T18)
## Notes ## Notes
- Scope token v1: `admin`, `read` - Scope token v1: `admin`, `read`
- Bootstrap token = break-glass (peut etre desactive plus tard) - Bootstrap token = break-glass (peut etre desactive plus tard)
## Journal
- 2026-03-05: `docker compose up -d --build` valide, stack complete en healthy (`postgres`, `meilisearch`, `api`, `indexer`, `admin-ui`).
- 2026-03-05: ajustements infra appliques pour demarrage stable (`unrar` -> `unrar-free`, image `rust:1-bookworm`, healthchecks `127.0.0.1`).

13
apps/admin-ui/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
[package]
name = "admin-ui"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow.workspace = true
axum.workspace = true
stripstream-core = { path = "../../crates/core" }
tokio.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true

22
apps/admin-ui/Dockerfile Normal file
View File

@@ -0,0 +1,22 @@
# Multi-stage build: compile in the official Rust image, ship only the binary.
FROM rust:1-bookworm AS builder
WORKDIR /app
# Copy the lockfile with the manifests so dependency versions are pinned.
COPY Cargo.toml Cargo.lock ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
COPY apps/admin-ui/Cargo.toml apps/admin-ui/Cargo.toml
COPY crates/core/Cargo.toml crates/core/Cargo.toml
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
COPY apps/api/src apps/api/src
COPY apps/indexer/src apps/indexer/src
COPY apps/admin-ui/src apps/admin-ui/src
COPY crates/core/src crates/core/src
COPY crates/parsers/src crates/parsers/src
# --locked: fail fast if Cargo.lock is out of date instead of silently re-resolving.
RUN cargo build --release --locked -p admin-ui
# Minimal runtime image; wget is kept for the compose healthcheck.
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/admin-ui /usr/local/bin/admin-ui
EXPOSE 8082
CMD ["/usr/local/bin/admin-ui"]

32
apps/admin-ui/src/main.rs Normal file
View File

@@ -0,0 +1,32 @@
use axum::{response::Html, routing::get, Router};
use stripstream_core::config::AdminUiConfig;
use tracing::info;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
// Honour RUST_LOG when set; otherwise default to info-level logs for this
// service and axum.
tracing_subscriber::fmt()
.with_env_filter(
std::env::var("RUST_LOG").unwrap_or_else(|_| "admin_ui=info,axum=info".to_string()),
)
.init();
let config = AdminUiConfig::from_env();
// Two routes only: an unauthenticated healthcheck and the placeholder index page.
let app = Router::new()
.route("/health", get(health))
.route("/", get(index));
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "admin ui listening");
axum::serve(listener, app).await?;
Ok(())
}
// Liveness probe used by the compose healthcheck.
async fn health() -> &'static str {
"ok"
}
// Static placeholder page; real screens (libraries, jobs, tokens) come later.
async fn index() -> Html<&'static str> {
Html(
"<html><body><h1>Stripstream Admin</h1><p>UI skeleton ready. Next: libraries, jobs, tokens screens.</p></body></html>",
)
}

22
apps/api/Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[package]
name = "api"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow.workspace = true
argon2.workspace = true
axum.workspace = true
base64.workspace = true
chrono.workspace = true
stripstream-core = { path = "../../crates/core" }
rand.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlx.workspace = true
tokio.workspace = true
tower.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true
uuid.workspace = true

22
apps/api/Dockerfile Normal file
View File

@@ -0,0 +1,22 @@
# Multi-stage build: compile in the official Rust image, ship only the binary.
FROM rust:1-bookworm AS builder
WORKDIR /app
# Copy the lockfile with the manifests so dependency versions are pinned.
COPY Cargo.toml Cargo.lock ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
COPY apps/admin-ui/Cargo.toml apps/admin-ui/Cargo.toml
COPY crates/core/Cargo.toml crates/core/Cargo.toml
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
COPY apps/api/src apps/api/src
COPY apps/indexer/src apps/indexer/src
COPY apps/admin-ui/src apps/admin-ui/src
COPY crates/core/src crates/core/src
COPY crates/parsers/src crates/parsers/src
# --locked: fail fast if Cargo.lock is out of date instead of silently re-resolving.
RUN cargo build --release --locked -p api
# Minimal runtime image; wget is kept for the compose healthcheck.
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/api /usr/local/bin/api
EXPOSE 8080
CMD ["/usr/local/bin/api"]

93
apps/api/src/auth.rs Normal file
View File

@@ -0,0 +1,93 @@
use argon2::{Argon2, PasswordHash, PasswordVerifier};
use axum::{
extract::{Request, State},
http::header::AUTHORIZATION,
middleware::Next,
response::Response,
};
use chrono::Utc;
use sqlx::Row;
use crate::{error::ApiError, AppState};
/// Authorization scope attached to an authenticated request.
#[derive(Clone, Debug)]
pub enum Scope {
// Full administrative access: bootstrap env token or DB rows with scope 'admin'.
Admin,
// Read-only access: DB rows with scope 'read'.
Read,
}
/// Axum middleware: authenticate the bearer token and require admin scope.
///
/// On success the resolved [`Scope`] is inserted into the request extensions
/// so downstream handlers can inspect it; otherwise responds 401 (missing or
/// invalid token) or 403 (valid token without admin scope).
pub async fn require_admin(
State(state): State<AppState>,
mut req: Request,
next: Next,
) -> Result<Response, ApiError> {
let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
let scope = authenticate(&state, token).await?;
if !matches!(scope, Scope::Admin) {
return Err(ApiError::forbidden("admin scope required"));
}
req.extensions_mut().insert(scope);
Ok(next.run(req).await)
}
/// Pull the token out of an `Authorization: Bearer <token>` header, if present
/// and valid UTF-8. Returns `None` for any other header shape.
fn bearer_token(req: &Request) -> Option<&str> {
    let header = req.headers().get(AUTHORIZATION)?;
    let value = header.to_str().ok()?;
    value.strip_prefix("Bearer ")
}
/// Resolve a bearer token to its [`Scope`].
///
/// Order of checks:
/// 1. the env-provided bootstrap token (break-glass admin access);
/// 2. DB tokens: look up the row by prefix (skipping revoked/expired rows),
///    verify the Argon2id hash of the full token, touch `last_used_at`, and
///    map the stored scope string.
async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError> {
    // Fixed: a plain `==` comparison short-circuits on the first differing
    // byte, leaking how much of the bootstrap token matched via timing.
    if constant_time_eq(token.as_bytes(), state.bootstrap_token.as_bytes()) {
        return Ok(Scope::Admin);
    }
    let prefix = parse_prefix(token).ok_or_else(|| ApiError::unauthorized("invalid token format"))?;
    let maybe_row = sqlx::query(
        r#"
SELECT id, token_hash, scope
FROM api_tokens
WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW())
"#,
    )
    .bind(prefix)
    .fetch_optional(&state.pool)
    .await?;
    // All failure paths below use the same message so a caller cannot
    // distinguish "unknown prefix" from "bad secret" or "corrupt hash".
    let row = maybe_row.ok_or_else(|| ApiError::unauthorized("invalid token"))?;
    let token_hash: String = row
        .try_get("token_hash")
        .map_err(|_| ApiError::unauthorized("invalid token"))?;
    let parsed_hash =
        PasswordHash::new(&token_hash).map_err(|_| ApiError::unauthorized("invalid token"))?;
    // The hash was computed over the full `stl_<prefix>_<secret>` string at
    // creation time, so verify against the full token here as well.
    Argon2::default()
        .verify_password(token.as_bytes(), &parsed_hash)
        .map_err(|_| ApiError::unauthorized("invalid token"))?;
    let token_id: uuid::Uuid = row
        .try_get("id")
        .map_err(|_| ApiError::unauthorized("invalid token"))?;
    // Usage tracking; a DB failure here surfaces as 500 via From<sqlx::Error>.
    sqlx::query("UPDATE api_tokens SET last_used_at = $1 WHERE id = $2")
        .bind(Utc::now())
        .bind(token_id)
        .execute(&state.pool)
        .await?;
    let scope: String = row
        .try_get("scope")
        .map_err(|_| ApiError::unauthorized("invalid token"))?;
    match scope.as_str() {
        "admin" => Ok(Scope::Admin),
        "read" => Ok(Scope::Read),
        _ => Err(ApiError::unauthorized("invalid token scope")),
    }
}

/// Content-constant-time byte equality: always scans the full shorter-equal
/// length without short-circuiting. Length inequality still returns early,
/// which only leaks the token length. Avoids adding a crate (e.g. `subtle`)
/// for a single comparison.
fn constant_time_eq(a: &[u8], b: &[u8]) -> bool {
    a.len() == b.len() && a.iter().zip(b.iter()).fold(0u8, |acc, (x, y)| acc | (x ^ y)) == 0
}
/// Extract the 8-character lookup prefix from a token of the form
/// `stl_<prefix>_<secret>`.
///
/// Fixed: the prefix is the first 8 chars of a URL-safe base64 secret, and
/// that alphabet (RFC 4648 §5) includes `_` — so the prefix itself may
/// contain underscores. Splitting the token on `_` mis-extracted the prefix
/// for such tokens and valid credentials were rejected. Extract a
/// fixed-width slice instead.
fn parse_prefix(token: &str) -> Option<&str> {
    // Prefix width is fixed by token generation (`secret.chars().take(8)`).
    const PREFIX_LEN: usize = 8;
    let rest = token.strip_prefix("stl_")?;
    // Need the prefix, the separating '_', and at least one secret char.
    // The char-boundary check keeps split_at from panicking on odd input.
    if rest.len() <= PREFIX_LEN + 1 || !rest.is_char_boundary(PREFIX_LEN) {
        return None;
    }
    let (prefix, tail) = rest.split_at(PREFIX_LEN);
    let secret = tail.strip_prefix('_')?;
    if secret.is_empty() {
        return None;
    }
    Some(prefix)
}

62
apps/api/src/error.rs Normal file
View File

@@ -0,0 +1,62 @@
use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
use serde::Serialize;
/// Error type returned by every API handler; converts into an HTTP response
/// with a JSON body of the form `{"error": "<message>"}`.
#[derive(Debug)]
pub struct ApiError {
// HTTP status code sent to the client.
pub status: StatusCode,
// Human-readable message placed in the JSON body.
pub message: String,
}
/// Wire shape of the JSON error body; borrows the message to avoid a copy.
#[derive(Serialize)]
struct ErrorBody<'a> {
error: &'a str,
}
impl ApiError {
pub fn bad_request(message: impl Into<String>) -> Self {
Self {
status: StatusCode::BAD_REQUEST,
message: message.into(),
}
}
pub fn unauthorized(message: impl Into<String>) -> Self {
Self {
status: StatusCode::UNAUTHORIZED,
message: message.into(),
}
}
pub fn forbidden(message: impl Into<String>) -> Self {
Self {
status: StatusCode::FORBIDDEN,
message: message.into(),
}
}
pub fn not_found(message: impl Into<String>) -> Self {
Self {
status: StatusCode::NOT_FOUND,
message: message.into(),
}
}
pub fn internal(message: impl Into<String>) -> Self {
Self {
status: StatusCode::INTERNAL_SERVER_ERROR,
message: message.into(),
}
}
}
// Render the error as `(status, {"error": message})` for axum.
impl IntoResponse for ApiError {
fn into_response(self) -> Response {
(self.status, Json(ErrorBody { error: &self.message })).into_response()
}
}
// Any unhandled sqlx error becomes a 500. The raw driver message is included
// in the body; NOTE(review): consider logging it server-side only if these
// messages could expose schema details to clients.
impl From<sqlx::Error> for ApiError {
fn from(err: sqlx::Error) -> Self {
Self::internal(format!("database error: {err}"))
}
}

101
apps/api/src/libraries.rs Normal file
View File

@@ -0,0 +1,101 @@
use std::path::{Path, PathBuf};
use axum::{extract::{Path as AxumPath, State}, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use crate::{error::ApiError, AppState};
/// Library row as exposed over the API.
#[derive(Serialize)]
pub struct LibraryDto {
pub id: Uuid,
pub name: String,
// Canonicalized absolute directory path on the server.
pub root_path: String,
pub enabled: bool,
}
/// Request body for `POST /libraries`.
#[derive(Deserialize)]
pub struct CreateLibraryInput {
pub name: String,
// Must be an absolute path to an existing directory; validated server-side.
pub root_path: String,
}
/// `GET /libraries` — all libraries, newest first.
pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryDto>>, ApiError> {
    let records = sqlx::query("SELECT id, name, root_path, enabled FROM libraries ORDER BY created_at DESC")
        .fetch_all(&state.pool)
        .await?;
    let mut libraries = Vec::with_capacity(records.len());
    for record in records {
        libraries.push(LibraryDto {
            id: record.get("id"),
            name: record.get("name"),
            root_path: record.get("root_path"),
            enabled: record.get("enabled"),
        });
    }
    Ok(Json(libraries))
}
pub async fn create_library(
State(state): State<AppState>,
Json(input): Json<CreateLibraryInput>,
) -> Result<Json<LibraryDto>, ApiError> {
if input.name.trim().is_empty() {
return Err(ApiError::bad_request("name is required"));
}
let canonical = canonicalize_library_root(&input.root_path)?;
let id = Uuid::new_v4();
let root_path = canonical.to_string_lossy().to_string();
sqlx::query(
"INSERT INTO libraries (id, name, root_path, enabled) VALUES ($1, $2, $3, TRUE)",
)
.bind(id)
.bind(input.name.trim())
.bind(&root_path)
.execute(&state.pool)
.await?;
Ok(Json(LibraryDto {
id,
name: input.name.trim().to_string(),
root_path,
enabled: true,
}))
}
/// `DELETE /libraries/:id` — remove a library; 404 when the id is unknown.
/// Dependent rows in `books` go away via ON DELETE CASCADE.
pub async fn delete_library(
    State(state): State<AppState>,
    AxumPath(id): AxumPath<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let outcome = sqlx::query("DELETE FROM libraries WHERE id = $1")
        .bind(id)
        .execute(&state.pool)
        .await?;
    match outcome.rows_affected() {
        0 => Err(ApiError::not_found("library not found")),
        _ => Ok(Json(serde_json::json!({"deleted": true, "id": id}))),
    }
}
/// Validate and resolve an admin-supplied library root.
///
/// Requires an absolute path that exists and is a directory. Canonicalization
/// resolves `..` segments and symlinks, which is the anti-traversal guarantee.
fn canonicalize_library_root(root_path: &str) -> Result<PathBuf, ApiError> {
    let candidate = Path::new(root_path);
    if !candidate.is_absolute() {
        return Err(ApiError::bad_request("root_path must be absolute"));
    }
    match std::fs::canonicalize(candidate) {
        Ok(resolved) if resolved.is_dir() => Ok(resolved),
        Ok(_) => Err(ApiError::bad_request("root_path must point to a directory")),
        Err(_) => Err(ApiError::bad_request("root_path does not exist or is inaccessible")),
    }
}

58
apps/api/src/main.rs Normal file
View File

@@ -0,0 +1,58 @@
mod auth;
mod error;
mod libraries;
mod tokens;
use std::sync::Arc;
use axum::{middleware, routing::{delete, get}, Router};
use stripstream_core::config::ApiConfig;
use sqlx::postgres::PgPoolOptions;
use tracing::info;
/// Shared application state cloned into every handler.
#[derive(Clone)]
struct AppState {
pool: sqlx::PgPool,
// Break-glass admin token from API_BOOTSTRAP_TOKEN; Arc<str> keeps the
// clone-per-request cheap.
bootstrap_token: Arc<str>,
}
#[tokio::main]
async fn main() -> anyhow::Result<()> {
tracing_subscriber::fmt()
.with_env_filter(
std::env::var("RUST_LOG").unwrap_or_else(|_| "api=info,axum=info".to_string()),
)
.init();
let config = ApiConfig::from_env()?;
let pool = PgPoolOptions::new()
.max_connections(10)
.connect(&config.database_url)
.await?;
let state = AppState {
pool,
bootstrap_token: Arc::from(config.api_bootstrap_token),
};
// Everything except /health sits behind the admin-auth middleware.
let protected = Router::new()
.route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
.route("/libraries/:id", delete(libraries::delete_library))
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
.route("/admin/tokens/:id", delete(tokens::revoke_token))
.layer(middleware::from_fn_with_state(state.clone(), auth::require_admin));
let app = Router::new()
.route("/health", get(health))
.merge(protected)
.with_state(state);
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "api listening");
axum::serve(listener, app).await?;
Ok(())
}
// Unauthenticated liveness probe used by the compose healthcheck.
async fn health() -> &'static str {
"ok"
}

122
apps/api/src/tokens.rs Normal file
View File

@@ -0,0 +1,122 @@
use argon2::{password_hash::SaltString, Argon2, PasswordHasher};
use axum::{extract::{Path, State}, Json};
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use chrono::{DateTime, Utc};
use rand::{rngs::OsRng, RngCore};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use crate::{error::ApiError, AppState};
/// Request body for `POST /admin/tokens`.
#[derive(Deserialize)]
pub struct CreateTokenInput {
pub name: String,
// 'admin' or 'read'; defaults to 'read' when omitted.
pub scope: Option<String>,
}
/// Response for token creation — the only place the full secret is returned.
#[derive(Serialize)]
pub struct CreatedToken {
pub id: Uuid,
pub name: String,
pub scope: String,
// Full `stl_<prefix>_<secret>` value; shown once, never stored in clear.
pub token: String,
pub prefix: String,
}
/// Token listing item — deliberately excludes the secret and its hash.
#[derive(Serialize)]
pub struct TokenItem {
pub id: Uuid,
pub name: String,
pub scope: String,
pub prefix: String,
pub last_used_at: Option<DateTime<Utc>>,
pub revoked_at: Option<DateTime<Utc>>,
pub created_at: DateTime<Utc>,
}
/// `POST /admin/tokens` — mint a new API token.
///
/// The random secret is returned to the caller exactly once; only its
/// Argon2id hash (computed over the full token string) is persisted, with an
/// 8-char plaintext prefix kept for indexed lookup at auth time.
pub async fn create_token(
State(state): State<AppState>,
Json(input): Json<CreateTokenInput>,
) -> Result<Json<CreatedToken>, ApiError> {
if input.name.trim().is_empty() {
return Err(ApiError::bad_request("name is required"));
}
let scope = match input.scope.as_deref().unwrap_or("read") {
"admin" => "admin",
"read" => "read",
_ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")),
};
// 24 random bytes -> 32 chars of URL-safe base64 (no padding).
let mut random = [0u8; 24];
OsRng.fill_bytes(&mut random);
let secret = URL_SAFE_NO_PAD.encode(random);
// NOTE(review): the URL-safe base64 alphabet includes '-' and '_', so this
// 8-char prefix (and the secret) may themselves contain '_'. Any consumer
// that splits the token on '_' will mis-extract the prefix for such tokens;
// parsing must slice fixed widths instead. TODO: confirm the auth-side
// parser handles this.
let prefix: String = secret.chars().take(8).collect();
let token = format!("stl_{prefix}_{secret}");
let salt = SaltString::generate(&mut argon2::password_hash::rand_core::OsRng);
let token_hash = Argon2::default()
.hash_password(token.as_bytes(), &salt)
.map_err(|_| ApiError::internal("failed to hash token"))?
.to_string();
let id = Uuid::new_v4();
sqlx::query(
"INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)",
)
.bind(id)
.bind(input.name.trim())
.bind(&prefix)
.bind(token_hash)
.bind(scope)
.execute(&state.pool)
.await?;
Ok(Json(CreatedToken {
id,
name: input.name.trim().to_string(),
scope: scope.to_string(),
token,
prefix,
}))
}
/// `GET /admin/tokens` — list tokens (metadata only, never the secret), newest first.
pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<TokenItem>>, ApiError> {
    let records = sqlx::query(
        "SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC",
    )
    .fetch_all(&state.pool)
    .await?;
    let mut tokens = Vec::with_capacity(records.len());
    for record in records {
        tokens.push(TokenItem {
            id: record.get("id"),
            name: record.get("name"),
            scope: record.get("scope"),
            prefix: record.get("prefix"),
            last_used_at: record.get("last_used_at"),
            revoked_at: record.get("revoked_at"),
            created_at: record.get("created_at"),
        });
    }
    Ok(Json(tokens))
}
/// `DELETE /admin/tokens/:id` — soft-revoke a token by stamping `revoked_at`.
/// Returns 404 for unknown ids and for tokens that are already revoked
/// (the `revoked_at IS NULL` guard makes revocation idempotent-by-rejection).
pub async fn revoke_token(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let outcome = sqlx::query("UPDATE api_tokens SET revoked_at = NOW() WHERE id = $1 AND revoked_at IS NULL")
        .bind(id)
        .execute(&state.pool)
        .await?;
    match outcome.rows_affected() {
        0 => Err(ApiError::not_found("token not found")),
        _ => Ok(Json(serde_json::json!({"revoked": true, "id": id}))),
    }
}

13
apps/indexer/Cargo.toml Normal file
View File

@@ -0,0 +1,13 @@
[package]
name = "indexer"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow.workspace = true
axum.workspace = true
stripstream-core = { path = "../../crates/core" }
tokio.workspace = true
tracing.workspace = true
tracing-subscriber.workspace = true

22
apps/indexer/Dockerfile Normal file
View File

@@ -0,0 +1,22 @@
# Multi-stage build: compile in the official Rust image, ship only the binary.
FROM rust:1-bookworm AS builder
WORKDIR /app
# Copy the lockfile with the manifests so dependency versions are pinned.
COPY Cargo.toml Cargo.lock ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
COPY apps/admin-ui/Cargo.toml apps/admin-ui/Cargo.toml
COPY crates/core/Cargo.toml crates/core/Cargo.toml
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
COPY apps/api/src apps/api/src
COPY apps/indexer/src apps/indexer/src
COPY apps/admin-ui/src apps/admin-ui/src
COPY crates/core/src crates/core/src
COPY crates/parsers/src crates/parsers/src
# --locked: fail fast if Cargo.lock is out of date instead of silently re-resolving.
RUN cargo build --release --locked -p indexer
# Minimal runtime image; wget for the healthcheck, unrar-free for CBR extraction.
FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/indexer /usr/local/bin/indexer
EXPOSE 8081
CMD ["/usr/local/bin/indexer"]

24
apps/indexer/src/main.rs Normal file
View File

@@ -0,0 +1,24 @@
use axum::{routing::get, Router};
use stripstream_core::config::IndexerConfig;
use tracing::info;
#[tokio::main]
async fn main() -> anyhow::Result<()> {
// Honour RUST_LOG when set; otherwise default to info-level logs.
tracing_subscriber::fmt()
.with_env_filter(
std::env::var("RUST_LOG").unwrap_or_else(|_| "indexer=info,axum=info".to_string()),
)
.init();
let config = IndexerConfig::from_env();
// Skeleton service: only the healthcheck endpoint for now; scan/rebuild
// jobs come in a later lot.
let app = Router::new().route("/health", get(health));
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "indexer listening");
axum::serve(listener, app).await?;
Ok(())
}
// Liveness probe used by the compose healthcheck.
async fn health() -> &'static str {
"ok"
}

9
crates/core/Cargo.toml Normal file
View File

@@ -0,0 +1,9 @@
[package]
name = "stripstream-core"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow.workspace = true
serde.workspace = true

47
crates/core/src/config.rs Normal file
View File

@@ -0,0 +1,47 @@
use anyhow::{Context, Result};
/// API service configuration, read once at startup.
#[derive(Debug, Clone)]
pub struct ApiConfig {
// Socket address to bind; API_LISTEN_ADDR, default 0.0.0.0:8080.
pub listen_addr: String,
// Postgres connection string; DATABASE_URL, required.
pub database_url: String,
// Break-glass admin token; API_BOOTSTRAP_TOKEN, required.
pub api_bootstrap_token: String,
}
impl ApiConfig {
/// Build from the environment; errors if a required variable is missing.
pub fn from_env() -> Result<Self> {
Ok(Self {
listen_addr: std::env::var("API_LISTEN_ADDR").unwrap_or_else(|_| "0.0.0.0:8080".to_string()),
database_url: std::env::var("DATABASE_URL").context("DATABASE_URL is required")?,
api_bootstrap_token: std::env::var("API_BOOTSTRAP_TOKEN")
.context("API_BOOTSTRAP_TOKEN is required")?,
})
}
}
/// Indexer service configuration, read once at startup.
#[derive(Debug, Clone)]
pub struct IndexerConfig {
    // Socket address to bind; INDEXER_LISTEN_ADDR, default 0.0.0.0:8081.
    pub listen_addr: String,
}
impl IndexerConfig {
    /// Build from the environment; falls back to the default bind address
    /// when `INDEXER_LISTEN_ADDR` is unset.
    pub fn from_env() -> Self {
        let listen_addr = match std::env::var("INDEXER_LISTEN_ADDR") {
            Ok(value) => value,
            Err(_) => "0.0.0.0:8081".to_string(),
        };
        Self { listen_addr }
    }
}
/// Admin-UI service configuration, read once at startup.
#[derive(Debug, Clone)]
pub struct AdminUiConfig {
    // Socket address to bind; ADMIN_UI_LISTEN_ADDR, default 0.0.0.0:8082.
    pub listen_addr: String,
}
impl AdminUiConfig {
    /// Build from the environment; falls back to the default bind address
    /// when `ADMIN_UI_LISTEN_ADDR` is unset.
    pub fn from_env() -> Self {
        let listen_addr = match std::env::var("ADMIN_UI_LISTEN_ADDR") {
            Ok(value) => value,
            Err(_) => "0.0.0.0:8082".to_string(),
        };
        Self { listen_addr }
    }
}

1
crates/core/src/lib.rs Normal file
View File

@@ -0,0 +1 @@
pub mod config;

View File

@@ -0,0 +1,8 @@
[package]
name = "parsers"
version.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow.workspace = true

View File

@@ -0,0 +1,3 @@
/// File extensions (lowercase, without the leading dot) that the parsers
/// crate can handle.
pub fn supported_formats() -> &'static [&'static str] {
    const FORMATS: &[&str] = &["cbz", "cbr", "pdf"];
    FORMATS
}

89
infra/docker-compose.yml Normal file
View File

@@ -0,0 +1,89 @@
# Compose stack for local development: postgres + meilisearch backing the
# three Rust services built from this repo.
# NOTE(review): the postgres credentials and the meilisearch fallback key
# below are development placeholders — set real values in .env (and do not
# rely on the `change-me` default) before any non-local deployment.
services:
postgres:
image: postgres:16-alpine
environment:
POSTGRES_DB: stripstream
POSTGRES_USER: stripstream
POSTGRES_PASSWORD: stripstream
ports:
- "5432:5432"
volumes:
- postgres_data:/var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U stripstream -d stripstream"]
interval: 10s
timeout: 5s
retries: 5
meilisearch:
image: getmeili/meilisearch:v1.12
environment:
MEILI_MASTER_KEY: ${MEILI_MASTER_KEY:-change-me}
ports:
- "7700:7700"
volumes:
- meili_data:/meili_data
healthcheck:
# Healthchecks use 127.0.0.1 (not localhost) and wget, which is present
# in all the images used here.
test: ["CMD", "wget", "-q", "-O", "-", "http://127.0.0.1:7700/health"]
interval: 10s
timeout: 5s
retries: 5
api:
build:
context: ..
dockerfile: apps/api/Dockerfile
# Requires a .env at the repo root (see .env.example).
env_file:
- ../.env
ports:
- "8080:8080"
depends_on:
postgres:
condition: service_healthy
meilisearch:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "-q", "-O", "-", "http://127.0.0.1:8080/health"]
interval: 10s
timeout: 5s
retries: 5
indexer:
build:
context: ..
dockerfile: apps/indexer/Dockerfile
env_file:
- ../.env
ports:
- "8081:8081"
depends_on:
postgres:
condition: service_healthy
meilisearch:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "-q", "-O", "-", "http://127.0.0.1:8081/health"]
interval: 10s
timeout: 5s
retries: 5
admin-ui:
build:
context: ..
dockerfile: apps/admin-ui/Dockerfile
env_file:
- ../.env
ports:
- "8082:8082"
depends_on:
api:
condition: service_healthy
healthcheck:
test: ["CMD", "wget", "-q", "-O", "-", "http://127.0.0.1:8082/health"]
interval: 10s
timeout: 5s
retries: 5
volumes:
postgres_data:
meili_data:

View File

@@ -0,0 +1,67 @@
-- Initial schema for stripstream.
-- Conventions: application-generated UUID surrogate primary keys,
-- TIMESTAMPTZ for all instants, and idempotent CREATE ... IF NOT EXISTS so
-- the migration can be re-applied safely.
-- NOTE(review): updated_at columns have no ON UPDATE trigger here —
-- presumably maintained by the application; confirm before relying on them.

-- A library is a named root directory that gets scanned for books.
CREATE TABLE IF NOT EXISTS libraries (
id UUID PRIMARY KEY,
name TEXT NOT NULL,
-- Canonical absolute path; UNIQUE prevents registering the same root twice.
root_path TEXT NOT NULL UNIQUE,
enabled BOOLEAN NOT NULL DEFAULT TRUE,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Logical work (one per title); physical files live in book_files.
-- Deleting a library cascades to its books.
CREATE TABLE IF NOT EXISTS books (
id UUID PRIMARY KEY,
library_id UUID NOT NULL REFERENCES libraries(id) ON DELETE CASCADE,
kind TEXT NOT NULL CHECK (kind IN ('ebook', 'comic', 'bd')),
title TEXT NOT NULL,
author TEXT,
series TEXT,
volume TEXT,
language TEXT,
page_count INT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- One row per physical file on disk; a book may have several formats.
CREATE TABLE IF NOT EXISTS book_files (
id UUID PRIMARY KEY,
book_id UUID NOT NULL REFERENCES books(id) ON DELETE CASCADE,
format TEXT NOT NULL CHECK (format IN ('pdf', 'cbz', 'cbr')),
abs_path TEXT NOT NULL UNIQUE,
size_bytes BIGINT NOT NULL,
mtime TIMESTAMPTZ NOT NULL,
-- Cheap change-detection value; checksum_opt presumably holds an optional
-- full content checksum — confirm semantics with the indexer.
fingerprint TEXT NOT NULL,
checksum_opt TEXT,
-- Parse workflow state; parse_error_opt carries the message when 'error'.
parse_status TEXT NOT NULL DEFAULT 'pending' CHECK (parse_status IN ('pending', 'ok', 'error')),
parse_error_opt TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Indexing job log; library_id is kept nullable so job history survives
-- library deletion (SET NULL rather than CASCADE).
CREATE TABLE IF NOT EXISTS index_jobs (
id UUID PRIMARY KEY,
library_id UUID REFERENCES libraries(id) ON DELETE SET NULL,
-- 'type' is a non-reserved word in PostgreSQL, so unquoted use is legal here.
type TEXT NOT NULL CHECK (type IN ('scan', 'rebuild')),
status TEXT NOT NULL CHECK (status IN ('pending', 'running', 'success', 'failed')),
started_at TIMESTAMPTZ,
finished_at TIMESTAMPTZ,
stats_json JSONB,
error_opt TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- API tokens: only the Argon2id hash is stored; the UNIQUE plaintext prefix
-- gives an indexed lookup path at auth time. Revocation is the soft
-- revoked_at stamp; expires_at NULL means "never expires".
CREATE TABLE IF NOT EXISTS api_tokens (
id UUID PRIMARY KEY,
name TEXT NOT NULL,
prefix TEXT NOT NULL UNIQUE,
token_hash TEXT NOT NULL,
scope TEXT NOT NULL CHECK (scope IN ('admin', 'read')),
last_used_at TIMESTAMPTZ,
revoked_at TIMESTAMPTZ,
expires_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Secondary indexes for the FK joins and the hot status filters.
CREATE INDEX IF NOT EXISTS idx_books_library_id ON books(library_id);
CREATE INDEX IF NOT EXISTS idx_book_files_book_id ON book_files(book_id);
CREATE INDEX IF NOT EXISTS idx_book_files_parse_status ON book_files(parse_status);
CREATE INDEX IF NOT EXISTS idx_index_jobs_status ON index_jobs(status);
CREATE INDEX IF NOT EXISTS idx_api_tokens_scope ON api_tokens(scope);