diff --git a/apps/api/src/auth.rs b/apps/api/src/auth.rs index 8e680f8..afcf28c 100644 --- a/apps/api/src/auth.rs +++ b/apps/api/src/auth.rs @@ -10,10 +10,15 @@ use sqlx::Row; use crate::{error::ApiError, state::AppState}; +#[derive(Clone, Debug)] +pub struct AuthUser { + pub user_id: uuid::Uuid, +} + #[derive(Clone, Debug)] pub enum Scope { Admin, - Read, + Read { user_id: uuid::Uuid }, } pub async fn require_admin( @@ -40,6 +45,20 @@ pub async fn require_read( let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?; let scope = authenticate(&state, token).await?; + if let Scope::Read { user_id } = &scope { + req.extensions_mut().insert(AuthUser { user_id: *user_id }); + } else if matches!(scope, Scope::Admin) { + // An admin may impersonate a user via the X-As-User header + if let Some(as_user_id) = req + .headers() + .get("X-As-User") + .and_then(|v| v.to_str().ok()) + .and_then(|v| uuid::Uuid::parse_str(v).ok()) + { + req.extensions_mut().insert(AuthUser { user_id: as_user_id }); + } + } + req.extensions_mut().insert(scope); Ok(next.run(req).await) } @@ -60,8 +79,7 @@ async fn authenticate(state: &AppState, token: &str) -> Result let maybe_row = sqlx::query( r#" - SELECT id, token_hash, scope - FROM api_tokens + SELECT id, token_hash, scope, user_id FROM api_tokens WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW()) "#, ) @@ -88,7 +106,12 @@ async fn authenticate(state: &AppState, token: &str) -> Result let scope: String = row.try_get("scope").map_err(|_| ApiError::unauthorized("invalid token"))?; match scope.as_str() { "admin" => Ok(Scope::Admin), - "read" => Ok(Scope::Read), + "read" => { + let user_id: uuid::Uuid = row + .try_get("user_id") + .map_err(|_| ApiError::unauthorized("read token missing user_id"))?; + Ok(Scope::Read { user_id }) + } _ => Err(ApiError::unauthorized("invalid token scope")), } } diff --git a/apps/api/src/books.rs b/apps/api/src/books.rs index 93d4c1e..31bc0f8 100644
--- a/apps/api/src/books.rs +++ b/apps/api/src/books.rs @@ -1,11 +1,11 @@ -use axum::{extract::{Path, Query, State}, Json}; +use axum::{extract::{Extension, Path, Query, State}, Json}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use sqlx::Row; use uuid::Uuid; use utoipa::ToSchema; -use crate::{error::ApiError, index_jobs::IndexJobResponse, state::AppState}; +use crate::{auth::AuthUser, error::ApiError, index_jobs::IndexJobResponse, state::AppState}; #[derive(Deserialize, ToSchema)] pub struct ListBooksQuery { @@ -122,7 +122,9 @@ pub struct BookDetails { pub async fn list_books( State(state): State, Query(query): Query, + user: Option>, ) -> Result, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let limit = query.limit.unwrap_or(50).clamp(1, 200); let page = query.page.unwrap_or(1).max(1); let offset = (page - 1) * limit; @@ -151,6 +153,8 @@ pub async fn list_books( Some(_) => { p += 1; format!("AND eml.provider = ${p}") }, None => String::new(), }; + p += 1; + let uid_p = p; let metadata_links_cte = r#" metadata_links AS ( @@ -164,7 +168,7 @@ pub async fn list_books( let count_sql = format!( r#"WITH {metadata_links_cte} SELECT COUNT(*) FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p} LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id WHERE ($1::uuid IS NULL OR b.library_id = $1) AND ($2::text IS NULL OR b.kind = $2) @@ -192,7 +196,7 @@ pub async fn list_books( brp.current_page AS reading_current_page, brp.last_read_at AS reading_last_read_at FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p} LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id WHERE ($1::uuid IS NULL OR 
b.library_id = $1) AND ($2::text IS NULL OR b.kind = $2) @@ -235,8 +239,8 @@ pub async fn list_books( data_builder = data_builder.bind(mp.clone()); } } - - data_builder = data_builder.bind(limit).bind(offset); + count_builder = count_builder.bind(user_id); + data_builder = data_builder.bind(user_id).bind(limit).bind(offset); let (count_row, rows) = tokio::try_join!( count_builder.fetch_one(&state.pool), @@ -295,7 +299,9 @@ pub async fn list_books( pub async fn get_book( State(state): State, Path(id): Path, + user: Option>, ) -> Result, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let row = sqlx::query( r#" SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date, @@ -311,11 +317,12 @@ pub async fn get_book( ORDER BY updated_at DESC LIMIT 1 ) bf ON TRUE - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 WHERE b.id = $1 "#, ) .bind(id) + .bind(user_id) .fetch_optional(&state.pool) .await?; @@ -521,9 +528,9 @@ pub async fn update_book( WHERE id = $1 RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path, summary, isbn, publish_date, - COALESCE((SELECT status FROM book_reading_progress WHERE book_id = $1), 'unread') AS reading_status, - (SELECT current_page FROM book_reading_progress WHERE book_id = $1) AS reading_current_page, - (SELECT last_read_at FROM book_reading_progress WHERE book_id = $1) AS reading_last_read_at + 'unread' AS reading_status, + NULL::integer AS reading_current_page, + NULL::timestamptz AS reading_last_read_at "#, ) .bind(id) diff --git a/apps/api/src/komga.rs b/apps/api/src/komga.rs index abeba93..4490e0d 100644 --- a/apps/api/src/komga.rs +++ b/apps/api/src/komga.rs @@ -38,6 +38,8 @@ pub struct KomgaSyncRequest { pub url: String, pub 
username: String, pub password: String, + #[schema(value_type = String)] + pub user_id: Uuid, } #[derive(Serialize, ToSchema)] @@ -45,6 +47,8 @@ pub struct KomgaSyncResponse { #[schema(value_type = String)] pub id: Uuid, pub komga_url: String, + #[schema(value_type = Option)] + pub user_id: Option, pub total_komga_read: i64, pub matched: i64, pub already_read: i64, @@ -61,6 +65,8 @@ pub struct KomgaSyncReportSummary { #[schema(value_type = String)] pub id: Uuid, pub komga_url: String, + #[schema(value_type = Option)] + pub user_id: Option, pub total_komga_read: i64, pub matched: i64, pub already_read: i64, @@ -215,11 +221,12 @@ pub async fn sync_komga_read_books( let mut already_read_ids: std::collections::HashSet = std::collections::HashSet::new(); if !matched_ids.is_empty() { - // Get already-read book IDs + // Get already-read book IDs for this user let ar_rows = sqlx::query( - "SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND status = 'read'", + "SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND user_id = $2 AND status = 'read'", ) .bind(&matched_ids) + .bind(body.user_id) .fetch_all(&state.pool) .await?; @@ -228,12 +235,12 @@ pub async fn sync_komga_read_books( } already_read = already_read_ids.len() as i64; - // Bulk upsert all matched books as read + // Bulk upsert all matched books as read for this user sqlx::query( r#" - INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at) - SELECT unnest($1::uuid[]), 'read', NULL, NOW(), NOW() - ON CONFLICT (book_id) DO UPDATE + INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at) + SELECT unnest($1::uuid[]), $2, 'read', NULL, NOW(), NOW() + ON CONFLICT (book_id, user_id) DO UPDATE SET status = 'read', current_page = NULL, last_read_at = NOW(), @@ -242,6 +249,7 @@ pub async fn sync_komga_read_books( "#, ) .bind(&matched_ids) + .bind(body.user_id) .execute(&state.pool) .await?; } @@ -273,12 
+281,13 @@ pub async fn sync_komga_read_books( let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default(); let report_row = sqlx::query( r#" - INSERT INTO komga_sync_reports (komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + INSERT INTO komga_sync_reports (komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, created_at "#, ) .bind(&url) + .bind(body.user_id) .bind(total_komga_read) .bind(matched) .bind(already_read) @@ -292,6 +301,7 @@ pub async fn sync_komga_read_books( Ok(Json(KomgaSyncResponse { id: report_row.get("id"), komga_url: url, + user_id: Some(body.user_id), total_komga_read, matched, already_read, @@ -319,7 +329,7 @@ pub async fn list_sync_reports( ) -> Result>, ApiError> { let rows = sqlx::query( r#" - SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, + SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, jsonb_array_length(unmatched) as unmatched_count, created_at FROM komga_sync_reports ORDER BY created_at DESC @@ -334,6 +344,7 @@ pub async fn list_sync_reports( .map(|row| KomgaSyncReportSummary { id: row.get("id"), komga_url: row.get("komga_url"), + user_id: row.get("user_id"), total_komga_read: row.get("total_komga_read"), matched: row.get("matched"), already_read: row.get("already_read"), @@ -365,7 +376,7 @@ pub async fn get_sync_report( ) -> Result, ApiError> { let row = sqlx::query( r#" - SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at + SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at FROM komga_sync_reports WHERE id = $1 "#, @@ -386,6 +397,7 @@ 
pub async fn get_sync_report( Ok(Json(KomgaSyncResponse { id: row.get("id"), komga_url: row.get("komga_url"), + user_id: row.get("user_id"), total_komga_read: row.get("total_komga_read"), matched: row.get("matched"), already_read: row.get("already_read"), diff --git a/apps/api/src/main.rs b/apps/api/src/main.rs index 180fa64..cd55e21 100644 --- a/apps/api/src/main.rs +++ b/apps/api/src/main.rs @@ -25,6 +25,7 @@ mod stats; mod telegram; mod thumbnails; mod tokens; +mod users; use std::sync::Arc; use std::time::Instant; @@ -106,8 +107,10 @@ async fn main() -> anyhow::Result<()> { .route("/index/jobs/:id/errors", get(index_jobs::get_job_errors)) .route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job)) .route("/folders", get(index_jobs::list_folders)) + .route("/admin/users", get(users::list_users).post(users::create_user)) + .route("/admin/users/:id", delete(users::delete_user).patch(users::update_user)) .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token)) - .route("/admin/tokens/:id", delete(tokens::revoke_token)) + .route("/admin/tokens/:id", delete(tokens::revoke_token).patch(tokens::update_token)) .route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token)) .route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr)) .route("/prowlarr/test", get(prowlarr::test_prowlarr)) diff --git a/apps/api/src/reading_progress.rs b/apps/api/src/reading_progress.rs index 91014d4..8ccb738 100644 --- a/apps/api/src/reading_progress.rs +++ b/apps/api/src/reading_progress.rs @@ -1,11 +1,11 @@ -use axum::{extract::{Path, State}, Json}; +use axum::{extract::{Extension, Path, State}, Json}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use sqlx::Row; use uuid::Uuid; use utoipa::ToSchema; -use crate::{error::ApiError, state::AppState}; +use crate::{auth::AuthUser, error::ApiError, state::AppState}; #[derive(Serialize, ToSchema)] pub struct ReadingProgressResponse { @@ -42,8 +42,10 @@ pub struct 
UpdateReadingProgressRequest { )] pub async fn get_reading_progress( State(state): State, + user: Option>, Path(id): Path, ) -> Result, ApiError> { + let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0; // Verify book exists let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)") .bind(id) @@ -55,9 +57,10 @@ pub async fn get_reading_progress( } let row = sqlx::query( - "SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1", + "SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1 AND user_id = $2", ) .bind(id) + .bind(auth_user.user_id) .fetch_optional(&state.pool) .await?; @@ -96,9 +99,11 @@ pub async fn get_reading_progress( )] pub async fn update_reading_progress( State(state): State, + user: Option>, Path(id): Path, Json(body): Json, ) -> Result, ApiError> { + let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0; // Validate status value if !["unread", "reading", "read"].contains(&body.status.as_str()) { return Err(ApiError::bad_request(format!( @@ -143,9 +148,9 @@ pub async fn update_reading_progress( let row = sqlx::query( r#" - INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at) - VALUES ($1, $2, $3, NOW(), NOW()) - ON CONFLICT (book_id) DO UPDATE + INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at) + VALUES ($1, $2, $3, $4, NOW(), NOW()) + ON CONFLICT (book_id, user_id) DO UPDATE SET status = EXCLUDED.status, current_page = EXCLUDED.current_page, last_read_at = NOW(), @@ -154,6 +159,7 @@ pub async fn update_reading_progress( "#, ) .bind(id) + .bind(auth_user.user_id) .bind(&body.status) .bind(current_page) .fetch_one(&state.pool) @@ -194,8 +200,10 @@ pub struct MarkSeriesReadResponse { )] pub async fn mark_series_read( State(state): State, + user: Option>, 
Json(body): Json, ) -> Result, ApiError> { + let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0; if !["read", "unread"].contains(&body.status.as_str()) { return Err(ApiError::bad_request( "status must be 'read' or 'unread'", @@ -209,24 +217,50 @@ pub async fn mark_series_read( }; let sql = if body.status == "unread" { - // Delete progress records to reset to unread + // Delete progress records to reset to unread (scoped to this user) + if body.series == "unclassified" { + format!( + r#" + WITH target_books AS ( + SELECT id FROM books WHERE {series_filter} + ) + DELETE FROM book_reading_progress + WHERE book_id IN (SELECT id FROM target_books) AND user_id = $1 + "# + ) + } else { + format!( + r#" + WITH target_books AS ( + SELECT id FROM books WHERE {series_filter} + ) + DELETE FROM book_reading_progress + WHERE book_id IN (SELECT id FROM target_books) AND user_id = $2 + "# + ) + } + } else if body.series == "unclassified" { format!( r#" - WITH target_books AS ( - SELECT id FROM books WHERE {series_filter} - ) - DELETE FROM book_reading_progress - WHERE book_id IN (SELECT id FROM target_books) + INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at) + SELECT id, $1, 'read', NULL, NOW(), NOW() + FROM books + WHERE {series_filter} + ON CONFLICT (book_id, user_id) DO UPDATE + SET status = 'read', + current_page = NULL, + last_read_at = NOW(), + updated_at = NOW() "# ) } else { format!( r#" - INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at) - SELECT id, 'read', NULL, NOW(), NOW() + INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at) + SELECT id, $2, 'read', NULL, NOW(), NOW() FROM books WHERE {series_filter} - ON CONFLICT (book_id) DO UPDATE + ON CONFLICT (book_id, user_id) DO UPDATE SET status = 'read', current_page = NULL, last_read_at = NOW(), @@ -236,9 +270,18 @@ pub async fn 
mark_series_read( }; let result = if body.series == "unclassified" { - sqlx::query(&sql).execute(&state.pool).await? + // $1 = user_id (no series bind needed) + sqlx::query(&sql) + .bind(auth_user.user_id) + .execute(&state.pool) + .await? } else { - sqlx::query(&sql).bind(&body.series).execute(&state.pool).await? + // $1 = series, $2 = user_id + sqlx::query(&sql) + .bind(&body.series) + .bind(auth_user.user_id) + .execute(&state.pool) + .await? }; Ok(Json(MarkSeriesReadResponse { diff --git a/apps/api/src/series.rs b/apps/api/src/series.rs index d7144fe..069c6e6 100644 --- a/apps/api/src/series.rs +++ b/apps/api/src/series.rs @@ -1,10 +1,11 @@ +use axum::extract::Extension; use axum::{extract::{Path, Query, State}, Json}; use serde::{Deserialize, Serialize}; use sqlx::Row; use uuid::Uuid; use utoipa::ToSchema; -use crate::{books::BookItem, error::ApiError, state::AppState}; +use crate::{auth::AuthUser, books::BookItem, error::ApiError, state::AppState}; #[derive(Serialize, ToSchema)] pub struct SeriesItem { @@ -70,9 +71,11 @@ pub struct ListSeriesQuery { )] pub async fn list_series( State(state): State, + user: Option>, Path(library_id): Path, Query(query): Query, ) -> Result, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let limit = query.limit.unwrap_or(50).clamp(1, 200); let page = query.page.unwrap_or(1).max(1); let offset = (page - 1) * limit; @@ -115,6 +118,10 @@ pub async fn list_series( None => String::new(), }; + let user_id_p = p + 1; + let limit_p = p + 2; + let offset_p = p + 3; + let missing_cte = r#" missing_counts AS ( SELECT eml.series_name, @@ -147,7 +154,7 @@ pub async fn list_series( COUNT(*) as book_count, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} GROUP BY sb.name ), {missing_cte}, @@ -160,9 
+167,6 @@ pub async fn list_series( "# ); - let limit_p = p + 1; - let offset_p = p + 2; - let data_sql = format!( r#" WITH sorted_books AS ( @@ -186,7 +190,7 @@ pub async fn list_series( COUNT(*) as book_count, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} GROUP BY sb.name ), {missing_cte}, @@ -245,7 +249,8 @@ pub async fn list_series( } } - data_builder = data_builder.bind(limit).bind(offset); + count_builder = count_builder.bind(user_id); + data_builder = data_builder.bind(user_id).bind(limit).bind(offset); let (count_row, rows) = tokio::try_join!( count_builder.fetch_one(&state.pool), @@ -327,8 +332,10 @@ pub struct ListAllSeriesQuery { )] pub async fn list_all_series( State(state): State, + user: Option>, Query(query): Query, ) -> Result, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let limit = query.limit.unwrap_or(50).clamp(1, 200); let page = query.page.unwrap_or(1).max(1); let offset = (page - 1) * limit; @@ -415,6 +422,10 @@ pub async fn list_all_series( ) "#; + let user_id_p = p + 1; + let limit_p = p + 2; + let offset_p = p + 3; + let count_sql = format!( r#" WITH sorted_books AS ( @@ -426,7 +437,7 @@ pub async fn list_all_series( COUNT(*) as book_count, COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} GROUP BY sb.name, sb.library_id ), {missing_cte}, @@ -445,9 +456,6 @@ pub async fn list_all_series( "REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(sc.name), '\\d+'))[1]::int, 0), sc.name ASC".to_string() }; - let limit_p = p + 1; - let offset_p 
= p + 2; - let data_sql = format!( r#" WITH sorted_books AS ( @@ -475,7 +483,7 @@ pub async fn list_all_series( COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count, MAX(sb.updated_at) as latest_updated_at FROM sorted_books sb - LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id + LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p} GROUP BY sb.name, sb.library_id ), {missing_cte}, @@ -538,7 +546,8 @@ pub async fn list_all_series( data_builder = data_builder.bind(author.clone()); } - data_builder = data_builder.bind(limit).bind(offset); + count_builder = count_builder.bind(user_id); + data_builder = data_builder.bind(user_id).bind(limit).bind(offset); let (count_row, rows) = tokio::try_join!( count_builder.fetch_one(&state.pool), @@ -642,8 +651,10 @@ pub struct OngoingQuery { )] pub async fn ongoing_series( State(state): State, + user: Option>, Query(query): Query, ) -> Result>, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let limit = query.limit.unwrap_or(10).clamp(1, 50); let rows = sqlx::query( @@ -655,7 +666,7 @@ pub async fn ongoing_series( COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count, MAX(brp.last_read_at) AS last_read_at FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified') HAVING ( COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 @@ -685,6 +696,7 @@ pub async fn ongoing_series( "#, ) .bind(limit) + .bind(user_id) .fetch_all(&state.pool) .await?; @@ -721,8 +733,10 @@ pub async fn ongoing_series( )] pub async fn ongoing_books( State(state): State, + user: Option>, Query(query): Query, ) -> Result>, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let limit = query.limit.unwrap_or(10).clamp(1, 50); 
let rows = sqlx::query( @@ -732,7 +746,7 @@ pub async fn ongoing_books( COALESCE(NULLIF(b.series, ''), 'unclassified') AS name, MAX(brp.last_read_at) AS series_last_read_at FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified') HAVING ( COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0 @@ -753,7 +767,7 @@ pub async fn ongoing_books( ) AS rn FROM books b JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2 WHERE COALESCE(brp.status, 'unread') != 'read' ) SELECT id, library_id, kind, format, title, author, authors, series, volume, language, page_count, @@ -765,6 +779,7 @@ pub async fn ongoing_books( "#, ) .bind(limit) + .bind(user_id) .fetch_all(&state.pool) .await?; diff --git a/apps/api/src/stats.rs b/apps/api/src/stats.rs index f9d0d76..9c58b8d 100644 --- a/apps/api/src/stats.rs +++ b/apps/api/src/stats.rs @@ -1,12 +1,12 @@ use axum::{ - extract::{Query, State}, + extract::{Extension, Query, State}, Json, }; use serde::{Deserialize, Serialize}; use sqlx::Row; use utoipa::{IntoParams, ToSchema}; -use crate::{error::ApiError, state::AppState}; +use crate::{auth::AuthUser, error::ApiError, state::AppState}; #[derive(Deserialize, IntoParams)] pub struct StatsQuery { @@ -90,6 +90,7 @@ pub struct CurrentlyReadingItem { pub series: Option, pub current_page: i32, pub page_count: i32, + pub username: Option, } #[derive(Serialize, ToSchema)] @@ -98,6 +99,7 @@ pub struct RecentlyReadItem { pub title: String, pub series: Option, pub last_read_at: String, + pub username: Option, } #[derive(Serialize, ToSchema)] @@ -106,6 +108,13 @@ pub struct MonthlyReading { pub books_read: i64, } 
+#[derive(Serialize, ToSchema)] +pub struct UserMonthlyReading { + pub month: String, + pub username: String, + pub books_read: i64, +} + #[derive(Serialize, ToSchema)] pub struct JobTimePoint { pub label: String, @@ -129,6 +138,7 @@ pub struct StatsResponse { pub additions_over_time: Vec, pub jobs_over_time: Vec, pub metadata: MetadataStats, + pub users_reading_over_time: Vec, } /// Get collection statistics for the dashboard @@ -146,7 +156,9 @@ pub struct StatsResponse { pub async fn get_stats( State(state): State, Query(query): Query, + user: Option>, ) -> Result, ApiError> { + let user_id: Option = user.map(|u| u.0.user_id); let period = query.period.as_deref().unwrap_or("month"); // Overview + reading status in one query let overview_row = sqlx::query( @@ -165,9 +177,10 @@ pub async fn get_stats( COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading, COUNT(*) FILTER (WHERE brp.status = 'read') AS read FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1) "#, ) + .bind(user_id) .fetch_one(&state.pool) .await?; @@ -255,7 +268,7 @@ pub async fn get_stats( COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread_count FROM libraries l LEFT JOIN books b ON b.library_id = l.id - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1) LEFT JOIN LATERAL ( SELECT size_bytes FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1 ) bf ON TRUE @@ -263,6 +276,7 @@ pub async fn get_stats( ORDER BY book_count DESC "#, ) + .bind(user_id) .fetch_all(&state.pool) .await?; @@ -287,13 +301,14 @@ pub async fn get_stats( COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count, COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages FROM books b - LEFT JOIN book_reading_progress brp ON brp.book_id = b.id + LEFT JOIN 
book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1) WHERE b.series IS NOT NULL AND b.series != '' GROUP BY b.series ORDER BY book_count DESC LIMIT 10 "#, ) + .bind(user_id) .fetch_all(&state.pool) .await?; @@ -432,14 +447,17 @@ pub async fn get_stats( // Currently reading books let reading_rows = sqlx::query( r#" - SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count + SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username FROM book_reading_progress brp JOIN books b ON b.id = brp.book_id + LEFT JOIN users u ON u.id = brp.user_id WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL + AND ($1::uuid IS NULL OR brp.user_id = $1) ORDER BY brp.updated_at DESC LIMIT 20 "#, ) + .bind(user_id) .fetch_all(&state.pool) .await?; @@ -453,6 +471,7 @@ pub async fn get_stats( series: r.get("series"), current_page: r.get::, _>("current_page").unwrap_or(0), page_count: r.get::, _>("page_count").unwrap_or(0), + username: r.get("username"), } }) .collect(); @@ -461,14 +480,18 @@ pub async fn get_stats( let recent_rows = sqlx::query( r#" SELECT b.id AS book_id, b.title, b.series, - TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at + TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at, + u.username FROM book_reading_progress brp JOIN books b ON b.id = brp.book_id + LEFT JOIN users u ON u.id = brp.user_id WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL + AND ($1::uuid IS NULL OR brp.user_id = $1) ORDER BY brp.last_read_at DESC LIMIT 10 "#, ) + .bind(user_id) .fetch_all(&state.pool) .await?; @@ -481,6 +504,7 @@ pub async fn get_stats( title: r.get("title"), series: r.get("series"), last_read_at: r.get::, _>("last_read_at").unwrap_or_default(), + username: r.get("username"), } }) .collect(); @@ -499,11 +523,13 @@ pub async fn get_stats( FROM book_reading_progress brp WHERE brp.status = 'read' AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days' + AND ($1::uuid IS 
NULL OR brp.user_id = $1) GROUP BY brp.last_read_at::date ) cnt ON cnt.dt = d.dt ORDER BY month ASC "#, ) + .bind(user_id) .fetch_all(&state.pool) .await? } @@ -523,11 +549,13 @@ pub async fn get_stats( FROM book_reading_progress brp WHERE brp.status = 'read' AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months') + AND ($1::uuid IS NULL OR brp.user_id = $1) GROUP BY DATE_TRUNC('week', brp.last_read_at) ) cnt ON cnt.dt = d.dt ORDER BY month ASC "#, ) + .bind(user_id) .fetch_all(&state.pool) .await? } @@ -547,11 +575,13 @@ pub async fn get_stats( FROM book_reading_progress brp WHERE brp.status = 'read' AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months' + AND ($1::uuid IS NULL OR brp.user_id = $1) GROUP BY DATE_TRUNC('month', brp.last_read_at) ) cnt ON cnt.dt = d.dt ORDER BY month ASC "#, ) + .bind(user_id) .fetch_all(&state.pool) .await? } @@ -565,6 +595,93 @@ pub async fn get_stats( }) .collect(); + // Per-user reading over time (admin view — always all users, no user_id filter) + let users_reading_time_rows = match period { + "day" => { + sqlx::query( + r#" + SELECT + TO_CHAR(d.dt, 'YYYY-MM-DD') AS month, + u.username, + COALESCE(cnt.books_read, 0) AS books_read + FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt) + CROSS JOIN users u + LEFT JOIN ( + SELECT brp.last_read_at::date AS dt, brp.user_id, COUNT(*) AS books_read + FROM book_reading_progress brp + WHERE brp.status = 'read' + AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days' + GROUP BY brp.last_read_at::date, brp.user_id + ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id + ORDER BY month ASC, u.username + "#, + ) + .fetch_all(&state.pool) + .await? 
+ } + "week" => { + sqlx::query( + r#" + SELECT + TO_CHAR(d.dt, 'YYYY-MM-DD') AS month, + u.username, + COALESCE(cnt.books_read, 0) AS books_read + FROM generate_series( + DATE_TRUNC('week', NOW() - INTERVAL '2 months'), + DATE_TRUNC('week', NOW()), + '1 week' + ) AS d(dt) + CROSS JOIN users u + LEFT JOIN ( + SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read + FROM book_reading_progress brp + WHERE brp.status = 'read' + AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months') + GROUP BY DATE_TRUNC('week', brp.last_read_at), brp.user_id + ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id + ORDER BY month ASC, u.username + "#, + ) + .fetch_all(&state.pool) + .await? + } + _ => { + sqlx::query( + r#" + SELECT + TO_CHAR(d.dt, 'YYYY-MM') AS month, + u.username, + COALESCE(cnt.books_read, 0) AS books_read + FROM generate_series( + DATE_TRUNC('month', NOW()) - INTERVAL '11 months', + DATE_TRUNC('month', NOW()), + '1 month' + ) AS d(dt) + CROSS JOIN users u + LEFT JOIN ( + SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read + FROM book_reading_progress brp + WHERE brp.status = 'read' + AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months' + GROUP BY DATE_TRUNC('month', brp.last_read_at), brp.user_id + ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id + ORDER BY month ASC, u.username + "#, + ) + .fetch_all(&state.pool) + .await? 
+ } + }; + + let users_reading_over_time: Vec = users_reading_time_rows + .iter() + .map(|r| UserMonthlyReading { + month: r.get::, _>("month").unwrap_or_default(), + username: r.get("username"), + books_read: r.get("books_read"), + }) + .collect(); + // Jobs over time (with gap filling, grouped by type category) let jobs_rows = match period { "day" => { @@ -697,5 +814,6 @@ pub async fn get_stats( additions_over_time, jobs_over_time, metadata, + users_reading_over_time, })) } diff --git a/apps/api/src/tokens.rs b/apps/api/src/tokens.rs index 6adbd2e..000498a 100644 --- a/apps/api/src/tokens.rs +++ b/apps/api/src/tokens.rs @@ -16,6 +16,8 @@ pub struct CreateTokenRequest { pub name: String, #[schema(value_type = Option, example = "read")] pub scope: Option, + #[schema(value_type = Option)] + pub user_id: Option, } #[derive(Serialize, ToSchema)] @@ -26,6 +28,9 @@ pub struct TokenResponse { pub scope: String, pub prefix: String, #[schema(value_type = Option)] + pub user_id: Option, + pub username: Option, + #[schema(value_type = Option)] pub last_used_at: Option>, #[schema(value_type = Option)] pub revoked_at: Option>, @@ -71,6 +76,10 @@ pub async fn create_token( _ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")), }; + if scope == "read" && input.user_id.is_none() { + return Err(ApiError::bad_request("user_id is required for read-scoped tokens")); + } + let mut random = [0u8; 24]; OsRng.fill_bytes(&mut random); let secret = URL_SAFE_NO_PAD.encode(random); @@ -85,13 +94,14 @@ pub async fn create_token( let id = Uuid::new_v4(); sqlx::query( - "INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)", + "INSERT INTO api_tokens (id, name, prefix, token_hash, scope, user_id) VALUES ($1, $2, $3, $4, $5, $6)", ) .bind(id) .bind(input.name.trim()) .bind(&prefix) .bind(token_hash) .bind(scope) + .bind(input.user_id) .execute(&state.pool) .await?; @@ -118,7 +128,13 @@ pub async fn create_token( )] pub async fn 
list_tokens(State(state): State) -> Result>, ApiError> { let rows = sqlx::query( - "SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC", + r#" + SELECT t.id, t.name, t.scope, t.prefix, t.user_id, u.username, + t.last_used_at, t.revoked_at, t.created_at + FROM api_tokens t + LEFT JOIN users u ON u.id = t.user_id + ORDER BY t.created_at DESC + "#, ) .fetch_all(&state.pool) .await?; @@ -130,6 +146,8 @@ pub async fn list_tokens(State(state): State) -> Result)] + pub user_id: Option, +} + +/// Update a token's assigned user +#[utoipa::path( + patch, + path = "/admin/tokens/{id}", + tag = "tokens", + params( + ("id" = String, Path, description = "Token UUID"), + ), + request_body = UpdateTokenRequest, + responses( + (status = 200, description = "Token updated"), + (status = 404, description = "Token not found"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden - Admin scope required"), + ), + security(("Bearer" = [])) +)] +pub async fn update_token( + State(state): State, + Path(id): Path, + Json(input): Json, +) -> Result, ApiError> { + let result = sqlx::query("UPDATE api_tokens SET user_id = $1 WHERE id = $2") + .bind(input.user_id) + .bind(id) + .execute(&state.pool) + .await?; + + if result.rows_affected() == 0 { + return Err(ApiError::not_found("token not found")); + } + + Ok(Json(serde_json::json!({"updated": true, "id": id}))) +} + /// Permanently delete a revoked API token #[utoipa::path( post, diff --git a/apps/api/src/users.rs b/apps/api/src/users.rs new file mode 100644 index 0000000..ab55df5 --- /dev/null +++ b/apps/api/src/users.rs @@ -0,0 +1,195 @@ +use axum::{extract::{Path, State}, Json}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::Row; +use uuid::Uuid; +use utoipa::ToSchema; + +use crate::{error::ApiError, state::AppState}; + +#[derive(Serialize, ToSchema)] +pub struct UserResponse { + #[schema(value_type = String)] + 
pub id: Uuid, + pub username: String, + pub token_count: i64, + pub books_read: i64, + pub books_reading: i64, + #[schema(value_type = String)] + pub created_at: DateTime, +} + +#[derive(Deserialize, ToSchema)] +pub struct CreateUserRequest { + pub username: String, +} + +/// List all reader users with their associated token count +#[utoipa::path( + get, + path = "/admin/users", + tag = "users", + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden - Admin scope required"), + ), + security(("Bearer" = [])) +)] +pub async fn list_users(State(state): State) -> Result>, ApiError> { + let rows = sqlx::query( + r#" + SELECT u.id, u.username, u.created_at, + COUNT(DISTINCT t.id) AS token_count, + COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read, + COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'reading') AS books_reading + FROM users u + LEFT JOIN api_tokens t ON t.user_id = u.id AND t.revoked_at IS NULL + LEFT JOIN book_reading_progress brp ON brp.user_id = u.id + GROUP BY u.id, u.username, u.created_at + ORDER BY u.created_at DESC + "#, + ) + .fetch_all(&state.pool) + .await?; + + let items = rows + .into_iter() + .map(|row| UserResponse { + id: row.get("id"), + username: row.get("username"), + token_count: row.get("token_count"), + books_read: row.get("books_read"), + books_reading: row.get("books_reading"), + created_at: row.get("created_at"), + }) + .collect(); + + Ok(Json(items)) +} + +/// Create a new reader user +#[utoipa::path( + post, + path = "/admin/users", + tag = "users", + request_body = CreateUserRequest, + responses( + (status = 200, body = UserResponse, description = "User created"), + (status = 400, description = "Invalid input"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden - Admin scope required"), + ), + security(("Bearer" = [])) +)] +pub async fn create_user( + State(state): State, + 
Json(input): Json, +) -> Result, ApiError> { + if input.username.trim().is_empty() { + return Err(ApiError::bad_request("username is required")); + } + + let id = Uuid::new_v4(); + let row = sqlx::query( + "INSERT INTO users (id, username) VALUES ($1, $2) RETURNING id, username, created_at", + ) + .bind(id) + .bind(input.username.trim()) + .fetch_one(&state.pool) + .await + .map_err(|e| { + if let sqlx::Error::Database(ref db_err) = e { + if db_err.constraint() == Some("users_username_key") { + return ApiError::bad_request("username already exists"); + } + } + ApiError::from(e) + })?; + + Ok(Json(UserResponse { + id: row.get("id"), + username: row.get("username"), + token_count: 0, + books_read: 0, + books_reading: 0, + created_at: row.get("created_at"), + })) +} + +/// Update a reader user's username +#[utoipa::path( + patch, + path = "/admin/users/{id}", + tag = "users", + request_body = CreateUserRequest, + responses( + (status = 200, body = UserResponse, description = "User updated"), + (status = 400, description = "Invalid input"), + (status = 404, description = "User not found"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden - Admin scope required"), + ), + security(("Bearer" = [])) +)] +pub async fn update_user( + State(state): State, + Path(id): Path, + Json(input): Json, +) -> Result, ApiError> { + if input.username.trim().is_empty() { + return Err(ApiError::bad_request("username is required")); + } + + let result = sqlx::query("UPDATE users SET username = $1 WHERE id = $2") + .bind(input.username.trim()) + .bind(id) + .execute(&state.pool) + .await + .map_err(|e| { + if let sqlx::Error::Database(ref db_err) = e { + if db_err.constraint() == Some("users_username_key") { + return ApiError::bad_request("username already exists"); + } + } + ApiError::from(e) + })?; + + if result.rows_affected() == 0 { + return Err(ApiError::not_found("user not found")); + } + + Ok(Json(serde_json::json!({"updated": true, "id": 
id}))) +} + +/// Delete a reader user (cascades on tokens and reading progress) +#[utoipa::path( + delete, + path = "/admin/users/{id}", + tag = "users", + params( + ("id" = String, Path, description = "User UUID"), + ), + responses( + (status = 200, description = "User deleted"), + (status = 404, description = "User not found"), + (status = 401, description = "Unauthorized"), + (status = 403, description = "Forbidden - Admin scope required"), + ), + security(("Bearer" = [])) +)] +pub async fn delete_user( + State(state): State, + Path(id): Path, +) -> Result, ApiError> { + let result = sqlx::query("DELETE FROM users WHERE id = $1") + .bind(id) + .execute(&state.pool) + .await?; + + if result.rows_affected() == 0 { + return Err(ApiError::not_found("user not found")); + } + + Ok(Json(serde_json::json!({"deleted": true, "id": id}))) +} diff --git a/apps/backoffice/app/(app)/layout.tsx b/apps/backoffice/app/(app)/layout.tsx index c657f63..915e84c 100644 --- a/apps/backoffice/app/(app)/layout.tsx +++ b/apps/backoffice/app/(app)/layout.tsx @@ -1,11 +1,15 @@ import Image from "next/image"; import Link from "next/link"; import type { ReactNode } from "react"; +import { cookies } from "next/headers"; +import { revalidatePath } from "next/cache"; import { ThemeToggle } from "@/app/theme-toggle"; import { JobsIndicator } from "@/app/components/JobsIndicator"; import { NavIcon, Icon } from "@/app/components/ui"; import { LogoutButton } from "@/app/components/LogoutButton"; import { MobileNav } from "@/app/components/MobileNav"; +import { UserSwitcher } from "@/app/components/UserSwitcher"; +import { fetchUsers } from "@/lib/api"; import { getServerTranslations } from "@/lib/i18n/server"; import type { TranslationKey } from "@/lib/i18n/fr"; @@ -27,6 +31,21 @@ const navItems: NavItem[] = [ export default async function AppLayout({ children }: { children: ReactNode }) { const { t } = await getServerTranslations(); + const cookieStore = await cookies(); + const activeUserId = 
cookieStore.get("as_user_id")?.value || null; + const users = await fetchUsers().catch(() => []); + + async function setActiveUserAction(formData: FormData) { + "use server"; + const userId = formData.get("user_id") as string; + const store = await cookies(); + if (userId) { + store.set("as_user_id", userId, { path: "/", httpOnly: false, sameSite: "lax" }); + } else { + store.delete("as_user_id"); + } + revalidatePath("/", "layout"); + } return ( <> @@ -39,7 +58,7 @@ export default async function AppLayout({ children }: { children: ReactNode }) { StripStream
StripStream - + {t("common.backoffice")}
@@ -50,16 +69,22 @@ export default async function AppLayout({ children }: { children: ReactNode }) { {navItems.map((item) => ( - {t(item.labelKey)} + {t(item.labelKey)} ))} + +
diff --git a/apps/backoffice/app/(app)/page.tsx b/apps/backoffice/app/(app)/page.tsx index cd70715..621da50 100644 --- a/apps/backoffice/app/(app)/page.tsx +++ b/apps/backoffice/app/(app)/page.tsx @@ -1,9 +1,9 @@ import React from "react"; -import { fetchStats, StatsResponse, getBookCoverUrl } from "@/lib/api"; +import { fetchStats, fetchUsers, StatsResponse, UserDto } from "@/lib/api"; import { Card, CardContent, CardHeader, CardTitle } from "@/app/components/ui"; import { RcDonutChart, RcBarChart, RcAreaChart, RcStackedBar, RcHorizontalBar, RcMultiLineChart } from "@/app/components/DashboardCharts"; import { PeriodToggle } from "@/app/components/PeriodToggle"; -import Image from "next/image"; +import { CurrentlyReadingList, RecentlyReadList } from "@/app/components/ReadingUserFilter"; import Link from "next/link"; import { getServerTranslations } from "@/lib/i18n/server"; import type { TranslateFunction } from "@/lib/i18n/dictionaries"; @@ -70,8 +70,12 @@ export default async function DashboardPage({ const { t, locale } = await getServerTranslations(); let stats: StatsResponse | null = null; + let users: UserDto[] = []; try { - stats = await fetchStats(period); + [stats, users] = await Promise.all([ + fetchStats(period), + fetchUsers().catch(() => []), + ]); } catch (e) { console.error("Failed to fetch stats:", e); } @@ -94,6 +98,7 @@ export default async function DashboardPage({ currently_reading = [], recently_read = [], reading_over_time = [], + users_reading_over_time = [], by_format, by_library, top_series, @@ -145,37 +150,12 @@ export default async function DashboardPage({ {t("dashboard.currentlyReading")} - {currently_reading.length === 0 ? ( -

{t("dashboard.noCurrentlyReading")}

- ) : ( -
- {currently_reading.slice(0, 8).map((book) => { - const pct = book.page_count > 0 ? Math.round((book.current_page / book.page_count) * 100) : 0; - return ( - - {book.title} -
-

{book.title}

- {book.series &&

{book.series}

} -
-
-
-
- {pct}% -
-

{t("dashboard.pageProgress", { current: book.current_page, total: book.page_count })}

-
- - ); - })} -
- )} + @@ -185,28 +165,11 @@ export default async function DashboardPage({ {t("dashboard.recentlyRead")} - {recently_read.length === 0 ? ( -

{t("dashboard.noRecentlyRead")}

- ) : ( -
- {recently_read.map((book) => ( - - {book.title} -
-

{book.title}

- {book.series &&

{book.series}

} -
- {book.last_read_at} - - ))} -
- )} +
@@ -219,30 +182,84 @@ export default async function DashboardPage({ - ({ label: formatChartLabel(m.month, period, locale), value: m.books_read }))} - color="hsl(142 60% 45%)" - /> + {(() => { + const userColors = [ + "hsl(142 60% 45%)", "hsl(198 78% 37%)", "hsl(45 93% 47%)", + "hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)", + ]; + const usernames = [...new Set(users_reading_over_time.map(r => r.username))]; + if (usernames.length === 0) { + return ( + ({ label: formatChartLabel(m.month, period, locale), value: m.books_read }))} + color="hsl(142 60% 45%)" + /> + ); + } + // Pivot: { label, username1: n, username2: n, ... } + const byMonth = new Map>(); + for (const row of users_reading_over_time) { + const label = formatChartLabel(row.month, period, locale); + if (!byMonth.has(row.month)) byMonth.set(row.month, { label }); + byMonth.get(row.month)![row.username] = row.books_read; + } + const chartData = [...byMonth.values()]; + const lines = usernames.map((u, i) => ({ + key: u, + label: u, + color: userColors[i % userColors.length], + })); + return ; + })()} {/* Charts row */}
- {/* Reading status donut */} + {/* Reading status par lecteur */} {t("dashboard.readingStatus")} - + {users.length === 0 ? ( + + ) : ( +
+ {users.map((user) => { + const total = overview.total_books; + const read = user.books_read; + const reading = user.books_reading; + const unread = Math.max(0, total - read - reading); + const readPct = total > 0 ? (read / total) * 100 : 0; + const readingPct = total > 0 ? (reading / total) * 100 : 0; + return ( +
+
+ {user.username} + + {read} + {reading > 0 && · {reading}} + / {total} + +
+
+
+
+
+
+ ); + })} +
+ )} diff --git a/apps/backoffice/app/(app)/settings/SettingsPage.tsx b/apps/backoffice/app/(app)/settings/SettingsPage.tsx index 86cccf4..d378702 100644 --- a/apps/backoffice/app/(app)/settings/SettingsPage.tsx +++ b/apps/backoffice/app/(app)/settings/SettingsPage.tsx @@ -3,7 +3,7 @@ import { useState, useEffect, useCallback, useMemo } from "react"; import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "@/app/components/ui"; import { ProviderIcon } from "@/app/components/ProviderIcon"; -import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, KomgaSyncResponse, KomgaSyncReportSummary, StatusMappingDto } from "@/lib/api"; +import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, KomgaSyncResponse, KomgaSyncReportSummary, StatusMappingDto, UserDto } from "@/lib/api"; import { useTranslation } from "@/lib/i18n/context"; import type { Locale } from "@/lib/i18n/types"; @@ -11,9 +11,10 @@ interface SettingsPageProps { initialSettings: Settings; initialCacheStats: CacheStats; initialThumbnailStats: ThumbnailStats; + users: UserDto[]; } -export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats }: SettingsPageProps) { +export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats, users }: SettingsPageProps) { const { t, locale, setLocale } = useTranslation(); const [settings, setSettings] = useState({ ...initialSettings, @@ -29,6 +30,7 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi const [komgaUrl, setKomgaUrl] = useState(""); const [komgaUsername, setKomgaUsername] = useState(""); const [komgaPassword, setKomgaPassword] = useState(""); + const [komgaUserId, setKomgaUserId] = useState(users[0]?.id ?? 
""); const [isSyncing, setIsSyncing] = useState(false); const [syncResult, setSyncResult] = useState(null); const [syncError, setSyncError] = useState(null); @@ -104,6 +106,7 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi if (data) { if (data.url) setKomgaUrl(data.url); if (data.username) setKomgaUsername(data.username); + if (data.user_id) setKomgaUserId(data.user_id); } }).catch(() => {}); }, [fetchReports]); @@ -128,7 +131,7 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi const response = await fetch("/api/komga/sync", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ url: komgaUrl, username: komgaUsername, password: komgaPassword }), + body: JSON.stringify({ url: komgaUrl, username: komgaUsername, password: komgaPassword, user_id: komgaUserId }), }); const data = await response.json(); if (!response.ok) { @@ -140,7 +143,7 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi fetch("/api/settings/komga", { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ value: { url: komgaUrl, username: komgaUsername } }), + body: JSON.stringify({ value: { url: komgaUrl, username: komgaUsername, user_id: komgaUserId } }), }).catch(() => {}); } } catch { @@ -627,9 +630,22 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi + {users.length > 0 && ( + + + + setKomgaUserId(e.target.value)}> + {users.map((u) => ( + + ))} + + + + )} + + + + + + + +
+ + + + + + + + + + + + + {/* Ligne admin synthétique */} + + + + + + + + + {/* Ligne tokens read non assignés */} + {(() => { + const unassigned = tokens.filter(tok => tok.scope === "read" && !tok.user_id && !tok.revoked_at); + if (unassigned.length === 0) return null; + return ( + + + + + + + + + ); + })()} + {users.map((user) => ( + + + + + + + + + ))} + +
{t("users.name")}{t("users.tokenCount")}{t("status.read")}{t("status.reading")}{t("users.createdAt")}{t("users.actions")}
+ {process.env.ADMIN_USERNAME ?? "admin"} + {t("tokens.scopeAdmin")} + + {tokens.filter(tok => tok.scope === "admin" && !tok.revoked_at).length} +
+ {t("tokens.noUser")} + {unassigned.length}
+ + {user.token_count} + {user.books_read > 0 + ? {user.books_read} + : } + + {user.books_reading > 0 + ? {user.books_reading} + : } + + {new Date(user.created_at).toLocaleDateString()} + +
+ + +
+
+
+
+ + {/* ── Tokens API ───────────────────────────────────────── */} +
+

{t("tokens.apiTokens")}

+
+ {params.created ? ( @@ -72,7 +219,7 @@ export default async function TokensPage({
- + @@ -80,6 +227,14 @@ export default async function TokensPage({ + + + + {users.map((user) => ( + + ))} + +
@@ -92,6 +247,7 @@ export default async function TokensPage({ {t("tokens.name")} + {t("tokens.user")} {t("tokens.scope")} {t("tokens.prefix")} {t("tokens.status")} @@ -102,6 +258,15 @@ export default async function TokensPage({ {tokens.map((token) => ( {token.name} + + + {token.scope} diff --git a/apps/backoffice/app/components/ReadingUserFilter.tsx b/apps/backoffice/app/components/ReadingUserFilter.tsx new file mode 100644 index 0000000..d134b79 --- /dev/null +++ b/apps/backoffice/app/components/ReadingUserFilter.tsx @@ -0,0 +1,145 @@ +"use client"; + +import { useState } from "react"; +import Link from "next/link"; +import Image from "next/image"; +import type { CurrentlyReadingItem, RecentlyReadItem } from "@/lib/api"; +import { getBookCoverUrl } from "@/lib/api"; + +function FilterPills({ usernames, selected, allLabel, onSelect }: { + usernames: string[]; + selected: string | null; + allLabel: string; + onSelect: (u: string | null) => void; +}) { + if (usernames.length <= 1) return null; + return ( +
+ + {usernames.map((u) => ( + + ))} +
+ ); +} + +export function CurrentlyReadingList({ + items, + allLabel, + emptyLabel, + pageProgressTemplate, +}: { + items: CurrentlyReadingItem[]; + allLabel: string; + emptyLabel: string; + /** Template with {{current}} and {{total}} placeholders */ + pageProgressTemplate: string; +}) { + const usernames = [...new Set(items.map((i) => i.username).filter((u): u is string => !!u))]; + const [selected, setSelected] = useState(null); + const filtered = selected ? items.filter((i) => i.username === selected) : items; + + return ( +
+ + {filtered.length === 0 ? ( +

{emptyLabel}

+ ) : ( +
+ {filtered.slice(0, 8).map((book) => { + const pct = book.page_count > 0 ? Math.round((book.current_page / book.page_count) * 100) : 0; + return ( + + {book.title} +
+

{book.title}

+ {book.series &&

{book.series}

} + {book.username && usernames.length > 1 && ( +

{book.username}

+ )} +
+
+
+
+ {pct}% +
+

{pageProgressTemplate.replace("{{current}}", String(book.current_page)).replace("{{total}}", String(book.page_count))}

+
+ + ); + })} +
+ )} +
+ ); +} + +export function RecentlyReadList({ + items, + allLabel, + emptyLabel, +}: { + items: RecentlyReadItem[]; + allLabel: string; + emptyLabel: string; +}) { + const usernames = [...new Set(items.map((i) => i.username).filter((u): u is string => !!u))]; + const [selected, setSelected] = useState(null); + const filtered = selected ? items.filter((i) => i.username === selected) : items; + + return ( +
+ + {filtered.length === 0 ? ( +

{emptyLabel}

+ ) : ( +
+ {filtered.map((book) => ( + + {book.title} +
+

{book.title}

+ {book.series &&

{book.series}

} + {book.username && usernames.length > 1 && ( +

{book.username}

+ )} +
+ {book.last_read_at} + + ))} +
+ )} +
+ ); +} diff --git a/apps/backoffice/app/components/TokenUserSelect.tsx b/apps/backoffice/app/components/TokenUserSelect.tsx new file mode 100644 index 0000000..cec40b0 --- /dev/null +++ b/apps/backoffice/app/components/TokenUserSelect.tsx @@ -0,0 +1,38 @@ +"use client"; + +import { useOptimistic, useTransition } from "react"; + +interface TokenUserSelectProps { + tokenId: string; + currentUserId?: string; + users: { id: string; username: string }[]; + action: (formData: FormData) => Promise; + noUserLabel: string; +} + +export function TokenUserSelect({ tokenId, currentUserId, users, action, noUserLabel }: TokenUserSelectProps) { + const [optimisticValue, setOptimisticValue] = useOptimistic(currentUserId ?? ""); + const [, startTransition] = useTransition(); + + return ( + + ); +} diff --git a/apps/backoffice/app/components/UserSwitcher.tsx b/apps/backoffice/app/components/UserSwitcher.tsx new file mode 100644 index 0000000..b77df17 --- /dev/null +++ b/apps/backoffice/app/components/UserSwitcher.tsx @@ -0,0 +1,121 @@ +"use client"; + +import { useState, useTransition, useRef, useEffect } from "react"; +import type { UserDto } from "@/lib/api"; + +export function UserSwitcher({ + users, + activeUserId, + setActiveUserAction, +}: { + users: UserDto[]; + activeUserId: string | null; + setActiveUserAction: (formData: FormData) => Promise; +}) { + const [open, setOpen] = useState(false); + const [, startTransition] = useTransition(); + const ref = useRef(null); + + const activeUser = users.find((u) => u.id === activeUserId) ?? 
null; + + useEffect(() => { + function handleClickOutside(e: MouseEvent) { + if (ref.current && !ref.current.contains(e.target as Node)) setOpen(false); + } + document.addEventListener("mousedown", handleClickOutside); + return () => document.removeEventListener("mousedown", handleClickOutside); + }, []); + + function select(userId: string | null) { + setOpen(false); + startTransition(async () => { + const fd = new FormData(); + fd.append("user_id", userId ?? ""); + await setActiveUserAction(fd); + }); + } + + if (users.length === 0) return null; + + const isImpersonating = activeUserId !== null; + + return ( +
+ + + {open && ( +
+ + +
+ + {users.map((user) => ( + + ))} +
+ )} +
+ ); +} diff --git a/apps/backoffice/app/components/UsernameEdit.tsx b/apps/backoffice/app/components/UsernameEdit.tsx new file mode 100644 index 0000000..067d2bb --- /dev/null +++ b/apps/backoffice/app/components/UsernameEdit.tsx @@ -0,0 +1,73 @@ +"use client"; + +import { useOptimistic, useTransition, useRef, useState } from "react"; + +export function UsernameEdit({ + userId, + currentUsername, + action, +}: { + userId: string; + currentUsername: string; + action: (formData: FormData) => Promise; +}) { + const [optimisticUsername, setOptimisticUsername] = useOptimistic(currentUsername); + const [editing, setEditing] = useState(false); + const [, startTransition] = useTransition(); + const inputRef = useRef(null); + + function startEdit() { + setEditing(true); + setTimeout(() => inputRef.current?.select(), 0); + } + + function submit(value: string) { + const trimmed = value.trim(); + if (!trimmed || trimmed === currentUsername) { + setEditing(false); + return; + } + setEditing(false); + startTransition(async () => { + setOptimisticUsername(trimmed); + const fd = new FormData(); + fd.append("id", userId); + fd.append("username", trimmed); + await action(fd); + }); + } + + if (editing) { + return ( + submit(e.target.value)} + onKeyDown={(e) => { + if (e.key === "Enter") submit((e.target as HTMLInputElement).value); + if (e.key === "Escape") setEditing(false); + }} + /> + ); + } + + return ( + + ); +} diff --git a/apps/backoffice/lib/api.ts b/apps/backoffice/lib/api.ts index 41a2ad9..f384da5 100644 --- a/apps/backoffice/lib/api.ts +++ b/apps/backoffice/lib/api.ts @@ -45,6 +45,17 @@ export type TokenDto = { scope: string; prefix: string; revoked_at: string | null; + user_id?: string; + username?: string; +}; + +export type UserDto = { + id: string; + username: string; + token_count: number; + books_read: number; + books_reading: number; + created_at: string; }; export type FolderItem = { @@ -151,6 +162,16 @@ export async function apiFetch( headers.set("Content-Type", 
"application/json"); } + // Impersonation : injecte X-As-User si un user est sélectionné dans le backoffice + try { + const { cookies } = await import("next/headers"); + const cookieStore = await cookies(); + const asUserId = cookieStore.get("as_user_id")?.value; + if (asUserId) headers.set("X-As-User", asUserId); + } catch { + // Hors contexte Next.js (tests, etc.) + } + const { next: nextOptions, ...restInit } = init ?? {}; const res = await fetch(`${baseUrl}${path}`, { @@ -268,10 +289,32 @@ export async function listTokens() { return apiFetch("/admin/tokens"); } -export async function createToken(name: string, scope: string) { +export async function createToken(name: string, scope: string, userId?: string) { return apiFetch<{ token: string }>("/admin/tokens", { method: "POST", - body: JSON.stringify({ name, scope }), + body: JSON.stringify({ name, scope, ...(userId ? { user_id: userId } : {}) }), + }); +} + +export async function fetchUsers(): Promise { + return apiFetch("/admin/users"); +} + +export async function createUser(username: string): Promise { + return apiFetch("/admin/users", { + method: "POST", + body: JSON.stringify({ username }), + }); +} + +export async function deleteUser(id: string): Promise { + return apiFetch(`/admin/users/${id}`, { method: "DELETE" }); +} + +export async function updateUser(id: string, username: string): Promise { + return apiFetch(`/admin/users/${id}`, { + method: "PATCH", + body: JSON.stringify({ username }), }); } @@ -283,6 +326,13 @@ export async function deleteToken(id: string) { return apiFetch(`/admin/tokens/${id}/delete`, { method: "POST" }); } +export async function updateToken(id: string, userId: string | null) { + return apiFetch(`/admin/tokens/${id}`, { + method: "PATCH", + body: JSON.stringify({ user_id: userId || null }), + }); +} + export async function fetchBooks( libraryId?: string, series?: string, @@ -557,6 +607,7 @@ export type CurrentlyReadingItem = { series: string | null; current_page: number; 
page_count: number; + username?: string; }; export type RecentlyReadItem = { @@ -564,6 +615,7 @@ export type RecentlyReadItem = { title: string; series: string | null; last_read_at: string; + username?: string; }; export type MonthlyReading = { @@ -571,6 +623,12 @@ export type MonthlyReading = { books_read: number; }; +export type UserMonthlyReading = { + month: string; + username: string; + books_read: number; +}; + export type JobTimePoint = { label: string; scan: number; @@ -585,6 +643,7 @@ export type StatsResponse = { currently_reading: CurrentlyReadingItem[]; recently_read: RecentlyReadItem[]; reading_over_time: MonthlyReading[]; + users_reading_over_time: UserMonthlyReading[]; by_format: FormatCount[]; by_language: LanguageCount[]; by_library: LibraryStatsItem[]; @@ -699,11 +758,13 @@ export type KomgaSyncRequest = { url: string; username: string; password: string; + user_id: string; }; export type KomgaSyncResponse = { id: string; komga_url: string; + user_id?: string; total_komga_read: number; matched: number; already_read: number; @@ -717,6 +778,7 @@ export type KomgaSyncResponse = { export type KomgaSyncReportSummary = { id: string; komga_url: string; + user_id?: string; total_komga_read: number; matched: number; already_read: number; diff --git a/apps/backoffice/lib/i18n/en.ts b/apps/backoffice/lib/i18n/en.ts index 24fd73a..6938782 100644 --- a/apps/backoffice/lib/i18n/en.ts +++ b/apps/backoffice/lib/i18n/en.ts @@ -8,6 +8,7 @@ const en: Record = { "nav.libraries": "Libraries", "nav.jobs": "Jobs", "nav.tokens": "Tokens", + "nav.users": "Users", "nav.settings": "Settings", "nav.navigation": "Navigation", "nav.closeMenu": "Close menu", @@ -96,6 +97,7 @@ const en: Record = { "dashboard.pageProgress": "p. 
{{current}} / {{total}}", "dashboard.noCurrentlyReading": "No books in progress", "dashboard.noRecentlyRead": "No books read recently", + "dashboard.allUsers": "All", // Books page "books.title": "Books", @@ -405,6 +407,21 @@ const en: Record = { "tokens.revoked": "Revoked", "tokens.active": "Active", "tokens.revoke": "Revoke", + "tokens.user": "User", + "tokens.noUser": "None (admin)", + "tokens.apiTokens": "API Tokens", + + // Users page + "users.title": "Users", + "users.createNew": "Create a user", + "users.createDescription": "Create a user account for read access", + "users.username": "Username", + "users.createButton": "Create", + "users.name": "Username", + "users.tokenCount": "Tokens", + "users.createdAt": "Created", + "users.actions": "Actions", + "users.noUsers": "No users", // Settings page "settings.title": "Settings", diff --git a/apps/backoffice/lib/i18n/fr.ts b/apps/backoffice/lib/i18n/fr.ts index 4ba7c23..eff0534 100644 --- a/apps/backoffice/lib/i18n/fr.ts +++ b/apps/backoffice/lib/i18n/fr.ts @@ -6,6 +6,7 @@ const fr = { "nav.libraries": "Bibliothèques", "nav.jobs": "Tâches", "nav.tokens": "Jetons", + "nav.users": "Utilisateurs", "nav.settings": "Paramètres", "nav.navigation": "Navigation", "nav.closeMenu": "Fermer le menu", @@ -94,6 +95,7 @@ const fr = { "dashboard.pageProgress": "p. 
{{current}} / {{total}}", "dashboard.noCurrentlyReading": "Aucun livre en cours", "dashboard.noRecentlyRead": "Aucun livre lu récemment", + "dashboard.allUsers": "Tous", // Books page "books.title": "Livres", @@ -403,6 +405,21 @@ const fr = { "tokens.revoked": "Révoqué", "tokens.active": "Actif", "tokens.revoke": "Révoquer", + "tokens.user": "Utilisateur", + "tokens.noUser": "Aucun (admin)", + "tokens.apiTokens": "Tokens API", + + // Users page + "users.title": "Utilisateurs", + "users.createNew": "Créer un utilisateur", + "users.createDescription": "Créer un compte utilisateur pour accès lecture", + "users.username": "Nom d'utilisateur", + "users.createButton": "Créer", + "users.name": "Nom d'utilisateur", + "users.tokenCount": "Nb de jetons", + "users.createdAt": "Créé le", + "users.actions": "Actions", + "users.noUsers": "Aucun utilisateur", // Settings page "settings.title": "Paramètres", diff --git a/infra/migrations/0051_add_user_to_komga_sync.sql b/infra/migrations/0051_add_user_to_komga_sync.sql new file mode 100644 index 0000000..df73e50 --- /dev/null +++ b/infra/migrations/0051_add_user_to_komga_sync.sql @@ -0,0 +1 @@ +ALTER TABLE komga_sync_reports ADD COLUMN user_id UUID REFERENCES users(id) ON DELETE SET NULL;