feat: multi-user reading progress & backoffice impersonation
- Scope all reading progress (books, series, stats) by user via Option<Extension<AuthUser>> — admin sees aggregate, read token sees own data - Fix duplicate book rows when admin views lists (IS NOT NULL guard on JOIN) - Add X-As-User header support: admin can impersonate any user from backoffice - UserSwitcher dropdown in nav header (persisted via as_user_id cookie) - Per-user filter pills on "Currently reading" and "Recently read" dashboard sections - Inline username editing (UsernameEdit component with optimistic update) - PATCH /admin/users/:id endpoint to rename a user - Unassigned read tokens row in users table - Komga sync now requires a user_id — reading progress attributed to selected user - Migration 0051: add user_id column to komga_sync_reports - Nav breakpoints: icons-only from md, labels from xl, hamburger until md Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -10,10 +10,15 @@ use sqlx::Row;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AuthUser {
|
||||
pub user_id: uuid::Uuid,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Scope {
|
||||
Admin,
|
||||
Read,
|
||||
Read { user_id: uuid::Uuid },
|
||||
}
|
||||
|
||||
pub async fn require_admin(
|
||||
@@ -40,6 +45,20 @@ pub async fn require_read(
|
||||
let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
|
||||
let scope = authenticate(&state, token).await?;
|
||||
|
||||
if let Scope::Read { user_id } = &scope {
|
||||
req.extensions_mut().insert(AuthUser { user_id: *user_id });
|
||||
} else if matches!(scope, Scope::Admin) {
|
||||
// Admin peut s'impersonifier via le header X-As-User
|
||||
if let Some(as_user_id) = req
|
||||
.headers()
|
||||
.get("X-As-User")
|
||||
.and_then(|v| v.to_str().ok())
|
||||
.and_then(|v| uuid::Uuid::parse_str(v).ok())
|
||||
{
|
||||
req.extensions_mut().insert(AuthUser { user_id: as_user_id });
|
||||
}
|
||||
}
|
||||
|
||||
req.extensions_mut().insert(scope);
|
||||
Ok(next.run(req).await)
|
||||
}
|
||||
@@ -60,8 +79,7 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
|
||||
|
||||
let maybe_row = sqlx::query(
|
||||
r#"
|
||||
SELECT id, token_hash, scope
|
||||
FROM api_tokens
|
||||
SELECT id, token_hash, scope, user_id FROM api_tokens
|
||||
WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW())
|
||||
"#,
|
||||
)
|
||||
@@ -88,7 +106,12 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
|
||||
let scope: String = row.try_get("scope").map_err(|_| ApiError::unauthorized("invalid token"))?;
|
||||
match scope.as_str() {
|
||||
"admin" => Ok(Scope::Admin),
|
||||
"read" => Ok(Scope::Read),
|
||||
"read" => {
|
||||
let user_id: uuid::Uuid = row
|
||||
.try_get("user_id")
|
||||
.map_err(|_| ApiError::unauthorized("read token missing user_id"))?;
|
||||
Ok(Scope::Read { user_id })
|
||||
}
|
||||
_ => Err(ApiError::unauthorized("invalid token scope")),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use axum::{extract::{Path, Query, State}, Json};
|
||||
use axum::{extract::{Extension, Path, Query, State}, Json};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
||||
use crate::{auth::AuthUser, error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
pub struct ListBooksQuery {
|
||||
@@ -122,7 +122,9 @@ pub struct BookDetails {
|
||||
pub async fn list_books(
|
||||
State(state): State<AppState>,
|
||||
Query(query): Query<ListBooksQuery>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
) -> Result<Json<BooksPage>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let limit = query.limit.unwrap_or(50).clamp(1, 200);
|
||||
let page = query.page.unwrap_or(1).max(1);
|
||||
let offset = (page - 1) * limit;
|
||||
@@ -151,6 +153,8 @@ pub async fn list_books(
|
||||
Some(_) => { p += 1; format!("AND eml.provider = ${p}") },
|
||||
None => String::new(),
|
||||
};
|
||||
p += 1;
|
||||
let uid_p = p;
|
||||
|
||||
let metadata_links_cte = r#"
|
||||
metadata_links AS (
|
||||
@@ -164,7 +168,7 @@ pub async fn list_books(
|
||||
let count_sql = format!(
|
||||
r#"WITH {metadata_links_cte}
|
||||
SELECT COUNT(*) FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
|
||||
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
|
||||
WHERE ($1::uuid IS NULL OR b.library_id = $1)
|
||||
AND ($2::text IS NULL OR b.kind = $2)
|
||||
@@ -192,7 +196,7 @@ pub async fn list_books(
|
||||
brp.current_page AS reading_current_page,
|
||||
brp.last_read_at AS reading_last_read_at
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
|
||||
LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
|
||||
WHERE ($1::uuid IS NULL OR b.library_id = $1)
|
||||
AND ($2::text IS NULL OR b.kind = $2)
|
||||
@@ -235,8 +239,8 @@ pub async fn list_books(
|
||||
data_builder = data_builder.bind(mp.clone());
|
||||
}
|
||||
}
|
||||
|
||||
data_builder = data_builder.bind(limit).bind(offset);
|
||||
count_builder = count_builder.bind(user_id);
|
||||
data_builder = data_builder.bind(user_id).bind(limit).bind(offset);
|
||||
|
||||
let (count_row, rows) = tokio::try_join!(
|
||||
count_builder.fetch_one(&state.pool),
|
||||
@@ -295,7 +299,9 @@ pub async fn list_books(
|
||||
pub async fn get_book(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
) -> Result<Json<BookDetails>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
|
||||
@@ -311,11 +317,12 @@ pub async fn get_book(
|
||||
ORDER BY updated_at DESC
|
||||
LIMIT 1
|
||||
) bf ON TRUE
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
|
||||
WHERE b.id = $1
|
||||
"#,
|
||||
)
|
||||
.bind(id)
|
||||
.bind(user_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -521,9 +528,9 @@ pub async fn update_book(
|
||||
WHERE id = $1
|
||||
RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path,
|
||||
summary, isbn, publish_date,
|
||||
COALESCE((SELECT status FROM book_reading_progress WHERE book_id = $1), 'unread') AS reading_status,
|
||||
(SELECT current_page FROM book_reading_progress WHERE book_id = $1) AS reading_current_page,
|
||||
(SELECT last_read_at FROM book_reading_progress WHERE book_id = $1) AS reading_last_read_at
|
||||
'unread' AS reading_status,
|
||||
NULL::integer AS reading_current_page,
|
||||
NULL::timestamptz AS reading_last_read_at
|
||||
"#,
|
||||
)
|
||||
.bind(id)
|
||||
|
||||
@@ -38,6 +38,8 @@ pub struct KomgaSyncRequest {
|
||||
pub url: String,
|
||||
pub username: String,
|
||||
pub password: String,
|
||||
#[schema(value_type = String)]
|
||||
pub user_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -45,6 +47,8 @@ pub struct KomgaSyncResponse {
|
||||
#[schema(value_type = String)]
|
||||
pub id: Uuid,
|
||||
pub komga_url: String,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub user_id: Option<Uuid>,
|
||||
pub total_komga_read: i64,
|
||||
pub matched: i64,
|
||||
pub already_read: i64,
|
||||
@@ -61,6 +65,8 @@ pub struct KomgaSyncReportSummary {
|
||||
#[schema(value_type = String)]
|
||||
pub id: Uuid,
|
||||
pub komga_url: String,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub user_id: Option<Uuid>,
|
||||
pub total_komga_read: i64,
|
||||
pub matched: i64,
|
||||
pub already_read: i64,
|
||||
@@ -215,11 +221,12 @@ pub async fn sync_komga_read_books(
|
||||
let mut already_read_ids: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
|
||||
|
||||
if !matched_ids.is_empty() {
|
||||
// Get already-read book IDs
|
||||
// Get already-read book IDs for this user
|
||||
let ar_rows = sqlx::query(
|
||||
"SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND status = 'read'",
|
||||
"SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND user_id = $2 AND status = 'read'",
|
||||
)
|
||||
.bind(&matched_ids)
|
||||
.bind(body.user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -228,12 +235,12 @@ pub async fn sync_komga_read_books(
|
||||
}
|
||||
already_read = already_read_ids.len() as i64;
|
||||
|
||||
// Bulk upsert all matched books as read
|
||||
// Bulk upsert all matched books as read for this user
|
||||
sqlx::query(
|
||||
r#"
|
||||
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
|
||||
SELECT unnest($1::uuid[]), 'read', NULL, NOW(), NOW()
|
||||
ON CONFLICT (book_id) DO UPDATE
|
||||
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
|
||||
SELECT unnest($1::uuid[]), $2, 'read', NULL, NOW(), NOW()
|
||||
ON CONFLICT (book_id, user_id) DO UPDATE
|
||||
SET status = 'read',
|
||||
current_page = NULL,
|
||||
last_read_at = NOW(),
|
||||
@@ -242,6 +249,7 @@ pub async fn sync_komga_read_books(
|
||||
"#,
|
||||
)
|
||||
.bind(&matched_ids)
|
||||
.bind(body.user_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
}
|
||||
@@ -273,12 +281,13 @@ pub async fn sync_komga_read_books(
|
||||
let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default();
|
||||
let report_row = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO komga_sync_reports (komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||
INSERT INTO komga_sync_reports (komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||
RETURNING id, created_at
|
||||
"#,
|
||||
)
|
||||
.bind(&url)
|
||||
.bind(body.user_id)
|
||||
.bind(total_komga_read)
|
||||
.bind(matched)
|
||||
.bind(already_read)
|
||||
@@ -292,6 +301,7 @@ pub async fn sync_komga_read_books(
|
||||
Ok(Json(KomgaSyncResponse {
|
||||
id: report_row.get("id"),
|
||||
komga_url: url,
|
||||
user_id: Some(body.user_id),
|
||||
total_komga_read,
|
||||
matched,
|
||||
already_read,
|
||||
@@ -319,7 +329,7 @@ pub async fn list_sync_reports(
|
||||
) -> Result<Json<Vec<KomgaSyncReportSummary>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
r#"
|
||||
SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked,
|
||||
SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked,
|
||||
jsonb_array_length(unmatched) as unmatched_count, created_at
|
||||
FROM komga_sync_reports
|
||||
ORDER BY created_at DESC
|
||||
@@ -334,6 +344,7 @@ pub async fn list_sync_reports(
|
||||
.map(|row| KomgaSyncReportSummary {
|
||||
id: row.get("id"),
|
||||
komga_url: row.get("komga_url"),
|
||||
user_id: row.get("user_id"),
|
||||
total_komga_read: row.get("total_komga_read"),
|
||||
matched: row.get("matched"),
|
||||
already_read: row.get("already_read"),
|
||||
@@ -365,7 +376,7 @@ pub async fn get_sync_report(
|
||||
) -> Result<Json<KomgaSyncResponse>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
|
||||
SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
|
||||
FROM komga_sync_reports
|
||||
WHERE id = $1
|
||||
"#,
|
||||
@@ -386,6 +397,7 @@ pub async fn get_sync_report(
|
||||
Ok(Json(KomgaSyncResponse {
|
||||
id: row.get("id"),
|
||||
komga_url: row.get("komga_url"),
|
||||
user_id: row.get("user_id"),
|
||||
total_komga_read: row.get("total_komga_read"),
|
||||
matched: row.get("matched"),
|
||||
already_read: row.get("already_read"),
|
||||
|
||||
@@ -25,6 +25,7 @@ mod stats;
|
||||
mod telegram;
|
||||
mod thumbnails;
|
||||
mod tokens;
|
||||
mod users;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
@@ -106,8 +107,10 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
|
||||
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
|
||||
.route("/folders", get(index_jobs::list_folders))
|
||||
.route("/admin/users", get(users::list_users).post(users::create_user))
|
||||
.route("/admin/users/:id", delete(users::delete_user).patch(users::update_user))
|
||||
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
|
||||
.route("/admin/tokens/:id", delete(tokens::revoke_token))
|
||||
.route("/admin/tokens/:id", delete(tokens::revoke_token).patch(tokens::update_token))
|
||||
.route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token))
|
||||
.route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr))
|
||||
.route("/prowlarr/test", get(prowlarr::test_prowlarr))
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use axum::{extract::{Path, State}, Json};
|
||||
use axum::{extract::{Extension, Path, State}, Json};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
use crate::{auth::AuthUser, error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct ReadingProgressResponse {
|
||||
@@ -42,8 +42,10 @@ pub struct UpdateReadingProgressRequest {
|
||||
)]
|
||||
pub async fn get_reading_progress(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Path(id): Path<Uuid>,
|
||||
) -> Result<Json<ReadingProgressResponse>, ApiError> {
|
||||
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
|
||||
// Verify book exists
|
||||
let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)")
|
||||
.bind(id)
|
||||
@@ -55,9 +57,10 @@ pub async fn get_reading_progress(
|
||||
}
|
||||
|
||||
let row = sqlx::query(
|
||||
"SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1",
|
||||
"SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1 AND user_id = $2",
|
||||
)
|
||||
.bind(id)
|
||||
.bind(auth_user.user_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -96,9 +99,11 @@ pub async fn get_reading_progress(
|
||||
)]
|
||||
pub async fn update_reading_progress(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Path(id): Path<Uuid>,
|
||||
Json(body): Json<UpdateReadingProgressRequest>,
|
||||
) -> Result<Json<ReadingProgressResponse>, ApiError> {
|
||||
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
|
||||
// Validate status value
|
||||
if !["unread", "reading", "read"].contains(&body.status.as_str()) {
|
||||
return Err(ApiError::bad_request(format!(
|
||||
@@ -143,9 +148,9 @@ pub async fn update_reading_progress(
|
||||
|
||||
let row = sqlx::query(
|
||||
r#"
|
||||
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
|
||||
VALUES ($1, $2, $3, NOW(), NOW())
|
||||
ON CONFLICT (book_id) DO UPDATE
|
||||
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
|
||||
VALUES ($1, $2, $3, $4, NOW(), NOW())
|
||||
ON CONFLICT (book_id, user_id) DO UPDATE
|
||||
SET status = EXCLUDED.status,
|
||||
current_page = EXCLUDED.current_page,
|
||||
last_read_at = NOW(),
|
||||
@@ -154,6 +159,7 @@ pub async fn update_reading_progress(
|
||||
"#,
|
||||
)
|
||||
.bind(id)
|
||||
.bind(auth_user.user_id)
|
||||
.bind(&body.status)
|
||||
.bind(current_page)
|
||||
.fetch_one(&state.pool)
|
||||
@@ -194,8 +200,10 @@ pub struct MarkSeriesReadResponse {
|
||||
)]
|
||||
pub async fn mark_series_read(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Json(body): Json<MarkSeriesReadRequest>,
|
||||
) -> Result<Json<MarkSeriesReadResponse>, ApiError> {
|
||||
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
|
||||
if !["read", "unread"].contains(&body.status.as_str()) {
|
||||
return Err(ApiError::bad_request(
|
||||
"status must be 'read' or 'unread'",
|
||||
@@ -209,24 +217,50 @@ pub async fn mark_series_read(
|
||||
};
|
||||
|
||||
let sql = if body.status == "unread" {
|
||||
// Delete progress records to reset to unread
|
||||
// Delete progress records to reset to unread (scoped to this user)
|
||||
if body.series == "unclassified" {
|
||||
format!(
|
||||
r#"
|
||||
WITH target_books AS (
|
||||
SELECT id FROM books WHERE {series_filter}
|
||||
)
|
||||
DELETE FROM book_reading_progress
|
||||
WHERE book_id IN (SELECT id FROM target_books) AND user_id = $1
|
||||
"#
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
r#"
|
||||
WITH target_books AS (
|
||||
SELECT id FROM books WHERE {series_filter}
|
||||
)
|
||||
DELETE FROM book_reading_progress
|
||||
WHERE book_id IN (SELECT id FROM target_books) AND user_id = $2
|
||||
"#
|
||||
)
|
||||
}
|
||||
} else if body.series == "unclassified" {
|
||||
format!(
|
||||
r#"
|
||||
WITH target_books AS (
|
||||
SELECT id FROM books WHERE {series_filter}
|
||||
)
|
||||
DELETE FROM book_reading_progress
|
||||
WHERE book_id IN (SELECT id FROM target_books)
|
||||
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
|
||||
SELECT id, $1, 'read', NULL, NOW(), NOW()
|
||||
FROM books
|
||||
WHERE {series_filter}
|
||||
ON CONFLICT (book_id, user_id) DO UPDATE
|
||||
SET status = 'read',
|
||||
current_page = NULL,
|
||||
last_read_at = NOW(),
|
||||
updated_at = NOW()
|
||||
"#
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
r#"
|
||||
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
|
||||
SELECT id, 'read', NULL, NOW(), NOW()
|
||||
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
|
||||
SELECT id, $2, 'read', NULL, NOW(), NOW()
|
||||
FROM books
|
||||
WHERE {series_filter}
|
||||
ON CONFLICT (book_id) DO UPDATE
|
||||
ON CONFLICT (book_id, user_id) DO UPDATE
|
||||
SET status = 'read',
|
||||
current_page = NULL,
|
||||
last_read_at = NOW(),
|
||||
@@ -236,9 +270,18 @@ pub async fn mark_series_read(
|
||||
};
|
||||
|
||||
let result = if body.series == "unclassified" {
|
||||
sqlx::query(&sql).execute(&state.pool).await?
|
||||
// $1 = user_id (no series bind needed)
|
||||
sqlx::query(&sql)
|
||||
.bind(auth_user.user_id)
|
||||
.execute(&state.pool)
|
||||
.await?
|
||||
} else {
|
||||
sqlx::query(&sql).bind(&body.series).execute(&state.pool).await?
|
||||
// $1 = series, $2 = user_id
|
||||
sqlx::query(&sql)
|
||||
.bind(&body.series)
|
||||
.bind(auth_user.user_id)
|
||||
.execute(&state.pool)
|
||||
.await?
|
||||
};
|
||||
|
||||
Ok(Json(MarkSeriesReadResponse {
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
use axum::extract::Extension;
|
||||
use axum::{extract::{Path, Query, State}, Json};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{books::BookItem, error::ApiError, state::AppState};
|
||||
use crate::{auth::AuthUser, books::BookItem, error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct SeriesItem {
|
||||
@@ -70,9 +71,11 @@ pub struct ListSeriesQuery {
|
||||
)]
|
||||
pub async fn list_series(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Path(library_id): Path<Uuid>,
|
||||
Query(query): Query<ListSeriesQuery>,
|
||||
) -> Result<Json<SeriesPage>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let limit = query.limit.unwrap_or(50).clamp(1, 200);
|
||||
let page = query.page.unwrap_or(1).max(1);
|
||||
let offset = (page - 1) * limit;
|
||||
@@ -115,6 +118,10 @@ pub async fn list_series(
|
||||
None => String::new(),
|
||||
};
|
||||
|
||||
let user_id_p = p + 1;
|
||||
let limit_p = p + 2;
|
||||
let offset_p = p + 3;
|
||||
|
||||
let missing_cte = r#"
|
||||
missing_counts AS (
|
||||
SELECT eml.series_name,
|
||||
@@ -147,7 +154,7 @@ pub async fn list_series(
|
||||
COUNT(*) as book_count,
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
|
||||
FROM sorted_books sb
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
|
||||
GROUP BY sb.name
|
||||
),
|
||||
{missing_cte},
|
||||
@@ -160,9 +167,6 @@ pub async fn list_series(
|
||||
"#
|
||||
);
|
||||
|
||||
let limit_p = p + 1;
|
||||
let offset_p = p + 2;
|
||||
|
||||
let data_sql = format!(
|
||||
r#"
|
||||
WITH sorted_books AS (
|
||||
@@ -186,7 +190,7 @@ pub async fn list_series(
|
||||
COUNT(*) as book_count,
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
|
||||
FROM sorted_books sb
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
|
||||
GROUP BY sb.name
|
||||
),
|
||||
{missing_cte},
|
||||
@@ -245,7 +249,8 @@ pub async fn list_series(
|
||||
}
|
||||
}
|
||||
|
||||
data_builder = data_builder.bind(limit).bind(offset);
|
||||
count_builder = count_builder.bind(user_id);
|
||||
data_builder = data_builder.bind(user_id).bind(limit).bind(offset);
|
||||
|
||||
let (count_row, rows) = tokio::try_join!(
|
||||
count_builder.fetch_one(&state.pool),
|
||||
@@ -327,8 +332,10 @@ pub struct ListAllSeriesQuery {
|
||||
)]
|
||||
pub async fn list_all_series(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Query(query): Query<ListAllSeriesQuery>,
|
||||
) -> Result<Json<SeriesPage>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let limit = query.limit.unwrap_or(50).clamp(1, 200);
|
||||
let page = query.page.unwrap_or(1).max(1);
|
||||
let offset = (page - 1) * limit;
|
||||
@@ -415,6 +422,10 @@ pub async fn list_all_series(
|
||||
)
|
||||
"#;
|
||||
|
||||
let user_id_p = p + 1;
|
||||
let limit_p = p + 2;
|
||||
let offset_p = p + 3;
|
||||
|
||||
let count_sql = format!(
|
||||
r#"
|
||||
WITH sorted_books AS (
|
||||
@@ -426,7 +437,7 @@ pub async fn list_all_series(
|
||||
COUNT(*) as book_count,
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
|
||||
FROM sorted_books sb
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
|
||||
GROUP BY sb.name, sb.library_id
|
||||
),
|
||||
{missing_cte},
|
||||
@@ -445,9 +456,6 @@ pub async fn list_all_series(
|
||||
"REGEXP_REPLACE(LOWER(sc.name), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(sc.name), '\\d+'))[1]::int, 0), sc.name ASC".to_string()
|
||||
};
|
||||
|
||||
let limit_p = p + 1;
|
||||
let offset_p = p + 2;
|
||||
|
||||
let data_sql = format!(
|
||||
r#"
|
||||
WITH sorted_books AS (
|
||||
@@ -475,7 +483,7 @@ pub async fn list_all_series(
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count,
|
||||
MAX(sb.updated_at) as latest_updated_at
|
||||
FROM sorted_books sb
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id AND ${user_id_p}::uuid IS NOT NULL AND brp.user_id = ${user_id_p}
|
||||
GROUP BY sb.name, sb.library_id
|
||||
),
|
||||
{missing_cte},
|
||||
@@ -538,7 +546,8 @@ pub async fn list_all_series(
|
||||
data_builder = data_builder.bind(author.clone());
|
||||
}
|
||||
|
||||
data_builder = data_builder.bind(limit).bind(offset);
|
||||
count_builder = count_builder.bind(user_id);
|
||||
data_builder = data_builder.bind(user_id).bind(limit).bind(offset);
|
||||
|
||||
let (count_row, rows) = tokio::try_join!(
|
||||
count_builder.fetch_one(&state.pool),
|
||||
@@ -642,8 +651,10 @@ pub struct OngoingQuery {
|
||||
)]
|
||||
pub async fn ongoing_series(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Query(query): Query<OngoingQuery>,
|
||||
) -> Result<Json<Vec<SeriesItem>>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let limit = query.limit.unwrap_or(10).clamp(1, 50);
|
||||
|
||||
let rows = sqlx::query(
|
||||
@@ -655,7 +666,7 @@ pub async fn ongoing_series(
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read_count,
|
||||
MAX(brp.last_read_at) AS last_read_at
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
|
||||
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
|
||||
HAVING (
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
|
||||
@@ -685,6 +696,7 @@ pub async fn ongoing_series(
|
||||
"#,
|
||||
)
|
||||
.bind(limit)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -721,8 +733,10 @@ pub async fn ongoing_series(
|
||||
)]
|
||||
pub async fn ongoing_books(
|
||||
State(state): State<AppState>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
Query(query): Query<OngoingQuery>,
|
||||
) -> Result<Json<Vec<BookItem>>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let limit = query.limit.unwrap_or(10).clamp(1, 50);
|
||||
|
||||
let rows = sqlx::query(
|
||||
@@ -732,7 +746,7 @@ pub async fn ongoing_books(
|
||||
COALESCE(NULLIF(b.series, ''), 'unclassified') AS name,
|
||||
MAX(brp.last_read_at) AS series_last_read_at
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
|
||||
GROUP BY COALESCE(NULLIF(b.series, ''), 'unclassified')
|
||||
HAVING (
|
||||
COUNT(brp.book_id) FILTER (WHERE brp.status IN ('read', 'reading')) > 0
|
||||
@@ -753,7 +767,7 @@ pub async fn ongoing_books(
|
||||
) AS rn
|
||||
FROM books b
|
||||
JOIN ongoing_series os ON COALESCE(NULLIF(b.series, ''), 'unclassified') = os.name
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
|
||||
WHERE COALESCE(brp.status, 'unread') != 'read'
|
||||
)
|
||||
SELECT id, library_id, kind, format, title, author, authors, series, volume, language, page_count,
|
||||
@@ -765,6 +779,7 @@ pub async fn ongoing_books(
|
||||
"#,
|
||||
)
|
||||
.bind(limit)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use axum::{
|
||||
extract::{Query, State},
|
||||
extract::{Extension, Query, State},
|
||||
Json,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use utoipa::{IntoParams, ToSchema};
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
use crate::{auth::AuthUser, error::ApiError, state::AppState};
|
||||
|
||||
#[derive(Deserialize, IntoParams)]
|
||||
pub struct StatsQuery {
|
||||
@@ -90,6 +90,7 @@ pub struct CurrentlyReadingItem {
|
||||
pub series: Option<String>,
|
||||
pub current_page: i32,
|
||||
pub page_count: i32,
|
||||
pub username: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -98,6 +99,7 @@ pub struct RecentlyReadItem {
|
||||
pub title: String,
|
||||
pub series: Option<String>,
|
||||
pub last_read_at: String,
|
||||
pub username: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -106,6 +108,13 @@ pub struct MonthlyReading {
|
||||
pub books_read: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct UserMonthlyReading {
|
||||
pub month: String,
|
||||
pub username: String,
|
||||
pub books_read: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
pub struct JobTimePoint {
|
||||
pub label: String,
|
||||
@@ -129,6 +138,7 @@ pub struct StatsResponse {
|
||||
pub additions_over_time: Vec<MonthlyAdditions>,
|
||||
pub jobs_over_time: Vec<JobTimePoint>,
|
||||
pub metadata: MetadataStats,
|
||||
pub users_reading_over_time: Vec<UserMonthlyReading>,
|
||||
}
|
||||
|
||||
/// Get collection statistics for the dashboard
|
||||
@@ -146,7 +156,9 @@ pub struct StatsResponse {
|
||||
pub async fn get_stats(
|
||||
State(state): State<AppState>,
|
||||
Query(query): Query<StatsQuery>,
|
||||
user: Option<Extension<AuthUser>>,
|
||||
) -> Result<Json<StatsResponse>, ApiError> {
|
||||
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||
let period = query.period.as_deref().unwrap_or("month");
|
||||
// Overview + reading status in one query
|
||||
let overview_row = sqlx::query(
|
||||
@@ -165,9 +177,10 @@ pub async fn get_stats(
|
||||
COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading,
|
||||
COUNT(*) FILTER (WHERE brp.status = 'read') AS read
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -255,7 +268,7 @@ pub async fn get_stats(
|
||||
COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread_count
|
||||
FROM libraries l
|
||||
LEFT JOIN books b ON b.library_id = l.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
LEFT JOIN LATERAL (
|
||||
SELECT size_bytes FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
|
||||
) bf ON TRUE
|
||||
@@ -263,6 +276,7 @@ pub async fn get_stats(
|
||||
ORDER BY book_count DESC
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -287,13 +301,14 @@ pub async fn get_stats(
|
||||
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
|
||||
COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
|
||||
FROM books b
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
|
||||
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
WHERE b.series IS NOT NULL AND b.series != ''
|
||||
GROUP BY b.series
|
||||
ORDER BY book_count DESC
|
||||
LIMIT 10
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -432,14 +447,17 @@ pub async fn get_stats(
|
||||
// Currently reading books
|
||||
let reading_rows = sqlx::query(
|
||||
r#"
|
||||
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count
|
||||
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username
|
||||
FROM book_reading_progress brp
|
||||
JOIN books b ON b.id = brp.book_id
|
||||
LEFT JOIN users u ON u.id = brp.user_id
|
||||
WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
|
||||
AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
ORDER BY brp.updated_at DESC
|
||||
LIMIT 20
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -453,6 +471,7 @@ pub async fn get_stats(
|
||||
series: r.get("series"),
|
||||
current_page: r.get::<Option<i32>, _>("current_page").unwrap_or(0),
|
||||
page_count: r.get::<Option<i32>, _>("page_count").unwrap_or(0),
|
||||
username: r.get("username"),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@@ -461,14 +480,18 @@ pub async fn get_stats(
|
||||
let recent_rows = sqlx::query(
|
||||
r#"
|
||||
SELECT b.id AS book_id, b.title, b.series,
|
||||
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at
|
||||
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
|
||||
u.username
|
||||
FROM book_reading_progress brp
|
||||
JOIN books b ON b.id = brp.book_id
|
||||
LEFT JOIN users u ON u.id = brp.user_id
|
||||
WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
|
||||
AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
ORDER BY brp.last_read_at DESC
|
||||
LIMIT 10
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -481,6 +504,7 @@ pub async fn get_stats(
|
||||
title: r.get("title"),
|
||||
series: r.get("series"),
|
||||
last_read_at: r.get::<Option<String>, _>("last_read_at").unwrap_or_default(),
|
||||
username: r.get("username"),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@@ -499,11 +523,13 @@ pub async fn get_stats(
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
|
||||
AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
GROUP BY brp.last_read_at::date
|
||||
) cnt ON cnt.dt = d.dt
|
||||
ORDER BY month ASC
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
@@ -523,11 +549,13 @@ pub async fn get_stats(
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
|
||||
AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
GROUP BY DATE_TRUNC('week', brp.last_read_at)
|
||||
) cnt ON cnt.dt = d.dt
|
||||
ORDER BY month ASC
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
@@ -547,11 +575,13 @@ pub async fn get_stats(
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
||||
AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||
GROUP BY DATE_TRUNC('month', brp.last_read_at)
|
||||
) cnt ON cnt.dt = d.dt
|
||||
ORDER BY month ASC
|
||||
"#,
|
||||
)
|
||||
.bind(user_id)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
@@ -565,6 +595,93 @@ pub async fn get_stats(
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Per-user reading over time (admin view — always all users, no user_id filter)
|
||||
let users_reading_time_rows = match period {
|
||||
"day" => {
|
||||
sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||
u.username,
|
||||
COALESCE(cnt.books_read, 0) AS books_read
|
||||
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
|
||||
CROSS JOIN users u
|
||||
LEFT JOIN (
|
||||
SELECT brp.last_read_at::date AS dt, brp.user_id, COUNT(*) AS books_read
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
|
||||
GROUP BY brp.last_read_at::date, brp.user_id
|
||||
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
|
||||
ORDER BY month ASC, u.username
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
"week" => {
|
||||
sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||
u.username,
|
||||
COALESCE(cnt.books_read, 0) AS books_read
|
||||
FROM generate_series(
|
||||
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
|
||||
DATE_TRUNC('week', NOW()),
|
||||
'1 week'
|
||||
) AS d(dt)
|
||||
CROSS JOIN users u
|
||||
LEFT JOIN (
|
||||
SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
|
||||
GROUP BY DATE_TRUNC('week', brp.last_read_at), brp.user_id
|
||||
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
|
||||
ORDER BY month ASC, u.username
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
_ => {
|
||||
sqlx::query(
|
||||
r#"
|
||||
SELECT
|
||||
TO_CHAR(d.dt, 'YYYY-MM') AS month,
|
||||
u.username,
|
||||
COALESCE(cnt.books_read, 0) AS books_read
|
||||
FROM generate_series(
|
||||
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
|
||||
DATE_TRUNC('month', NOW()),
|
||||
'1 month'
|
||||
) AS d(dt)
|
||||
CROSS JOIN users u
|
||||
LEFT JOIN (
|
||||
SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
|
||||
FROM book_reading_progress brp
|
||||
WHERE brp.status = 'read'
|
||||
AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
||||
GROUP BY DATE_TRUNC('month', brp.last_read_at), brp.user_id
|
||||
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
|
||||
ORDER BY month ASC, u.username
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
let users_reading_over_time: Vec<UserMonthlyReading> = users_reading_time_rows
|
||||
.iter()
|
||||
.map(|r| UserMonthlyReading {
|
||||
month: r.get::<Option<String>, _>("month").unwrap_or_default(),
|
||||
username: r.get("username"),
|
||||
books_read: r.get("books_read"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Jobs over time (with gap filling, grouped by type category)
|
||||
let jobs_rows = match period {
|
||||
"day" => {
|
||||
@@ -697,5 +814,6 @@ pub async fn get_stats(
|
||||
additions_over_time,
|
||||
jobs_over_time,
|
||||
metadata,
|
||||
users_reading_over_time,
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -16,6 +16,8 @@ pub struct CreateTokenRequest {
|
||||
pub name: String,
|
||||
#[schema(value_type = Option<String>, example = "read")]
|
||||
pub scope: Option<String>,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub user_id: Option<Uuid>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -26,6 +28,9 @@ pub struct TokenResponse {
|
||||
pub scope: String,
|
||||
pub prefix: String,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub user_id: Option<Uuid>,
|
||||
pub username: Option<String>,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub last_used_at: Option<DateTime<Utc>>,
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub revoked_at: Option<DateTime<Utc>>,
|
||||
@@ -71,6 +76,10 @@ pub async fn create_token(
|
||||
_ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")),
|
||||
};
|
||||
|
||||
if scope == "read" && input.user_id.is_none() {
|
||||
return Err(ApiError::bad_request("user_id is required for read-scoped tokens"));
|
||||
}
|
||||
|
||||
let mut random = [0u8; 24];
|
||||
OsRng.fill_bytes(&mut random);
|
||||
let secret = URL_SAFE_NO_PAD.encode(random);
|
||||
@@ -85,13 +94,14 @@ pub async fn create_token(
|
||||
|
||||
let id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)",
|
||||
"INSERT INTO api_tokens (id, name, prefix, token_hash, scope, user_id) VALUES ($1, $2, $3, $4, $5, $6)",
|
||||
)
|
||||
.bind(id)
|
||||
.bind(input.name.trim())
|
||||
.bind(&prefix)
|
||||
.bind(token_hash)
|
||||
.bind(scope)
|
||||
.bind(input.user_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -118,7 +128,13 @@ pub async fn create_token(
|
||||
)]
|
||||
pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<TokenResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC",
|
||||
r#"
|
||||
SELECT t.id, t.name, t.scope, t.prefix, t.user_id, u.username,
|
||||
t.last_used_at, t.revoked_at, t.created_at
|
||||
FROM api_tokens t
|
||||
LEFT JOIN users u ON u.id = t.user_id
|
||||
ORDER BY t.created_at DESC
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
@@ -130,6 +146,8 @@ pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<Token
|
||||
name: row.get("name"),
|
||||
scope: row.get("scope"),
|
||||
prefix: row.get("prefix"),
|
||||
user_id: row.get("user_id"),
|
||||
username: row.get("username"),
|
||||
last_used_at: row.get("last_used_at"),
|
||||
revoked_at: row.get("revoked_at"),
|
||||
created_at: row.get("created_at"),
|
||||
@@ -171,6 +189,47 @@ pub async fn revoke_token(
|
||||
Ok(Json(serde_json::json!({"revoked": true, "id": id})))
|
||||
}
|
||||
|
||||
/// Request body for `PATCH /admin/tokens/{id}`.
#[derive(Deserialize, ToSchema)]
pub struct UpdateTokenRequest {
    // `None` unassigns the token (user_id is bound as SQL NULL);
    // `Some(id)` reassigns it to that user.
    #[schema(value_type = Option<String>)]
    pub user_id: Option<Uuid>,
}
|
||||
|
||||
/// Update a token's assigned user
|
||||
#[utoipa::path(
|
||||
patch,
|
||||
path = "/admin/tokens/{id}",
|
||||
tag = "tokens",
|
||||
params(
|
||||
("id" = String, Path, description = "Token UUID"),
|
||||
),
|
||||
request_body = UpdateTokenRequest,
|
||||
responses(
|
||||
(status = 200, description = "Token updated"),
|
||||
(status = 404, description = "Token not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn update_token(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
Json(input): Json<UpdateTokenRequest>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
let result = sqlx::query("UPDATE api_tokens SET user_id = $1 WHERE id = $2")
|
||||
.bind(input.user_id)
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(ApiError::not_found("token not found"));
|
||||
}
|
||||
|
||||
Ok(Json(serde_json::json!({"updated": true, "id": id})))
|
||||
}
|
||||
|
||||
/// Permanently delete a revoked API token
|
||||
#[utoipa::path(
|
||||
post,
|
||||
|
||||
195
apps/api/src/users.rs
Normal file
195
apps/api/src/users.rs
Normal file
@@ -0,0 +1,195 @@
|
||||
use axum::{extract::{Path, State}, Json};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use crate::{error::ApiError, state::AppState};
|
||||
|
||||
/// A reader user together with aggregate stats, as returned by the
/// `/admin/users` endpoints.
#[derive(Serialize, ToSchema)]
pub struct UserResponse {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub username: String,
    // Count of non-revoked API tokens assigned to this user
    // (see the `revoked_at IS NULL` join condition in `list_users`).
    pub token_count: i64,
    // Distinct books whose reading progress for this user has status 'read'.
    pub books_read: i64,
    // Distinct books whose reading progress for this user has status 'reading'.
    pub books_reading: i64,
    #[schema(value_type = String)]
    pub created_at: DateTime<Utc>,
}
|
||||
|
||||
/// Request body for creating a reader user; also reused as the rename
/// payload by `update_user`.
#[derive(Deserialize, ToSchema)]
pub struct CreateUserRequest {
    // Trimmed before use; must be non-empty. Uniqueness is enforced by the
    // `users_username_key` constraint and surfaced as a 400.
    pub username: String,
}
|
||||
|
||||
/// List all reader users with their associated token count
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/admin/users",
|
||||
tag = "users",
|
||||
responses(
|
||||
(status = 200, body = Vec<UserResponse>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn list_users(State(state): State<AppState>) -> Result<Json<Vec<UserResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
r#"
|
||||
SELECT u.id, u.username, u.created_at,
|
||||
COUNT(DISTINCT t.id) AS token_count,
|
||||
COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read,
|
||||
COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'reading') AS books_reading
|
||||
FROM users u
|
||||
LEFT JOIN api_tokens t ON t.user_id = u.id AND t.revoked_at IS NULL
|
||||
LEFT JOIN book_reading_progress brp ON brp.user_id = u.id
|
||||
GROUP BY u.id, u.username, u.created_at
|
||||
ORDER BY u.created_at DESC
|
||||
"#,
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
let items = rows
|
||||
.into_iter()
|
||||
.map(|row| UserResponse {
|
||||
id: row.get("id"),
|
||||
username: row.get("username"),
|
||||
token_count: row.get("token_count"),
|
||||
books_read: row.get("books_read"),
|
||||
books_reading: row.get("books_reading"),
|
||||
created_at: row.get("created_at"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(items))
|
||||
}
|
||||
|
||||
/// Create a new reader user
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/admin/users",
|
||||
tag = "users",
|
||||
request_body = CreateUserRequest,
|
||||
responses(
|
||||
(status = 200, body = UserResponse, description = "User created"),
|
||||
(status = 400, description = "Invalid input"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn create_user(
|
||||
State(state): State<AppState>,
|
||||
Json(input): Json<CreateUserRequest>,
|
||||
) -> Result<Json<UserResponse>, ApiError> {
|
||||
if input.username.trim().is_empty() {
|
||||
return Err(ApiError::bad_request("username is required"));
|
||||
}
|
||||
|
||||
let id = Uuid::new_v4();
|
||||
let row = sqlx::query(
|
||||
"INSERT INTO users (id, username) VALUES ($1, $2) RETURNING id, username, created_at",
|
||||
)
|
||||
.bind(id)
|
||||
.bind(input.username.trim())
|
||||
.fetch_one(&state.pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
if let sqlx::Error::Database(ref db_err) = e {
|
||||
if db_err.constraint() == Some("users_username_key") {
|
||||
return ApiError::bad_request("username already exists");
|
||||
}
|
||||
}
|
||||
ApiError::from(e)
|
||||
})?;
|
||||
|
||||
Ok(Json(UserResponse {
|
||||
id: row.get("id"),
|
||||
username: row.get("username"),
|
||||
token_count: 0,
|
||||
books_read: 0,
|
||||
books_reading: 0,
|
||||
created_at: row.get("created_at"),
|
||||
}))
|
||||
}
|
||||
|
||||
/// Update a reader user's username
|
||||
#[utoipa::path(
|
||||
patch,
|
||||
path = "/admin/users/{id}",
|
||||
tag = "users",
|
||||
request_body = CreateUserRequest,
|
||||
responses(
|
||||
(status = 200, body = UserResponse, description = "User updated"),
|
||||
(status = 400, description = "Invalid input"),
|
||||
(status = 404, description = "User not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn update_user(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
Json(input): Json<CreateUserRequest>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
if input.username.trim().is_empty() {
|
||||
return Err(ApiError::bad_request("username is required"));
|
||||
}
|
||||
|
||||
let result = sqlx::query("UPDATE users SET username = $1 WHERE id = $2")
|
||||
.bind(input.username.trim())
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
if let sqlx::Error::Database(ref db_err) = e {
|
||||
if db_err.constraint() == Some("users_username_key") {
|
||||
return ApiError::bad_request("username already exists");
|
||||
}
|
||||
}
|
||||
ApiError::from(e)
|
||||
})?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(ApiError::not_found("user not found"));
|
||||
}
|
||||
|
||||
Ok(Json(serde_json::json!({"updated": true, "id": id})))
|
||||
}
|
||||
|
||||
/// Delete a reader user (cascades on tokens and reading progress)
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/admin/users/{id}",
|
||||
tag = "users",
|
||||
params(
|
||||
("id" = String, Path, description = "User UUID"),
|
||||
),
|
||||
responses(
|
||||
(status = 200, description = "User deleted"),
|
||||
(status = 404, description = "User not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn delete_user(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
let result = sqlx::query("DELETE FROM users WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(ApiError::not_found("user not found"));
|
||||
}
|
||||
|
||||
Ok(Json(serde_json::json!({"deleted": true, "id": id})))
|
||||
}
|
||||
Reference in New Issue
Block a user