From 127cd8a42c7c7c377f89795fc1254822f796ceae Mon Sep 17 00:00:00 2001 From: Froidefond Julien Date: Mon, 16 Mar 2026 22:04:19 +0100 Subject: [PATCH] feat(komga): add Komga read-status sync with reports and history Adds a Komga sync feature to import read status from a Komga server. Books are matched by title (case-insensitive) with a series+title primary match and a title-only fallback. Sync reports are persisted with matched, newly marked, and unmatched book lists. The UI shows a check icon for newly marked books, sorted to the top. Credentials (URL+username) are saved between sessions. Uses a HashSet for O(1) lookups to handle large libraries. Closes #2 Co-Authored-By: Claude Opus 4.6 --- apps/api/src/error.rs | 6 + apps/api/src/komga.rs | 397 ++++++++++++++++++ apps/api/src/main.rs | 4 + .../app/api/komga/reports/[id]/route.ts | 16 + .../backoffice/app/api/komga/reports/route.ts | 12 + apps/backoffice/app/api/komga/sync/route.ts | 16 + apps/backoffice/app/settings/SettingsPage.tsx | 327 ++++++++++++++- apps/backoffice/lib/api.ts | 45 ++ .../0024_add_komga_sync_reports.sql | 10 + ...0025_add_matched_books_to_sync_reports.sql | 1 + ...add_newly_marked_books_to_sync_reports.sql | 1 + 11 files changed, 833 insertions(+), 2 deletions(-) create mode 100644 apps/api/src/komga.rs create mode 100644 apps/backoffice/app/api/komga/reports/[id]/route.ts create mode 100644 apps/backoffice/app/api/komga/reports/route.ts create mode 100644 apps/backoffice/app/api/komga/sync/route.ts create mode 100644 infra/migrations/0024_add_komga_sync_reports.sql create mode 100644 infra/migrations/0025_add_matched_books_to_sync_reports.sql create mode 100644 infra/migrations/0026_add_newly_marked_books_to_sync_reports.sql diff --git a/apps/api/src/error.rs b/apps/api/src/error.rs index 3823588..72c80f2 100644 --- a/apps/api/src/error.rs +++ b/apps/api/src/error.rs @@ -83,3 +83,9 @@ impl From<std::io::Error> for ApiError { Self::internal(format!("IO error: {err}")) } } + +impl From<reqwest::Error> for ApiError { + fn from(err: 
reqwest::Error) -> Self { + Self::internal(format!("HTTP client error: {err}")) + } +} diff --git a/apps/api/src/komga.rs b/apps/api/src/komga.rs new file mode 100644 index 0000000..74eb3af --- /dev/null +++ b/apps/api/src/komga.rs @@ -0,0 +1,397 @@ +use axum::{extract::State, Json}; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use sqlx::Row; +use std::collections::HashMap; +use utoipa::ToSchema; +use uuid::Uuid; + +use crate::{error::ApiError, state::AppState}; + +// ─── Komga API types ───────────────────────────────────────────────────────── + +#[derive(Deserialize)] +struct KomgaBooksResponse { + content: Vec, + #[serde(rename = "totalPages")] + total_pages: i32, + number: i32, +} + +#[derive(Deserialize)] +struct KomgaBook { + name: String, + #[serde(rename = "seriesTitle")] + series_title: String, + metadata: KomgaBookMetadata, +} + +#[derive(Deserialize)] +struct KomgaBookMetadata { + title: String, +} + +// ─── Request / Response ────────────────────────────────────────────────────── + +#[derive(Deserialize, ToSchema)] +pub struct KomgaSyncRequest { + pub url: String, + pub username: String, + pub password: String, +} + +#[derive(Serialize, ToSchema)] +pub struct KomgaSyncResponse { + #[schema(value_type = String)] + pub id: Uuid, + pub komga_url: String, + pub total_komga_read: i64, + pub matched: i64, + pub already_read: i64, + pub newly_marked: i64, + pub matched_books: Vec, + pub newly_marked_books: Vec, + pub unmatched: Vec, + #[schema(value_type = String)] + pub created_at: DateTime, +} + +#[derive(Serialize, ToSchema)] +pub struct KomgaSyncReportSummary { + #[schema(value_type = String)] + pub id: Uuid, + pub komga_url: String, + pub total_komga_read: i64, + pub matched: i64, + pub already_read: i64, + pub newly_marked: i64, + pub unmatched_count: i32, + #[schema(value_type = String)] + pub created_at: DateTime, +} + +// ─── Handlers ──────────────────────────────────────────────────────────────── + +/// Sync read books from a 
Komga server +#[utoipa::path( + post, + path = "/komga/sync", + tag = "komga", + request_body = KomgaSyncRequest, + responses( + (status = 200, body = KomgaSyncResponse), + (status = 400, description = "Bad request"), + (status = 401, description = "Unauthorized"), + (status = 500, description = "Komga connection or sync error"), + ), + security(("Bearer" = [])) +)] +pub async fn sync_komga_read_books( + State(state): State, + Json(body): Json, +) -> Result, ApiError> { + let url = body.url.trim_end_matches('/').to_string(); + if url.is_empty() { + return Err(ApiError::bad_request("url is required")); + } + + // Build HTTP client with basic auth + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?; + + // Paginate through all READ books from Komga + let mut komga_books: Vec<(String, String)> = Vec::new(); // (series_title, title) + let mut page = 0; + let page_size = 100; + let max_pages = 500; + + loop { + let resp = client + .post(format!("{url}/api/v1/books/list?page={page}&size={page_size}")) + .basic_auth(&body.username, Some(&body.password)) + .header("Content-Type", "application/json") + .json(&serde_json::json!({ "condition": { "readStatus": { "operator": "is", "value": "READ" } } })) + .send() + .await + .map_err(|e| ApiError::internal(format!("Komga request failed: {e}")))?; + + if !resp.status().is_success() { + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + return Err(ApiError::internal(format!( + "Komga returned {status}: {text}" + ))); + } + + let data: KomgaBooksResponse = resp + .json() + .await + .map_err(|e| ApiError::internal(format!("Failed to parse Komga response: {e}")))?; + + for book in &data.content { + let title = if !book.metadata.title.is_empty() { + &book.metadata.title + } else { + &book.name + }; + komga_books.push((book.series_title.clone(), title.clone())); + } + + if 
data.number >= data.total_pages - 1 || page >= max_pages { + break; + } + page += 1; + } + + let total_komga_read = komga_books.len() as i64; + + // Build local lookup maps + let rows = sqlx::query( + "SELECT id, title, COALESCE(series, '') as series, LOWER(title) as title_lower, LOWER(COALESCE(series, '')) as series_lower FROM books", + ) + .fetch_all(&state.pool) + .await?; + + // Primary: (series_lower, title_lower) -> Vec<(Uuid, title, series)> + let mut primary_map: HashMap<(String, String), Vec<(Uuid, String, String)>> = HashMap::new(); + // Secondary: title_lower -> Vec<(Uuid, title, series)> + let mut secondary_map: HashMap> = HashMap::new(); + + for row in &rows { + let id: Uuid = row.get("id"); + let title: String = row.get("title"); + let series: String = row.get("series"); + let title_lower: String = row.get("title_lower"); + let series_lower: String = row.get("series_lower"); + let entry = (id, title, series); + + primary_map + .entry((series_lower, title_lower.clone())) + .or_default() + .push(entry.clone()); + secondary_map.entry(title_lower).or_default().push(entry); + } + + // Match Komga books to local books + let mut matched_entries: Vec<(Uuid, String)> = Vec::new(); // (id, display_title) + let mut unmatched: Vec = Vec::new(); + + for (series_title, title) in &komga_books { + let title_lower = title.to_lowercase(); + let series_lower = series_title.to_lowercase(); + + let found = if let Some(entries) = primary_map.get(&(series_lower.clone(), title_lower.clone())) { + Some(entries) + } else { + secondary_map.get(&title_lower) + }; + + if let Some(entries) = found { + for (id, local_title, local_series) in entries { + let display = if local_series.is_empty() { + local_title.clone() + } else { + format!("{local_series} - {local_title}") + }; + matched_entries.push((*id, display)); + } + } else if series_title.is_empty() { + unmatched.push(title.clone()); + } else { + unmatched.push(format!("{series_title} - {title}")); + } + } + + // Deduplicate by 
ID + matched_entries.sort_by(|a, b| a.0.cmp(&b.0)); + matched_entries.dedup_by(|a, b| a.0 == b.0); + + let matched_ids: Vec = matched_entries.iter().map(|(id, _)| *id).collect(); + let matched = matched_ids.len() as i64; + let mut already_read: i64 = 0; + let mut already_read_ids: std::collections::HashSet = std::collections::HashSet::new(); + + if !matched_ids.is_empty() { + // Get already-read book IDs + let ar_rows = sqlx::query( + "SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND status = 'read'", + ) + .bind(&matched_ids) + .fetch_all(&state.pool) + .await?; + + for row in &ar_rows { + already_read_ids.insert(row.get("book_id")); + } + already_read = already_read_ids.len() as i64; + + // Bulk upsert all matched books as read + sqlx::query( + r#" + INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at) + SELECT unnest($1::uuid[]), 'read', NULL, NOW(), NOW() + ON CONFLICT (book_id) DO UPDATE + SET status = 'read', + current_page = NULL, + last_read_at = NOW(), + updated_at = NOW() + WHERE book_reading_progress.status != 'read' + "#, + ) + .bind(&matched_ids) + .execute(&state.pool) + .await?; + } + + let newly_marked = matched - already_read; + + // Build matched_books and newly_marked_books lists + let mut newly_marked_books: Vec = Vec::new(); + let mut matched_books: Vec = Vec::new(); + for (id, title) in &matched_entries { + if !already_read_ids.contains(id) { + newly_marked_books.push(title.clone()); + } + matched_books.push(title.clone()); + } + // Sort: newly marked first, then alphabetical + let newly_marked_set: std::collections::HashSet<&str> = + newly_marked_books.iter().map(|s| s.as_str()).collect(); + matched_books.sort_by(|a, b| { + let a_new = newly_marked_set.contains(a.as_str()); + let b_new = newly_marked_set.contains(b.as_str()); + b_new.cmp(&a_new).then(a.cmp(b)) + }); + newly_marked_books.sort(); + + // Save sync report + let unmatched_json = 
serde_json::to_value(&unmatched).unwrap_or_default(); + let matched_books_json = serde_json::to_value(&matched_books).unwrap_or_default(); + let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default(); + let report_row = sqlx::query( + r#" + INSERT INTO komga_sync_reports (komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + RETURNING id, created_at + "#, + ) + .bind(&url) + .bind(total_komga_read) + .bind(matched) + .bind(already_read) + .bind(newly_marked) + .bind(&matched_books_json) + .bind(&newly_marked_books_json) + .bind(&unmatched_json) + .fetch_one(&state.pool) + .await?; + + Ok(Json(KomgaSyncResponse { + id: report_row.get("id"), + komga_url: url, + total_komga_read, + matched, + already_read, + newly_marked, + matched_books, + newly_marked_books, + unmatched, + created_at: report_row.get("created_at"), + })) +} + +/// List Komga sync reports (most recent first) +#[utoipa::path( + get, + path = "/komga/reports", + tag = "komga", + responses( + (status = 200, body = Vec), + (status = 401, description = "Unauthorized"), + ), + security(("Bearer" = [])) +)] +pub async fn list_sync_reports( + State(state): State, +) -> Result>, ApiError> { + let rows = sqlx::query( + r#" + SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, + jsonb_array_length(unmatched) as unmatched_count, created_at + FROM komga_sync_reports + ORDER BY created_at DESC + LIMIT 20 + "#, + ) + .fetch_all(&state.pool) + .await?; + + let reports: Vec = rows + .iter() + .map(|row| KomgaSyncReportSummary { + id: row.get("id"), + komga_url: row.get("komga_url"), + total_komga_read: row.get("total_komga_read"), + matched: row.get("matched"), + already_read: row.get("already_read"), + newly_marked: row.get("newly_marked"), + unmatched_count: row.get("unmatched_count"), + created_at: row.get("created_at"), + }) + .collect(); + + 
Ok(Json(reports)) +} + +/// Get a specific sync report with full unmatched list +#[utoipa::path( + get, + path = "/komga/reports/{id}", + tag = "komga", + params(("id" = String, Path, description = "Report UUID")), + responses( + (status = 200, body = KomgaSyncResponse), + (status = 404, description = "Report not found"), + (status = 401, description = "Unauthorized"), + ), + security(("Bearer" = [])) +)] +pub async fn get_sync_report( + State(state): State, + axum::extract::Path(id): axum::extract::Path, +) -> Result, ApiError> { + let row = sqlx::query( + r#" + SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at + FROM komga_sync_reports + WHERE id = $1 + "#, + ) + .bind(id) + .fetch_optional(&state.pool) + .await?; + + let row = row.ok_or_else(|| ApiError::not_found("report not found"))?; + + let matched_books_json: serde_json::Value = row.try_get("matched_books").unwrap_or(serde_json::Value::Array(vec![])); + let matched_books: Vec = serde_json::from_value(matched_books_json).unwrap_or_default(); + let newly_marked_books_json: serde_json::Value = row.try_get("newly_marked_books").unwrap_or(serde_json::Value::Array(vec![])); + let newly_marked_books: Vec = serde_json::from_value(newly_marked_books_json).unwrap_or_default(); + let unmatched_json: serde_json::Value = row.get("unmatched"); + let unmatched: Vec = serde_json::from_value(unmatched_json).unwrap_or_default(); + + Ok(Json(KomgaSyncResponse { + id: row.get("id"), + komga_url: row.get("komga_url"), + total_komga_read: row.get("total_komga_read"), + matched: row.get("matched"), + already_read: row.get("already_read"), + newly_marked: row.get("newly_marked"), + matched_books, + newly_marked_books, + unmatched, + created_at: row.get("created_at"), + })) +} diff --git a/apps/api/src/main.rs b/apps/api/src/main.rs index 9cf816c..94a6999 100644 --- a/apps/api/src/main.rs +++ b/apps/api/src/main.rs @@ -3,6 +3,7 @@ mod books; mod 
error; mod handlers; mod index_jobs; +mod komga; mod libraries; mod api_middleware; mod openapi; @@ -100,6 +101,9 @@ async fn main() -> anyhow::Result<()> { .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token)) .route("/admin/tokens/:id", delete(tokens::revoke_token)) .route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token)) + .route("/komga/sync", axum::routing::post(komga::sync_komga_read_books)) + .route("/komga/reports", get(komga::list_sync_reports)) + .route("/komga/reports/:id", get(komga::get_sync_report)) .merge(settings::settings_routes()) .route_layer(middleware::from_fn_with_state( state.clone(), diff --git a/apps/backoffice/app/api/komga/reports/[id]/route.ts b/apps/backoffice/app/api/komga/reports/[id]/route.ts new file mode 100644 index 0000000..ca4f95c --- /dev/null +++ b/apps/backoffice/app/api/komga/reports/[id]/route.ts @@ -0,0 +1,16 @@ +import { NextResponse, NextRequest } from "next/server"; +import { getKomgaReport } from "@/lib/api"; + +export async function GET( + _request: NextRequest, + { params }: { params: Promise<{ id: string }> }, +) { + try { + const { id } = await params; + const data = await getKomgaReport(id); + return NextResponse.json(data); + } catch (error) { + const message = error instanceof Error ? error.message : "Failed to fetch report"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/api/komga/reports/route.ts b/apps/backoffice/app/api/komga/reports/route.ts new file mode 100644 index 0000000..ed624c9 --- /dev/null +++ b/apps/backoffice/app/api/komga/reports/route.ts @@ -0,0 +1,12 @@ +import { NextResponse } from "next/server"; +import { listKomgaReports } from "@/lib/api"; + +export async function GET() { + try { + const data = await listKomgaReports(); + return NextResponse.json(data); + } catch (error) { + const message = error instanceof Error ? 
error.message : "Failed to fetch reports"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/api/komga/sync/route.ts b/apps/backoffice/app/api/komga/sync/route.ts new file mode 100644 index 0000000..e71fb51 --- /dev/null +++ b/apps/backoffice/app/api/komga/sync/route.ts @@ -0,0 +1,16 @@ +import { NextResponse, NextRequest } from "next/server"; +import { apiFetch } from "@/lib/api"; + +export async function POST(request: NextRequest) { + try { + const body = await request.json(); + const data = await apiFetch("/komga/sync", { + method: "POST", + body: JSON.stringify(body), + }); + return NextResponse.json(data); + } catch (error) { + const message = error instanceof Error ? error.message : "Failed to sync with Komga"; + return NextResponse.json({ error: message }, { status: 500 }); + } +} diff --git a/apps/backoffice/app/settings/SettingsPage.tsx b/apps/backoffice/app/settings/SettingsPage.tsx index 76ba38d..c951fa6 100644 --- a/apps/backoffice/app/settings/SettingsPage.tsx +++ b/apps/backoffice/app/settings/SettingsPage.tsx @@ -1,8 +1,8 @@ "use client"; -import { useState } from "react"; +import { useState, useEffect, useCallback, useMemo } from "react"; import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon } from "../components/ui"; -import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats } from "../../lib/api"; +import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, KomgaSyncResponse, KomgaSyncReportSummary } from "../../lib/api"; interface SettingsPageProps { initialSettings: Settings; @@ -22,6 +22,29 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi const [isSaving, setIsSaving] = useState(false); const [saveMessage, setSaveMessage] = useState(null); + // Komga sync state — URL and username are persisted in settings + const [komgaUrl, setKomgaUrl] = useState(""); + const 
[komgaUsername, setKomgaUsername] = useState(""); + const [komgaPassword, setKomgaPassword] = useState(""); + const [isSyncing, setIsSyncing] = useState(false); + const [syncResult, setSyncResult] = useState(null); + const [syncError, setSyncError] = useState(null); + const [showUnmatched, setShowUnmatched] = useState(false); + const [reports, setReports] = useState([]); + const [selectedReport, setSelectedReport] = useState(null); + const [showReportUnmatched, setShowReportUnmatched] = useState(false); + const [showMatchedBooks, setShowMatchedBooks] = useState(false); + const [showReportMatchedBooks, setShowReportMatchedBooks] = useState(false); + + const syncNewlyMarkedSet = useMemo( + () => new Set(syncResult?.newly_marked_books ?? []), + [syncResult?.newly_marked_books], + ); + const reportNewlyMarkedSet = useMemo( + () => new Set(selectedReport?.newly_marked_books ?? []), + [selectedReport?.newly_marked_books], + ); + async function handleUpdateSetting(key: string, value: unknown) { setIsSaving(true); setSaveMessage(null); @@ -64,6 +87,66 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi } } + const fetchReports = useCallback(async () => { + try { + const resp = await fetch("/api/komga/reports"); + if (resp.ok) setReports(await resp.json()); + } catch { /* ignore */ } + }, []); + + useEffect(() => { + fetchReports(); + // Load saved Komga credentials (URL + username only) + fetch("/api/settings/komga").then(r => r.ok ? 
r.json() : null).then(data => { + if (data) { + if (data.url) setKomgaUrl(data.url); + if (data.username) setKomgaUsername(data.username); + } + }).catch(() => {}); + }, [fetchReports]); + + async function handleViewReport(id: string) { + setSelectedReport(null); + setShowReportUnmatched(false); + setShowReportMatchedBooks(false); + try { + const resp = await fetch(`/api/komga/reports/${id}`); + if (resp.ok) setSelectedReport(await resp.json()); + } catch { /* ignore */ } + } + + async function handleKomgaSync() { + setIsSyncing(true); + setSyncResult(null); + setSyncError(null); + setShowUnmatched(false); + setShowMatchedBooks(false); + try { + const response = await fetch("/api/komga/sync", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ url: komgaUrl, username: komgaUsername, password: komgaPassword }), + }); + const data = await response.json(); + if (!response.ok) { + setSyncError(data.error || "Sync failed"); + } else { + setSyncResult(data); + fetchReports(); + // Persist URL and username (not password) + fetch("/api/settings/komga", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ value: { url: komgaUrl, username: komgaUsername } }), + }).catch(() => {}); + } + } catch { + setSyncError("Failed to connect to sync endpoint"); + } finally { + setIsSyncing(false); + } + } + return ( <>
@@ -438,6 +521,246 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
+ + {/* Komga Sync */} + + + + + Komga Sync + + Import read status from a Komga server. Books are matched by title (case-insensitive). Credentials are not stored. + + +
+ + + + setKomgaUrl(e.target.value)} + /> + + + + + + setKomgaUsername(e.target.value)} + /> + + + + setKomgaPassword(e.target.value)} + /> + + + + + + {syncError && ( +
+ {syncError} +
+ )} + + {syncResult && ( +
+
+
+

Komga read

+

{syncResult.total_komga_read}

+
+
+

Matched

+

{syncResult.matched}

+
+
+

Already read

+

{syncResult.already_read}

+
+
+

Newly marked

+

{syncResult.newly_marked}

+
+
+ + {syncResult.matched_books.length > 0 && ( +
+ + {showMatchedBooks && ( +
+ {syncResult.matched_books.map((title, i) => ( +

+ {syncNewlyMarkedSet.has(title) && ( + + )} + {title} +

+ ))} +
+ )} +
+ )} + + {syncResult.unmatched.length > 0 && ( +
+ + {showUnmatched && ( +
+ {syncResult.unmatched.map((title, i) => ( +

{title}

+ ))} +
+ )} +
+ )} +
+ )} + {/* Past reports */} + {reports.length > 0 && ( +
+

Sync History

+
+ {reports.map((r) => ( + + ))} +
+ + {/* Selected report detail */} + {selectedReport && ( +
+
+
+

Komga read

+

{selectedReport.total_komga_read}

+
+
+

Matched

+

{selectedReport.matched}

+
+
+

Already read

+

{selectedReport.already_read}

+
+
+

Newly marked

+

{selectedReport.newly_marked}

+
+
+ + {selectedReport.matched_books && selectedReport.matched_books.length > 0 && ( +
+ + {showReportMatchedBooks && ( +
+ {selectedReport.matched_books.map((title, i) => ( +

+ {reportNewlyMarkedSet.has(title) && ( + + )} + {title} +

+ ))} +
+ )} +
+ )} + + {selectedReport.unmatched.length > 0 && ( +
+ + {showReportUnmatched && ( +
+ {selectedReport.unmatched.map((title, i) => ( +

{title}

+ ))} +
+ )} +
+ )} +
+ )} +
+ )} +
+
+
); } diff --git a/apps/backoffice/lib/api.ts b/apps/backoffice/lib/api.ts index 68529c7..da7266d 100644 --- a/apps/backoffice/lib/api.ts +++ b/apps/backoffice/lib/api.ts @@ -539,3 +539,48 @@ export async function markSeriesRead(seriesName: string, status: "read" | "unrea body: JSON.stringify({ series: seriesName, status }), }); } + +export type KomgaSyncRequest = { + url: string; + username: string; + password: string; +}; + +export type KomgaSyncResponse = { + id: string; + komga_url: string; + total_komga_read: number; + matched: number; + already_read: number; + newly_marked: number; + matched_books: string[]; + newly_marked_books: string[]; + unmatched: string[]; + created_at: string; +}; + +export type KomgaSyncReportSummary = { + id: string; + komga_url: string; + total_komga_read: number; + matched: number; + already_read: number; + newly_marked: number; + unmatched_count: number; + created_at: string; +}; + +export async function syncKomga(req: KomgaSyncRequest) { + return apiFetch("/komga/sync", { + method: "POST", + body: JSON.stringify(req), + }); +} + +export async function listKomgaReports() { + return apiFetch("/komga/reports"); +} + +export async function getKomgaReport(id: string) { + return apiFetch(`/komga/reports/${id}`); +} diff --git a/infra/migrations/0024_add_komga_sync_reports.sql b/infra/migrations/0024_add_komga_sync_reports.sql new file mode 100644 index 0000000..45b4a82 --- /dev/null +++ b/infra/migrations/0024_add_komga_sync_reports.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS komga_sync_reports ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + komga_url TEXT NOT NULL, + total_komga_read BIGINT NOT NULL DEFAULT 0, + matched BIGINT NOT NULL DEFAULT 0, + already_read BIGINT NOT NULL DEFAULT 0, + newly_marked BIGINT NOT NULL DEFAULT 0, + unmatched JSONB NOT NULL DEFAULT '[]', + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); diff --git a/infra/migrations/0025_add_matched_books_to_sync_reports.sql 
b/infra/migrations/0025_add_matched_books_to_sync_reports.sql new file mode 100644 index 0000000..eeafe4c --- /dev/null +++ b/infra/migrations/0025_add_matched_books_to_sync_reports.sql @@ -0,0 +1 @@ +ALTER TABLE komga_sync_reports ADD COLUMN IF NOT EXISTS matched_books JSONB NOT NULL DEFAULT '[]'; diff --git a/infra/migrations/0026_add_newly_marked_books_to_sync_reports.sql b/infra/migrations/0026_add_newly_marked_books_to_sync_reports.sql new file mode 100644 index 0000000..7def097 --- /dev/null +++ b/infra/migrations/0026_add_newly_marked_books_to_sync_reports.sql @@ -0,0 +1 @@ +ALTER TABLE komga_sync_reports ADD COLUMN IF NOT EXISTS newly_marked_books JSONB NOT NULL DEFAULT '[]';