Compare commits

...

148 Commits

Author SHA1 Message Date
a2de2e1601 chore: bump version to 2.10.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 43s
2026-03-26 09:16:50 +01:00
f08fc6b6a6 feat: unify job creation — tous les types créent N jobs par librairie côté backend
- metadata_batch, metadata_refresh, reading_status_match, reading_status_push,
  download_detection : library_id devient optionnel, la boucle passe côté API
- rebuild (index_jobs.rs), thumbnail_rebuild, thumbnail_regenerate : même logique,
  suppression du job unique library_id=NULL au profit d'un job par lib
- Backoffice simplifié : suppression des boucles frontend, les Server Actions
  appellent directement l'API sans library_id pour le cas "toutes les librairies"

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-26 09:16:24 +01:00
8f48c6a876 fix: disable Next.js fetch cache for settings API calls
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-26 08:39:02 +01:00
163e78813e fix: cache getServerSnapshot return value to prevent useSyncExternalStore infinite loop
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-26 08:37:22 +01:00
ef57ad0631 chore: bump version to 2.9.6
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 45s
2026-03-26 08:09:19 +01:00
6a2e1e4b09 chore: bump version to 2.9.5 2026-03-26 08:07:43 +01:00
4293800f83 chore: bump version to 2.9.4 2026-03-26 08:07:13 +01:00
04971b56e8 fix: merge duplicate series created by pre-rename scanner bug
Add migration 0063 to fuse series where the scanner recreated an
old filesystem-named entry alongside the user-renamed canonical one
(e.g. "LES MYTHICS" alongside "Mythics"). Uses the original_name
column from 0062 to identify and collapse all such duplicates:
reassigns books, external_metadata_links, anilist_series_links, then
deletes the stale series_metadata row.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-26 08:06:11 +01:00
336c9dc4c7 chore: bump version to 2.9.3 2026-03-26 08:05:43 +01:00
33dabfb250 chore: bump version to 2.9.2 2026-03-26 08:04:37 +01:00
d103dc20df fix: redirect instead of silent return when metadata refresh fails
When the API returns an error (e.g. no approved links for ongoing series),
the catch block was silently returning undefined from the server action,
making the button appear frozen with no feedback to the user.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-26 08:02:02 +01:00
66d0a9f56d fix: prevent scanner from recreating renamed series
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 47s
When a user renames a series via the UI, the scanner was using the
filesystem directory name to overwrite the DB series name, effectively
undoing the rename. This adds an original_name column to series_metadata
that tracks the filesystem-derived name, so the scanner can map it back
to the user-chosen name. The migration also back-fills existing renamed
series by comparing book file paths with DB series names.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 07:00:18 +01:00
c3cbf716a7 chore: bump version to 2.9.1 2026-03-26 07:00:08 +01:00
94a4b7ffcb chore: bump version to 2.9.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 55s
2026-03-26 06:37:31 +01:00
684fcf390c feat: add type, status, and library filters to jobs list
Filter jobs by type, status, or library with dropdowns above the table.
Shows filtered count and a clear button when filters are active.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:37:16 +01:00
34322f46c3 refactor: split job detail page into dedicated components
Extract 8 components from the 1144-line jobs/[id]/page.tsx:
- JobSummaryBanner, JobOverviewCard, JobTimelineCard
- JobProgressCard, IndexStatsCard, ThumbnailStatsCard
- MetadataReportCards, ReadingStatusReportCards
- DownloadDetectionCards, JobErrorsCard

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:34:57 +01:00
7db0fb83f8 chore: bump version to 2.8.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 44s
2026-03-26 06:27:29 +01:00
d81d941a34 feat: add replay button for download detection jobs and color-coded job type badges
Add download_detection to replayable job types and replay route handler.
Give each job type a unique colored background badge for better visual
distinction in the jobs table.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:27:04 +01:00
0460ea7c1f feat: add qBittorrent download button to download detection report
Show a download button on each available release in the detection report
when qBittorrent is configured, matching the Prowlarr search modal behavior.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:24:00 +01:00
a63b658dc4 feat: streamline mobile header navigation
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 52s
Remove dashboard link from desktop/tablet nav (logo already links to /).
Move user switcher into hamburger menu as inline clickable items on mobile.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:19:15 +01:00
7bce41b73b chore: bump version to 2.8.0 2026-03-26 06:19:05 +01:00
0b8264c8d1 chore: update backoffice .env.local default placeholders
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:13:48 +01:00
d1261ac9ab feat: replace inline save status with toast notifications in settings
Add a standalone toast notification system (no Provider needed) and use it
for settings save feedback. Skip save when fields are empty. Remove save
button on Anilist local user select in favor of auto-save on change.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-26 06:13:25 +01:00
35450bc050 chore: bump version to 2.7.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
2026-03-25 14:06:08 +01:00
5a51673b69 feat: expand volume range packs in Prowlarr title matching
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 45s
T01.T15, [T001.T104], T01-T15 and Tome 01 à Tome 15 are now expanded
to the full range of volumes they contain, so a pack covering volumes
1-15 correctly matches any missing volume within that range.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 14:04:46 +01:00
f136a1bc70 chore: bump version to 2.7.0 2026-03-25 14:00:41 +01:00
e0d94758af feat: add per-library download detection auto-schedule
Adds a configurable schedule (manual/hourly/daily/weekly) for the
download detection job in the library settings modal. The indexer
scheduler triggers the job automatically, and the API job poller
processes it — consistent with the reading_status_push pattern.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 13:57:59 +01:00
19de3ceebb chore: bump version to 2.6.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 51s
2026-03-25 13:47:31 +01:00
d2c9f28227 feat: add download detection job with Prowlarr integration
For each series with missing volumes and an approved metadata link,
calls Prowlarr to find available matching releases and stores them in
a report (no auto-download). Includes per-series detail page, Telegram
notifications with per-event toggles, and stats display in the jobs table.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 13:47:29 +01:00
e5e4993e7b refactor(settings): split SettingsPage into components, restructure tabs
- Extract 7 sub-components into settings/components/ (AnilistTab,
  KomgaSyncCard, MetadataProvidersCard, StatusMappingsCard, ProwlarrCard,
  QBittorrentCard, TelegramCard) — SettingsPage.tsx: 2100 → 551 lines
- Add "Metadata" tab (MetadataProviders + StatusMappings)
- Rename "Integrations" → "Download Tools" (Prowlarr + qBittorrent)
- Rename "AniList" → "Reading Status" tab; Komga sync as standalone card
- Rename cards: "AniList Config" + "AniList Sync"
- Persist active tab in URL searchParams (?tab=...)
- Fix hydration mismatch on AniList redirect URL (window.location via useEffect)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 13:15:43 +01:00
5ba4315e98 fix: revalidate /libraries cache after settings updates
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 42s
Add revalidatePath("/libraries") to the monitoring, metadata-provider
and reading-status-provider route handlers so that saving library
settings invalidates the Next.js data cache and the page reflects
fresh data on reload.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 12:51:16 +01:00
d893702909 chore: bump version to 2.5.1 2026-03-25 12:51:04 +01:00
f3960666fa feat: add reading_status_push auto-refresh schedule per library
- Migration 0059: reading_status_push_mode / last / next columns on libraries
- API: update_reading_status_provider accepts push_mode and calculates next_push_at
- job_poller: handles reading_status_push pending jobs
- Indexer scheduler: check_and_schedule_reading_status_push every minute
- Backoffice: schedule select in library settings modal

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 12:46:48 +01:00
57ff1888eb chore: bump version to 2.5.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 47s
2026-03-25 11:56:51 +01:00
57d2acc923 feat: expose all API endpoints in OpenAPI spec
Add 26 previously missing endpoints (users, tokens, libraries,
reading_status_match/push, anilist) plus 28 missing schemas and 3 new
tags (users, reading_status, anilist). Test passes.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 11:56:38 +01:00
29b27b9a86 feat: add client-side pagination to jobs table (25 per page)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 11:53:24 +01:00
7ff72cd378 fix: persist partial stats_json on reading_status_match/push job failure
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 11:50:41 +01:00
ee2ed1e1cb chore: bump version to 2.4.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
2026-03-25 11:32:32 +01:00
b7bc1ec9d4 feat: add stats display for reading_status_match and reading_status_push jobs
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 11:31:53 +01:00
ca4b7541af style: replace separator line with blank line in Telegram notifications
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 10:48:48 +01:00
d60c18b389 chore: bump version to 2.4.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
2026-03-25 10:30:24 +01:00
10cc69e53f feat: add reading_status_push job — differential push to AniList
Push reading statuses (PLANNING/CURRENT/COMPLETED) to AniList for all
linked series that changed since last sync, or have new books/no sync yet.

- Migration 0057: adds reading_status_push to index_jobs type constraint
- Migration 0058: creates reading_status_push_results table (pushed/skipped/no_books/error)
- API: new reading_status_push module with start_push, get_push_report, get_push_results
- Differential detection: synced_at IS NULL OR reading progress updated OR new books added
- Same 429 retry logic as reading_status_match (wait 10s, retry once, abort on 2nd 429)
- Notifications: ReadingStatusPushCompleted/Failed events
- Backoffice: push button in reading status group, job detail report with per-series list
- Replay support, badge label, i18n (FR + EN)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 10:30:04 +01:00
d977b6b27a chore: bump version to 2.3.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 42s
2026-03-25 09:08:38 +01:00
9eea43ce99 fix: retry once after 10s on AniList 429 before aborting job
On rate limit, wait 10 seconds and retry the same series. If the retry
also returns 429, the job stops. Otherwise it continues normally.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 09:08:27 +01:00
31538fac24 fix: abort reading_status_match job on AniList 429 rate limit
Continuing after a 429 is pointless — all subsequent requests will also
fail. The job now returns Err immediately, which sets status='failed' with
a clear message indicating where it stopped.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-25 09:06:34 +01:00
5f7f96f25a chore: bump version to 2.3.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 51s
2026-03-25 08:15:04 +01:00
87f5d9b452 chore: bump version to 2.2.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m5s
2026-03-24 21:20:40 +01:00
e995732504 fix: reduce action button size on tokens page to match jobs page
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-24 21:20:31 +01:00
ea4b8798a1 fix: add cursor-pointer to Button component
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-24 21:20:17 +01:00
b2e59d8aa1 fix: refresh jobs list immediately after replay
Add /api/jobs/list endpoint and fetch the updated list right after
a successful replay so the new job appears instantly instead of
waiting for the next SSE poll (up to 15s when idle).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-24 21:19:47 +01:00
6a838fb840 feat: add replay button on completed jobs in the jobs table
Shows a "Replay" button on non-active jobs that re-creates a new job
of the same type and library. Supports all replayable job types:
rebuild, full_rebuild, rescan, scan, thumbnail_rebuild,
thumbnail_regenerate, metadata_batch, metadata_refresh.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-24 21:18:12 +01:00
2febab2c39 feat: add books/pages metric toggle on reading activity chart
Allow switching between number of books and number of pages on the
dashboard reading activity chart. Adds pages_read to the stats API
response and a MetricToggle component alongside the existing PeriodToggle.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-24 21:12:43 +01:00
4049c94fc0 chore: bump version to 2.1.3
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 43s
2026-03-24 17:54:01 +01:00
cb684ab9ea fix: use correct column name sm.name instead of sm.series_name in series_metadata queries
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:53:51 +01:00
5e91ecd39d chore: bump version to 2.1.2
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 42s
2026-03-24 17:43:35 +01:00
f2fa4e3ce8 chore: remove unnecessary auto-enable reading_status_provider on link
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:41:21 +01:00
b61ab45fb4 fix: use subquery for total_volumes to avoid GROUP BY returning 0 rows
GROUP BY sm.total_volumes caused fetch_one to fail when no books matched,
silently skipping all series. COUNT(*) without GROUP BY always returns 1 row.

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:40:56 +01:00
fd0f57824d chore: add missing migrations and routes
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 6s
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:35:49 +01:00
4c10702fb7 chore: bump version to 2.1.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
2026-03-24 17:18:11 +01:00
301669332c fix: make AniList user_id optional for preview/sync (only required for pull)
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:18:03 +01:00
f57cc0cae0 chore: bump version to 2.1.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 48s
2026-03-24 17:09:08 +01:00
e94a4a0b13 feat: AniList reading status integration
- Add full AniList integration: OAuth connect, series linking, push/pull sync
- Push: PLANNING/CURRENT/COMPLETED based on books read vs total_volumes (never auto-complete from owned books alone)
- Pull: update local reading progress from AniList list (per-user)
- Detailed sync/pull reports with per-series status and progress
- Local user selector in settings to scope sync to a specific user
- Rename "AniList" tab/buttons to generic "État de lecture" / "Reading status"
- Make Bédéthèque and AniList badges clickable links on series detail page
- Fix ON CONFLICT error on series link (provider column in PK)
- Migration 0054: fix series_metadata missing columns (authors, publishers, locked_fields, total_volumes, status)
- Align button heights on series detail page; move MarkSeriesReadButton to action row

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 17:08:11 +01:00
2a7881ac6e chore: bump version to 2.0.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m7s
2026-03-24 12:56:40 +01:00
0950018b38 fix: add autoComplete=off on password fields to suppress WebKit autofill error
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 12:49:02 +01:00
bc796f4ee5 feat: multi-user reading progress & backoffice impersonation
- Scope all reading progress (books, series, stats) by user via
  Option<Extension<AuthUser>> — admin sees aggregate, read token sees own data
- Fix duplicate book rows when admin views lists (IS NOT NULL guard on JOIN)
- Add X-As-User header support: admin can impersonate any user from backoffice
- UserSwitcher dropdown in nav header (persisted via as_user_id cookie)
- Per-user filter pills on "Currently reading" and "Recently read" dashboard sections
- Inline username editing (UsernameEdit component with optimistic update)
- PATCH /admin/users/:id endpoint to rename a user
- Unassigned read tokens row in users table
- Komga sync now requires a user_id — reading progress attributed to selected user
- Migration 0051: add user_id column to komga_sync_reports
- Nav breakpoints: icons-only from md, labels from xl, hamburger until md

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 12:47:58 +01:00
232ecdda41 feat: add backoffice authentication with login page
- Add login page with logo background, glassmorphism card
- Add session management via JWT (jose) with httpOnly cookie
- Add Next.js proxy middleware to protect all routes
- Add logout button in nav
- Restructure app into (app) route group to isolate login layout
- Add ADMIN_USERNAME, ADMIN_PASSWORD, SESSION_SECRET env vars

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-24 08:48:01 +01:00
32d13984a1 chore: bump version to 1.28.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 53s
2026-03-23 19:00:30 +01:00
eab7f2e21b feat: filter metadata refresh to ongoing series & improve job action buttons
- Metadata refresh now skips series with ended/cancelled status
- Add xs size to Button component
- Unify view/cancel button sizes (h-7) with icons (eye & cross)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-23 18:59:33 +01:00
b6422fbf3e feat: enhance jobs list stats with tooltips, icons, and refresh count
- Add Tooltip UI component for styled hover tooltips
- Replace native title attributes with Tooltip on all job stats
- Add refresh icon (green) showing actual refreshed count for metadata refresh
- Add icon+tooltip to scanned files stat
- Add icon prop to StatBox component
- Add refreshed field to stats_json types
- Distinct tooltip labels for total links vs refreshed count

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-23 18:56:42 +01:00
6dbd0c80e6 feat: improve Telegram notification UI with better formatting
Add visual separators, contextual emojis, bold labels, structured
result sections, and conditional error lines for cleaner messages.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-23 18:46:25 +01:00
0c42a9ed04 fix: add API job poller to process scheduler-created metadata jobs
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m12s
The scheduler (indexer) created metadata_refresh/metadata_batch jobs in DB,
but the indexer excluded them (API_ONLY_JOB_TYPES) and the API only processed
jobs created via its REST endpoints. Scheduler-created jobs stayed pending forever.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 21:05:42 +01:00
95a6e54d06 chore: bump version to 1.27.1 2026-03-22 21:05:23 +01:00
e26219989f feat: add job runs chart and scrollable reading lists on dashboard
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m5s
- Add multi-line chart showing job runs over time by type (scan,
  rebuild, thumbnails, other) with the same day/week/month toggle
- Limit currently reading and recently read lists to 3 visible items
  with a scrollbar for overflow
- Fix NUMERIC→BIGINT cast for SUM/COALESCE in jobs SQL queries

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 10:43:45 +01:00
5d33a35407 chore: bump version to 1.27.0 2026-03-22 10:43:25 +01:00
d53572dc33 chore: bump version to 1.26.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m49s
2026-03-22 10:27:59 +01:00
cf1953d11f feat: add day/week/month period toggle for dashboard line charts
Add a period selector (day, week, month) to the reading activity and
books added charts. The API now accepts a ?period= query param and
returns gap-filled data using generate_series so all time slots appear
even with zero values. Labels are locale-aware (short month, weekday).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 10:27:24 +01:00
6f663eaee7 docs: add MIT license
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 10:08:15 +01:00
ee65c6263a perf: add ETag and server-side caching for thumbnail proxy
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 49s
Add ETag header to API thumbnail responses for 304 Not Modified support.
Forward If-None-Match/ETag through the Next.js proxy route handler and
add next.revalidate for 24h server-side fetch caching to reduce
SSR-to-API round trips on the libraries page.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 06:52:47 +01:00
691b6b22ab chore: bump version to 1.25.0 2026-03-22 06:52:02 +01:00
11c80a16a3 docs: add Telegram notifications and updated dashboard to README and FEATURES
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 46s
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 06:40:34 +01:00
c366b44c54 chore: bump version to 1.24.1 2026-03-22 06:39:23 +01:00
92f80542e6 perf: skip Next.js image re-optimization and stream proxy responses
Thumbnails are already optimized (WebP) by the API, so disable Next.js
image optimization to avoid redundant CPU work. Switch route handlers
from buffering (arrayBuffer) to streaming (response.body) to reduce
memory usage and latency.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 06:38:46 +01:00
3a25e42a20 chore: bump version to 1.24.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m7s
2026-03-22 06:31:56 +01:00
24763bf5a7 fix: show absolute date/time in jobs "created" column
Replace relative time formatting (which incorrectly showed "just now"
for many jobs due to negative time diffs from server/client timezone
mismatch) with absolute locale-formatted date/time.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 06:31:37 +01:00
08f0397029 feat: add reading stats and replace dashboard charts with recharts
Add currently reading, recently read, and reading activity sections to
the dashboard. Replace all custom SVG/CSS charts with recharts library
(donut, area, stacked bar, horizontal bar). Reorganize layout: libraries
and popular series side by side, books added chart full width below.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-22 06:26:45 +01:00
766e3a01b2 chore: bump version to 1.23.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 45s
2026-03-21 17:43:11 +01:00
626e2e035d feat: send book thumbnails in Telegram notifications
Use Telegram sendPhoto API for conversion and metadata-approved events
when a book thumbnail is available on disk. Falls back to text message
if photo upload fails.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 17:43:01 +01:00
cfd2321db2 chore: bump version to 1.22.0 2026-03-21 17:40:22 +01:00
1b715033ce fix: add missing Next.js route handler for Telegram test endpoint
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 17:39:46 +01:00
81d1586501 feat: add Telegram notification system with granular event toggles
Add notifications crate shared between API and indexer to send Telegram
messages on scan/thumbnail/conversion completion/failure, metadata linking,
batch and refresh events. Configurable via a new Notifications tab in the
backoffice settings with per-event toggle switches grouped by category.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 17:24:43 +01:00
bd74c9e3e3 docs: add comprehensive features list to README and docs/FEATURES.md
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m1s
Replace the minimal README features section with a concise categorized
summary and link to a detailed docs/FEATURES.md covering all features,
business rules, API endpoints, and integrations.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-21 14:34:36 +01:00
41228430cf chore: bump version to 1.21.2 2026-03-21 14:34:32 +01:00
6a4ba06fac fix: include series_metadata authors in authors listing and detail pages
Authors were only sourced from books.authors/books.author fields which are
often empty. Now also aggregates authors from series_metadata.authors
(populated by metadata providers like bedetheque). Adds author filter to
/series endpoint and updates the author detail page to use it.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 14:34:11 +01:00
e5c3542d3f refactor: split books.rs into books+series, reorganize OpenAPI tags and fix access control
- Extract series code from books.rs into dedicated series.rs module
- Reorganize OpenAPI tags: split overloaded "books" tag into books, series, search, stats
- Add missing endpoints to OpenAPI: metadata_batch, metadata_refresh, komga, update_metadata_provider
- Add missing schemas: MissingVolumeInput, Komga/Batch/Refresh DTOs
- Fix access control: move GET /libraries and POST /libraries/:id/scan to read routes
  so non-admin tokens can list libraries and trigger scans

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 14:23:19 +01:00
24516f1069 chore: bump version to 1.21.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
2026-03-21 13:42:17 +01:00
5383cdef60 feat: allow batch metadata and refresh metadata on all libraries
When no specific library is selected, iterate over all libraries and
trigger a job for each one, skipping libraries with metadata disabled.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:42:08 +01:00
be5c3f7a34 fix: pass explicit locale to date formatting to prevent hydration mismatch
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 41s
Server and client could use different default locales for
toLocaleDateString/toLocaleString, causing React hydration errors.
Pass the user locale explicitly in JobsList and SettingsPage.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:36:35 +01:00
caa9922ff9 chore: bump version to 1.21.0 2026-03-21 13:34:47 +01:00
135f000c71 refactor: switch JobsIndicator from polling to SSE and fix stream endpoint
Replace fetch polling in JobsIndicator with EventSource connected to
/api/jobs/stream. Fix the SSE route to return all jobs (via
/index/status) instead of only active ones, since JobsList also
consumes this stream for the full job history. JobsIndicator now
filters active jobs client-side. SSE server-side uses adaptive
interval (2s active, 15s idle) and only sends when data changes.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:33:58 +01:00
d9e50a4235 chore: bump version to 1.20.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m13s
2026-03-21 13:13:39 +01:00
5f6eb5a5cb perf: add selective fetch caching for stable API endpoints
Make apiFetch support Next.js revalidate option instead of
hardcoding cache: no-store on every request. Stable endpoints
(libraries, settings, stats, series statuses) now use time-based
revalidation while dynamic data (books, search, jobs) stays uncached.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:13:28 +01:00
41c77fca2e chore: bump version to 1.20.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m15s
2026-03-21 13:06:28 +01:00
49621f3fb1 perf: wrap BookCard and BookImage with React.memo
Prevent unnecessary re-renders of book grid items when parent
components update without changing book data.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:03:24 +01:00
6df743b2e6 perf: lazy-load heavy modal components with next/dynamic
Dynamic import EditBookForm, EditSeriesForm, MetadataSearchModal, and
ProwlarrSearchModal so their code is split into separate chunks and
only fetched when the user interacts with them.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 13:02:10 +01:00
edfefc0128 perf: optimize JobsIndicator polling with visibility API and adaptive interval
Pause polling when the tab is hidden, refetch immediately when it
becomes visible again, and use a 30s interval when no jobs are active
instead of polling every 2s unconditionally.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 12:59:06 +01:00
b0185abefe perf: enable Next.js image optimization across backoffice
Remove `unoptimized` flag from all thumbnail/cover Image components
and add proper responsive `sizes` props. Convert raw `<img>` tags on
the libraries page to next/image. Add 24h minimumCacheTTL for
optimized images. BookPreview keeps `unoptimized` since the API
already returns optimized WebP.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 12:57:10 +01:00
b9e54cbfd8 chore: bump version to 1.19.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 54s
2026-03-21 12:47:31 +01:00
3f0bd783cd feat: include series_count and thumbnail_book_ids in libraries API response
Eliminates N+1 sequential fetchSeries calls on the libraries page by
returning series count and up to 5 thumbnail book IDs (one per series)
directly from GET /libraries.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-03-21 12:47:10 +01:00
fc8856c83f chore: bump version to 1.19.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m19s
2026-03-21 08:12:19 +01:00
bd09f3d943 feat: persist filter state in localStorage across pages
Save/restore filter values in LiveSearchForm using localStorage keyed
by basePath (e.g. filters:/books, filters:/series). Filters are restored
on mount when the URL has no active filters, and cleared when the user
clicks the Clear button.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 08:12:10 +01:00
1f434c3d67 feat: add format and metadata filters to books page
Add two new filters to the books listing page:
- Format filter (CBZ/CBR/PDF/EPUB) using existing API support
- Metadata linked/unlinked filter with new API support via
  LEFT JOIN on external_metadata_links (using DISTINCT ON CTE
  matching the series endpoint pattern)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 08:09:37 +01:00
4972a403df chore: bump version to 1.18.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m7s
2026-03-21 07:47:52 +01:00
629708cdd0 feat: redesign libraries page UI with fan thumbnails and modal settings
- Replace thumbnail mosaic with fan/arc layout using series covers as background
- Move library settings from dropdown to full-page portal modal with sections
- Move FolderPicker modal to portal for proper z-index stacking
- Add descriptions to each setting for better clarity
- Move delete button to card header, compact config tags
- Add i18n keys for new labels and descriptions (en/fr)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 07:47:36 +01:00
560087a897 chore: bump version to 1.17.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m12s
2026-03-21 07:23:52 +01:00
27f553b005 feat: add rescan job type and improve full rebuild UX
Add "Deep rescan" job type that clears directory mtimes to force
re-walking all directories, discovering newly supported formats (e.g.
EPUB) without deleting existing data or metadata.

Also improve full rebuild button: red destructive styling instead of
warning, and FR description explicitly mentions metadata/reading status
loss. Rename FR rebuild label to "Mise à jour".

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 07:23:38 +01:00
ed7665248e chore: bump version to 1.16.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 1m5s
2026-03-21 07:06:28 +01:00
736b8aedc0 feat: add EPUB format support with spine-aware image extraction
Parse EPUB structure (container.xml → OPF → spine → XHTML) to extract
images in reading order. Zero new dependencies — reuses zip + regex
crates with pre-compiled regexes and per-file index cache for
performance. Falls back to CBZ-style image listing when spine contains
no images. Includes DB migration, API/indexer/backoffice updates.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 07:05:47 +01:00
3daa49ae6c feat: add live refresh to job detail page via SSE
The job detail page was only server-rendered with no live updates,
unlike the jobs list page. Add a lightweight JobDetailLive client
component that subscribes to the existing SSE endpoint and calls
router.refresh() on each update, keeping the page in sync while
a job is running.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-21 06:52:57 +01:00
5fb24188e1 chore: bump version to 1.15.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 44s
2026-03-20 13:35:36 +01:00
54f972db17 chore: bump version to 1.14.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 45s
2026-03-20 12:48:14 +01:00
acd8b62382 chore: bump version to 1.13.0 2026-03-20 12:44:54 +01:00
cc65e3d1ad feat: highlight missing volumes in Prowlarr search results
API extracts volume numbers from release titles and matches them against
missing volumes sent by the frontend. Matched results are highlighted in
green with badges indicating which missing volumes were found.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 12:44:35 +01:00
70889ca955 chore: bump version to 1.12.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 43s
2026-03-20 11:43:34 +01:00
4ad6d57271 feat: add authors page to backoffice with dedicated API endpoint
Add a new GET /authors endpoint that aggregates unique authors from books
with book/series counts, pagination and search. Add author filter to
GET /books. Backoffice gets a list page with search/sort and a detail
page showing the author's series and books.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 11:43:22 +01:00
fe5de3d5c1 feat: add scheduled metadata refresh for libraries
Add metadata_refresh_mode (manual/hourly/daily/weekly) to libraries,
with automatic scheduling via the indexer. Includes API support,
backoffice UI controls, i18n translations, and DB migration.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 10:51:52 +01:00
5a224c48c0 chore: bump version to 1.11.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 44s
2026-03-20 10:46:34 +01:00
d08fe31b1b fix: pass metadata_refresh_mode through backoffice proxy to API
The Next.js monitoring route was dropping metadata_refresh_mode from the
request body, so the value was never forwarded to the Rust API and
reverted on reload.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-20 10:46:22 +01:00
4d69ed91c5 chore: bump version to 1.11.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 56s
2026-03-20 09:46:29 +01:00
c6ddd3e6c7 chore: bump version to 1.10.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 49s
2026-03-19 22:33:52 +01:00
504185f31f feat: add editable search input to Prowlarr modal with scrollable badges
- Add text input for custom search queries in Prowlarr modal
- Quick search badges pre-fill the input and trigger search
- Default query uses quoted series name for exact match
- Add custom_query support to backend API
- Limit badge area height with vertical scroll
- Add debug logging for Prowlarr API responses

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 22:33:40 +01:00
acd0cce3f8 fix: reorder Prowlarr button, add collection progress bar, remove redundant missing badge
- Move Prowlarr search button before Metadata button
- Add amber collection progress bar showing owned/expected books ratio
- Remove yellow missing count badge from MetadataSearchModal (now shown in progress bar)
- Fix i18n plural parameter for series read count

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 22:17:49 +01:00
e14da4fc8d chore: bump version to 1.10.0
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 51s
2026-03-19 21:51:45 +01:00
c04d4fb618 feat: add qBittorrent download client integration
Send Prowlarr search results directly to qBittorrent from the modal.
Backend authenticates via SID cookie (login + add torrent endpoints).

- Backend: qbittorrent module with add and test endpoints
- Migration: add qbittorrent settings (url, username, password)
- Settings UI: qBittorrent config card with test connection
- ProwlarrSearchModal: send-to-qBittorrent button per result row
  with spinner/checkmark state progression
- Button only shown when qBittorrent is configured

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 21:51:28 +01:00
57bc82703d feat: add Prowlarr integration for manual release search
Add Prowlarr indexer integration (step 1: config + manual search).
Allows searching for comics/ebooks releases on Prowlarr indexers
directly from the series detail page, with download links and
per-volume search for missing books.

- Backend: new prowlarr module with search and test endpoints
- Migration: add prowlarr settings (url, api_key, categories)
- Settings UI: Prowlarr config card with test connection button
- ProwlarrSearchModal: auto-search on open, missing volumes shortcuts
- Fix series.readCount i18n plural parameter on series pages

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 21:43:34 +01:00
e6aa7ebed0 chore: bump version to 1.9.2
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 44s
2026-03-19 13:22:41 +01:00
c44b51d6ef fix: unmap status mappings instead of deleting, store unmapped provider statuses
- Make mapped_status nullable so unmapping (X button) sets NULL instead of
  deleting the row — provider statuses never disappear from the UI
- normalize_series_status now returns the raw provider status (lowercased)
  when no mapping exists, so all statuses are stored in series_metadata
- Fix series_statuses query crash caused by NULL mapped_status values
- Fix metadata batch/refresh server actions crashing page on 400 errors
- StatusMappingDto.mapped_status is now string | null in the backoffice

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 13:22:31 +01:00
d4c48de780 chore: bump version to 1.9.1 2026-03-19 12:59:31 +01:00
8948f75d62 fix: ignore unknown provider statuses instead of storing them
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 5s
normalize_series_status now returns None when no mapping exists,
so unknown provider statuses won't pollute series_metadata.status.
Users can see unmapped statuses in Settings and assign them before
they get stored.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 12:58:55 +01:00
d304877a83 fix: re-normalize series statuses with UI-added mappings
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 6s
Migration 0041 re-applies status normalization using all current
status_mappings entries, including those added via the UI after the
initial migration 0039 (e.g. "one shot" → "ended").

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 12:57:14 +01:00
9cec32ba3e fix: normalize series status casing to avoid duplicates
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 6s
- LOWER() all series_metadata.status values in the statuses endpoint
  to prevent "One shot" / "one shot" appearing as separate targets
- Migration 0040: lowercase all existing status values in DB
- Use LOWER() in series status filter queries for consistency

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 12:56:02 +01:00
e8768dfad7 chore: bump version to 1.9.0 2026-03-19 12:44:30 +01:00
cfc98819ab feat: add configurable status mappings for metadata providers
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 6s
Add a status_mappings table to replace hardcoded provider status
normalization. Users can now configure how provider statuses (e.g.
"releasing", "finie") map to target statuses (e.g. "ongoing", "ended")
via the Settings > Integrations page.

- Migration 0038: status_mappings table with pre-seeded mappings
- Migration 0039: re-normalize existing series_metadata.status values
- API: CRUD endpoints for status mappings, DB-based normalize function
- API: new GET /series/provider-statuses endpoint
- Backoffice: StatusMappingsCard component with create target, assign,
  and delete capabilities
- Fix all clippy warnings across the API crate
- Fix missing OpenAPI schema refs (MetadataStats, ProviderCount)

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 12:44:22 +01:00
bfc1c76fe2 chore: repair deploy
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 32s
2026-03-19 11:19:50 +01:00
39e9f35acb chore: push deploy stack local with dockerhub images
Some checks failed
Deploy with Docker Compose / deploy (push) Failing after 8s
2026-03-19 11:16:29 +01:00
36987f59b9 chore: bump version to 1.8.1 2026-03-19 11:12:06 +01:00
931d0e06f4 feat: redesign search bars with prominent search input and compact filters
Restructure LiveSearchForm: full-width search input with magnifying glass
icon, filters in a compact row below with contextual icons per field
(library, status, sort, etc.) and inline labels. Remove per-field
className overrides from series and books pages.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 11:12:00 +01:00
741a4da878 feat: redesign jobs page action bar with grouped layout
Replace flat button row + separate reference card with a single card
organized in 3 visual groups (Indexation, Thumbnails, Metadata).
Each action is a card-like button with inline description.
Destructive actions have distinct warning styling.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-19 11:03:08 +01:00
e28b78d0e6 chore: bump version to 1.8.0 2026-03-19 09:09:27 +01:00
189 changed files with 20175 additions and 4455 deletions

View File

@@ -13,6 +13,12 @@
# Use this token for the first API calls before creating proper API tokens
API_BOOTSTRAP_TOKEN=change-me-in-production
# Backoffice admin credentials (required)
ADMIN_USERNAME=admin
ADMIN_PASSWORD=change-me-in-production
# Secret for signing session JWTs (min 32 chars, required)
SESSION_SECRET=change-me-in-production-use-32-chars-min
# =============================================================================
# Service Configuration
# =============================================================================

View File

@@ -0,0 +1,17 @@
name: Deploy with Docker Compose
on:
push:
branches:
- main # adapte la branche que tu veux déployer
jobs:
deploy:
runs-on: mac-orbstack-runner # le nom que tu as donné au runner
steps:
- name: Deploy stack
env:
DOCKER_BUILDKIT: 1
COMPOSE_DOCKER_CLI_BUILD: 1
run: |
BUILDKIT_PROGRESS=plain cd /Users/julienfroidefond/Sites/docker-stack && docker pull julienfroidefond32/stripstream-backoffice && docker pull julienfroidefond32/stripstream-api && docker pull julienfroidefond32/stripstream-indexer && ./scripts/stack.sh up stripstream

25
Cargo.lock generated
View File

@@ -64,7 +64,7 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
[[package]]
name = "api"
version = "1.7.0"
version = "2.10.0"
dependencies = [
"anyhow",
"argon2",
@@ -76,6 +76,7 @@ dependencies = [
"image",
"jpeg-decoder",
"lru",
"notifications",
"parsers",
"rand 0.8.5",
"regex",
@@ -1232,7 +1233,7 @@ dependencies = [
[[package]]
name = "indexer"
version = "1.7.0"
version = "2.10.0"
dependencies = [
"anyhow",
"axum",
@@ -1240,6 +1241,7 @@ dependencies = [
"futures",
"image",
"jpeg-decoder",
"notifications",
"num_cpus",
"parsers",
"reqwest",
@@ -1663,6 +1665,19 @@ dependencies = [
"nom",
]
[[package]]
name = "notifications"
version = "2.10.0"
dependencies = [
"anyhow",
"reqwest",
"serde",
"serde_json",
"sqlx",
"tokio",
"tracing",
]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
@@ -1771,7 +1786,7 @@ dependencies = [
[[package]]
name = "parsers"
version = "1.7.0"
version = "2.10.0"
dependencies = [
"anyhow",
"flate2",
@@ -2270,6 +2285,7 @@ dependencies = [
"base64",
"bytes",
"futures-core",
"futures-util",
"http",
"http-body",
"http-body-util",
@@ -2278,6 +2294,7 @@ dependencies = [
"hyper-util",
"js-sys",
"log",
"mime_guess",
"percent-encoding",
"pin-project-lite",
"quinn",
@@ -2906,7 +2923,7 @@ dependencies = [
[[package]]
name = "stripstream-core"
version = "1.7.0"
version = "2.10.0"
dependencies = [
"anyhow",
"serde",

View File

@@ -3,13 +3,14 @@ members = [
"apps/api",
"apps/indexer",
"crates/core",
"crates/notifications",
"crates/parsers",
]
resolver = "2"
[workspace.package]
edition = "2021"
version = "1.7.0"
version = "2.10.0"
license = "MIT"
[workspace.dependencies]
@@ -22,7 +23,7 @@ image = { version = "0.25", default-features = false, features = ["jpeg", "png",
jpeg-decoder = "0.3"
lru = "0.12"
rayon = "1.10"
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
reqwest = { version = "0.12", default-features = false, features = ["json", "multipart", "rustls-tls"] }
rand = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

21
LICENSE Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2026 Julien Froidefond
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -81,28 +81,67 @@ The backoffice will be available at http://localhost:7082
## Features
### Libraries Management
- Create and manage multiple libraries
- Configure automatic scanning schedules (hourly, daily, weekly)
- Real-time file watcher for instant indexing
- Full and incremental rebuild options
> For the full feature list, business rules, and API details, see [docs/FEATURES.md](docs/FEATURES.md).
### Books Management
- Support for CBZ, CBR, and PDF formats
- Automatic metadata extraction
- Series and volume detection
- Full-text search powered by PostgreSQL
### Libraries
- Multi-library management with per-library configuration
- Incremental and full scanning, real-time filesystem watcher
- Per-library metadata provider selection (Google Books, ComicVine, Bédéthèque, AniList, Open Library)
### Jobs Monitoring
- Real-time job progress tracking
- Detailed statistics (scanned, indexed, removed, errors)
- Job history and logs
- Cancel pending jobs
### Books & Series
- **Formats**: CBZ, CBR, PDF, EPUB
- Automatic metadata extraction (title, series, volume, authors, page count) from filenames and directory structure
- Series aggregation with missing volume detection
- Thumbnail generation (WebP/JPEG/PNG) with lazy generation and bulk rebuild
- CBR → CBZ conversion
### Search
- Full-text search across titles, authors, and series
- Library filtering
- Real-time suggestions
### Reading Progress
- Per-book tracking: unread / reading / read with current page
- Series-level aggregated reading status
- Bulk mark-as-read for series
### Search & Discovery
- Full-text search across titles, authors, and series (PostgreSQL `pg_trgm`)
- Author listing with book/series counts
- Filtering by reading status, series status, format, metadata provider
### External Metadata
- Search, match, approve/reject workflow with confidence scoring
- Batch auto-matching and scheduled metadata refresh
- Field locking to protect manual edits from sync
### Notifications
- **Telegram**: real-time notifications via Telegram Bot API
- 16 granular event toggles (scans, thumbnails, conversions, metadata, reading status, download detection)
- Book thumbnail images included in notifications where applicable
- Test connection from settings
### External Integrations
- **AniList**: bidirectional reading status sync — pull progress from AniList or push local statuses (PLANNING/CURRENT/COMPLETED) with differential detection and configurable auto-push schedule
- **Komga**: import reading progress
- **Prowlarr**: search for missing volumes manually from series pages, or run a **download detection job** to automatically scan all series with missing volumes and report available releases
- **qBittorrent**: add torrents directly from search results
### Background Jobs
- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion, AniList reading status sync/push, download detection (Prowlarr)
- Real-time progress via Server-Sent Events (SSE)
- Job history, error tracking, cancellation, replay
### Page Rendering
- On-demand page extraction from all formats
- Image processing (format, quality, max width, resampling filter)
- LRU in-memory + disk cache
### Security
- Token-based auth (`admin` / `read` scopes) with Argon2 hashing
- Rate limiting, token expiration and revocation
### Web UI (Backoffice)
- Dashboard with statistics, interactive charts (recharts), and reading progress
- Currently reading & recently read sections
- Library, book, series, author management
- Live job monitoring, metadata search modals, settings panel
- Notification settings with per-event toggle configuration
## Environment Variables
@@ -249,4 +288,4 @@ volumes:
## License
[Your License Here]
This project is licensed under the [MIT License](LICENSE).

View File

@@ -15,6 +15,7 @@ futures = "0.3"
image.workspace = true
jpeg-decoder.workspace = true
lru.workspace = true
notifications = { path = "../../crates/notifications" }
stripstream-core = { path = "../../crates/core" }
parsers = { path = "../../crates/parsers" }
rand.workspace = true

View File

@@ -6,13 +6,15 @@ COPY Cargo.toml ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
COPY crates/core/Cargo.toml crates/core/Cargo.toml
COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/parsers/src && \
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
echo "fn main() {}" > apps/api/src/main.rs && \
echo "fn main() {}" > apps/indexer/src/main.rs && \
echo "" > apps/indexer/src/lib.rs && \
echo "" > crates/core/src/lib.rs && \
echo "" > crates/notifications/src/lib.rs && \
echo "" > crates/parsers/src/lib.rs
# Build dependencies only (cached as long as Cargo.toml files don't change)
@@ -26,12 +28,13 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \
COPY apps/api/src apps/api/src
COPY apps/indexer/src apps/indexer/src
COPY crates/core/src crates/core/src
COPY crates/notifications/src crates/notifications/src
COPY crates/parsers/src crates/parsers/src
RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/app/target \
touch apps/api/src/main.rs crates/core/src/lib.rs crates/parsers/src/lib.rs && \
touch apps/api/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
cargo build --release -p api && \
cp /app/target/release/api /usr/local/bin/api

968
apps/api/src/anilist.rs Normal file
View File

@@ -0,0 +1,968 @@
use axum::extract::{Path, State};
use axum::Json;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sqlx::Row;
use utoipa::ToSchema;
use uuid::Uuid;
use crate::{error::ApiError, state::AppState};
// ─── AniList API client ───────────────────────────────────────────────────────
const ANILIST_API: &str = "https://graphql.anilist.co";
/// Execute a GraphQL request against the AniList API.
///
/// * `token` — AniList OAuth access token (sent as a Bearer header).
/// * `query` — GraphQL document.
/// * `variables` — JSON object of GraphQL variables.
///
/// Returns the `data` field of the response on success. Any transport
/// failure, non-2xx status, parse failure, or GraphQL-level `errors`
/// entry is mapped to `ApiError::internal`.
pub(crate) async fn anilist_graphql(
    token: &str,
    query: &str,
    variables: Value,
) -> Result<Value, ApiError> {
    // Reuse one HTTP client for the whole process: the original built a new
    // reqwest::Client (and thus a new connection pool) on every call, which
    // defeats keep-alive and adds avoidable latency to each AniList request.
    static CLIENT: std::sync::OnceLock<reqwest::Client> = std::sync::OnceLock::new();
    let client = if let Some(c) = CLIENT.get() {
        c
    } else {
        let built = reqwest::Client::builder()
            .timeout(std::time::Duration::from_secs(15))
            .build()
            .map_err(|e| ApiError::internal(format!("HTTP client error: {e}")))?;
        // A concurrent task may have won the race; either way a client is
        // present afterwards, so the final get() cannot fail.
        let _ = CLIENT.set(built);
        CLIENT.get().expect("client initialized above")
    };
    let body = serde_json::json!({ "query": query, "variables": variables });
    let resp = client
        .post(ANILIST_API)
        .bearer_auth(token)
        .header("Content-Type", "application/json")
        .header("Accept", "application/json")
        .json(&body)
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("AniList request failed: {e}")))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!("AniList returned {status}: {text}")));
    }
    let data: Value = resp
        .json()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to parse AniList response: {e}")))?;
    // AniList reports application-level failures in an `errors` array even
    // with HTTP 200; surface the first message.
    if let Some(errors) = data.get("errors") {
        let msg = errors[0]["message"].as_str().unwrap_or("Unknown AniList error");
        return Err(ApiError::internal(format!("AniList API error: {msg}")));
    }
    Ok(data["data"].clone())
}
/// Load AniList settings from DB: (access_token, anilist_user_id, local_user_id)
pub(crate) async fn load_anilist_settings(pool: &sqlx::PgPool) -> Result<(String, Option<i64>, Option<Uuid>), ApiError> {
    let maybe_row = sqlx::query("SELECT value FROM app_settings WHERE key = 'anilist'")
        .fetch_optional(pool)
        .await?;
    // No settings row at all means AniList was never configured.
    let Some(settings_row) = maybe_row else {
        return Err(ApiError::bad_request("AniList not configured (missing settings)"));
    };
    let settings: Value = settings_row.get("value");
    // The token must be present AND non-empty.
    let token = match settings["access_token"].as_str() {
        Some(raw) if !raw.is_empty() => raw.to_string(),
        _ => return Err(ApiError::bad_request("AniList access token not configured")),
    };
    // Both IDs are optional: callers that need them enforce their presence.
    let anilist_user_id = settings["user_id"].as_i64();
    let local_user_id = settings["local_user_id"]
        .as_str()
        .and_then(|raw| Uuid::parse_str(raw).ok());
    Ok((token, anilist_user_id, local_user_id))
}
// ─── Types ────────────────────────────────────────────────────────────────────

/// Response for `GET /anilist/status`: the AniList account behind the token.
#[derive(Serialize, ToSchema)]
pub struct AnilistStatusResponse {
    // Always true when returned: the handler only answers after a
    // successful Viewer round-trip.
    pub connected: bool,
    pub user_id: i64,
    pub username: String,
    pub site_url: String,
}

/// One manga hit returned by `POST /anilist/search`.
#[derive(Serialize, Deserialize, ToSchema)]
pub struct AnilistMediaResult {
    pub id: i32,
    pub title_romaji: Option<String>,
    pub title_english: Option<String>,
    pub title_native: Option<String>,
    pub site_url: String,
    // Raw AniList publication status string, passed through unmodified.
    pub status: Option<String>,
    pub volumes: Option<i32>,
}

/// A stored link between a local (library, series) pair and an AniList media.
#[derive(Serialize, ToSchema)]
pub struct AnilistSeriesLinkResponse {
    #[schema(value_type = String)]
    pub library_id: Uuid,
    pub series_name: String,
    pub anilist_id: i32,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    // Link lifecycle as written by the handlers in this file:
    // 'linked' on (re)link, 'synced' after a successful push, 'error' on failure.
    pub status: String,
    #[schema(value_type = String)]
    pub linked_at: DateTime<Utc>,
    #[schema(value_type = Option<String>)]
    pub synced_at: Option<DateTime<Utc>>,
}

/// Dry-run row for `GET /anilist/sync/preview` — what WOULD be pushed.
#[derive(Serialize, ToSchema)]
pub struct AnilistSyncPreviewItem {
    pub series_name: String,
    pub anilist_id: i32,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    /// Status that would be sent to AniList: PLANNING | CURRENT | COMPLETED
    pub status: String,
    pub progress_volumes: i32,
    pub books_read: i64,
    pub book_count: i64,
}

/// One successfully pushed entry in a sync report.
#[derive(Serialize, ToSchema)]
pub struct AnilistSyncItem {
    pub series_name: String,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    /// Status sent to AniList: PLANNING | CURRENT | COMPLETED
    pub status: String,
    pub progress_volumes: i32,
}

/// Aggregate result of `POST /anilist/sync` (local → AniList push).
#[derive(Serialize, ToSchema)]
pub struct AnilistSyncReport {
    pub synced: i32,
    pub skipped: i32,
    pub errors: Vec<String>,
    pub items: Vec<AnilistSyncItem>,
}

/// One series whose local progress was updated by `POST /anilist/pull`.
#[derive(Serialize, ToSchema)]
pub struct AnilistPullItem {
    pub series_name: String,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    /// Status received from AniList: COMPLETED | CURRENT | PLANNING | etc.
    pub anilist_status: String,
    pub books_updated: i32,
}

/// Aggregate result of `POST /anilist/pull` (AniList → local pull).
#[derive(Serialize, ToSchema)]
pub struct AnilistPullReport {
    pub updated: i32,
    pub skipped: i32,
    pub errors: Vec<String>,
    pub items: Vec<AnilistPullItem>,
}

/// Body of `POST /anilist/search`.
#[derive(Deserialize, ToSchema)]
pub struct AnilistSearchRequest {
    pub query: String,
}

/// Body of `POST /anilist/series/{library_id}/{series_name}/link`.
#[derive(Deserialize, ToSchema)]
pub struct AnilistLinkRequest {
    pub anilist_id: i32,
    /// Override display title (optional)
    pub title: Option<String>,
    /// Override URL (optional)
    pub url: Option<String>,
}

/// Body of `PATCH /anilist/libraries/{id}` — enable/disable AniList sync.
#[derive(Deserialize, ToSchema)]
pub struct AnilistLibraryToggleRequest {
    pub enabled: bool,
}
// ─── Handlers ─────────────────────────────────────────────────────────────────
/// Test AniList connection and return viewer info
#[utoipa::path(
    get,
    path = "/anilist/status",
    tag = "anilist",
    responses(
        (status = 200, body = AnilistStatusResponse),
        (status = 400, description = "AniList not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_status(
    State(state): State<AppState>,
) -> Result<Json<AnilistStatusResponse>, ApiError> {
    // Returns 400 early when no access token is stored in app_settings.
    let (token, _, _) = load_anilist_settings(&state.pool).await?;
    // `Viewer` resolves to the account that owns the access token.
    let gql = r#"
query Viewer {
Viewer {
id
name
siteUrl
}
}
"#;
    let data = anilist_graphql(&token, gql, serde_json::json!({})).await?;
    let viewer = &data["Viewer"];
    // Missing fields degrade to 0/"" rather than erroring; `connected` is
    // true whenever the GraphQL round-trip itself succeeded.
    Ok(Json(AnilistStatusResponse {
        connected: true,
        user_id: viewer["id"].as_i64().unwrap_or(0),
        username: viewer["name"].as_str().unwrap_or("").to_string(),
        site_url: viewer["siteUrl"].as_str().unwrap_or("").to_string(),
    }))
}
/// Search AniList manga by title
#[utoipa::path(
    post,
    path = "/anilist/search",
    tag = "anilist",
    request_body = AnilistSearchRequest,
    responses(
        (status = 200, body = Vec<AnilistMediaResult>),
        (status = 400, description = "AniList not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn search_manga(
    State(state): State<AppState>,
    Json(body): Json<AnilistSearchRequest>,
) -> Result<Json<Vec<AnilistMediaResult>>, ApiError> {
    let (token, _, _) = load_anilist_settings(&state.pool).await?;
    // Results are capped at 10 per search (perPage: 10); only MANGA media.
    let gql = r#"
query SearchManga($search: String) {
Page(perPage: 10) {
media(search: $search, type: MANGA) {
id
title { romaji english native }
siteUrl
status
volumes
}
}
}
"#;
    let data = anilist_graphql(
        &token,
        gql,
        serde_json::json!({ "search": body.query }),
    )
    .await?;
    // A missing or malformed Page yields an empty list, not an error.
    let media = data["Page"]["media"]
        .as_array()
        .cloned()
        .unwrap_or_default();
    let results: Vec<AnilistMediaResult> = media
        .iter()
        .map(|m| AnilistMediaResult {
            id: m["id"].as_i64().unwrap_or(0) as i32,
            title_romaji: m["title"]["romaji"].as_str().map(String::from),
            title_english: m["title"]["english"].as_str().map(String::from),
            title_native: m["title"]["native"].as_str().map(String::from),
            site_url: m["siteUrl"].as_str().unwrap_or("").to_string(),
            status: m["status"].as_str().map(String::from),
            volumes: m["volumes"].as_i64().map(|v| v as i32),
        })
        .collect();
    Ok(Json(results))
}
/// Get AniList link for a specific series
#[utoipa::path(
    get,
    path = "/anilist/series/{library_id}/{series_name}",
    tag = "anilist",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("series_name" = String, Path, description = "Series name"),
    ),
    responses(
        (status = 200, body = AnilistSeriesLinkResponse),
        (status = 404, description = "No AniList link for this series"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_series_link(
    State(state): State<AppState>,
    Path((library_id, series_name)): Path<(Uuid, String)>,
) -> Result<Json<AnilistSeriesLinkResponse>, ApiError> {
    // Lookup is keyed on (library_id, series_name); no provider filter here,
    // so the first matching row (any provider) is returned.
    let row = sqlx::query(
        "SELECT library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
FROM anilist_series_links
WHERE library_id = $1 AND series_name = $2",
    )
    .bind(library_id)
    .bind(&series_name)
    .fetch_optional(&state.pool)
    .await?;
    // Absence of a row maps to 404 rather than an empty body.
    let row = row.ok_or_else(|| ApiError::not_found("no AniList link for this series"))?;
    Ok(Json(AnilistSeriesLinkResponse {
        library_id: row.get("library_id"),
        series_name: row.get("series_name"),
        anilist_id: row.get("anilist_id"),
        anilist_title: row.get("anilist_title"),
        anilist_url: row.get("anilist_url"),
        status: row.get("status"),
        linked_at: row.get("linked_at"),
        synced_at: row.get("synced_at"),
    }))
}
/// Link a series to an AniList media ID
#[utoipa::path(
    post,
    path = "/anilist/series/{library_id}/{series_name}/link",
    tag = "anilist",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("series_name" = String, Path, description = "Series name"),
    ),
    request_body = AnilistLinkRequest,
    responses(
        (status = 200, body = AnilistSeriesLinkResponse),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn link_series(
    State(state): State<AppState>,
    Path((library_id, series_name)): Path<(Uuid, String)>,
    Json(body): Json<AnilistLinkRequest>,
) -> Result<Json<AnilistSeriesLinkResponse>, ApiError> {
    // Try to fetch title/url from AniList if not provided
    // Resolution order: caller-supplied title+url win; otherwise best-effort
    // lookup on AniList (english title preferred over romaji). Every failure
    // along the way silently falls back to whatever the caller sent (possibly
    // None) — linking must succeed even when AniList is unreachable.
    let (anilist_title, anilist_url) = if body.title.is_some() && body.url.is_some() {
        (body.title, body.url)
    } else {
        // Fetch from AniList
        match load_anilist_settings(&state.pool).await {
            Ok((token, _, _)) => {
                let gql = r#"
query GetMedia($id: Int) {
Media(id: $id, type: MANGA) {
title { romaji english }
siteUrl
}
}
"#;
                match anilist_graphql(&token, gql, serde_json::json!({ "id": body.anilist_id })).await {
                    Ok(data) => {
                        let title = data["Media"]["title"]["english"]
                            .as_str()
                            .or_else(|| data["Media"]["title"]["romaji"].as_str())
                            .map(String::from);
                        let url = data["Media"]["siteUrl"].as_str().map(String::from);
                        (title, url)
                    }
                    Err(_) => (body.title, body.url),
                }
            }
            Err(_) => (body.title, body.url),
        }
    };
    // Upsert keyed on (library_id, series_name, provider). Re-linking resets
    // the lifecycle: status back to 'linked', synced_at cleared, so the next
    // sync pushes fresh data for the new AniList ID.
    let row = sqlx::query(
        r#"
INSERT INTO anilist_series_links (library_id, series_name, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
VALUES ($1, $2, 'anilist', $3, $4, $5, 'linked', NOW())
ON CONFLICT (library_id, series_name, provider) DO UPDATE
SET anilist_id = EXCLUDED.anilist_id,
anilist_title = EXCLUDED.anilist_title,
anilist_url = EXCLUDED.anilist_url,
status = 'linked',
linked_at = NOW(),
synced_at = NULL
RETURNING library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
"#,
    )
    .bind(library_id)
    .bind(&series_name)
    .bind(body.anilist_id)
    .bind(&anilist_title)
    .bind(&anilist_url)
    .fetch_one(&state.pool)
    .await?;
    Ok(Json(AnilistSeriesLinkResponse {
        library_id: row.get("library_id"),
        series_name: row.get("series_name"),
        anilist_id: row.get("anilist_id"),
        anilist_title: row.get("anilist_title"),
        anilist_url: row.get("anilist_url"),
        status: row.get("status"),
        linked_at: row.get("linked_at"),
        synced_at: row.get("synced_at"),
    }))
}
/// Remove the AniList link for a series
#[utoipa::path(
    delete,
    path = "/anilist/series/{library_id}/{series_name}/unlink",
    tag = "anilist",
    params(
        ("library_id" = String, Path, description = "Library UUID"),
        ("series_name" = String, Path, description = "Series name"),
    ),
    responses(
        (status = 200, description = "Unlinked"),
        (status = 404, description = "Link not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn unlink_series(
    State(state): State<AppState>,
    Path((library_id, series_name)): Path<(Uuid, String)>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Delete the link row; rows_affected tells us whether anything existed.
    let deleted = sqlx::query(
        "DELETE FROM anilist_series_links WHERE library_id = $1 AND series_name = $2",
    )
    .bind(library_id)
    .bind(&series_name)
    .execute(&state.pool)
    .await?
    .rows_affected();
    if deleted > 0 {
        Ok(Json(serde_json::json!({"unlinked": true})))
    } else {
        Err(ApiError::not_found("AniList link not found"))
    }
}
/// Toggle AniList sync for a library
#[utoipa::path(
    patch,
    path = "/anilist/libraries/{id}",
    tag = "anilist",
    params(("id" = String, Path, description = "Library UUID")),
    request_body = AnilistLibraryToggleRequest,
    responses(
        (status = 200, description = "Updated"),
        (status = 404, description = "Library not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn toggle_library(
    State(state): State<AppState>,
    Path(library_id): Path<Uuid>,
    Json(body): Json<AnilistLibraryToggleRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Enabling sets the provider to 'anilist'; disabling clears it (NULL).
    let provider: Option<&str> = body.enabled.then_some("anilist");
    let outcome = sqlx::query("UPDATE libraries SET reading_status_provider = $2 WHERE id = $1")
        .bind(library_id)
        .bind(provider)
        .execute(&state.pool)
        .await?;
    if outcome.rows_affected() == 0 {
        return Err(ApiError::not_found("library not found"));
    }
    Ok(Json(serde_json::json!({ "library_id": library_id, "reading_status_provider": provider })))
}
/// List series from AniList-enabled libraries that are not yet linked
#[utoipa::path(
    get,
    path = "/anilist/unlinked",
    tag = "anilist",
    responses(
        (status = 200, description = "List of unlinked series"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_unlinked(
    State(state): State<AppState>,
) -> Result<Json<Vec<serde_json::Value>>, ApiError> {
    // Anti-join: distinct series in anilist-enabled libraries with no row in
    // anilist_series_links. Books with an empty/NULL series are grouped under
    // the synthetic 'unclassified' series, matching the convention used by the
    // sync handlers below.
    let rows = sqlx::query(
        r#"
SELECT
l.id AS library_id,
l.name AS library_name,
COALESCE(NULLIF(b.series, ''), 'unclassified') AS series_name
FROM books b
JOIN libraries l ON l.id = b.library_id
LEFT JOIN anilist_series_links asl
ON asl.library_id = b.library_id
AND asl.series_name = COALESCE(NULLIF(b.series, ''), 'unclassified')
WHERE l.reading_status_provider = 'anilist'
AND asl.library_id IS NULL
GROUP BY l.id, l.name, COALESCE(NULLIF(b.series, ''), 'unclassified')
ORDER BY l.name, series_name
"#,
    )
    .fetch_all(&state.pool)
    .await?;
    let items: Vec<serde_json::Value> = rows
        .iter()
        .map(|row| {
            let library_id: Uuid = row.get("library_id");
            serde_json::json!({
                "library_id": library_id,
                "library_name": row.get::<String, _>("library_name"),
                "series_name": row.get::<String, _>("series_name"),
            })
        })
        .collect();
    Ok(Json(items))
}
/// Preview what would be synced to AniList (dry-run, no writes)
#[utoipa::path(
    get,
    path = "/anilist/sync/preview",
    tag = "anilist",
    responses(
        (status = 200, body = Vec<AnilistSyncPreviewItem>),
        (status = 400, description = "AniList not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn preview_sync(
    State(state): State<AppState>,
) -> Result<Json<Vec<AnilistSyncPreviewItem>>, ApiError> {
    // Progress is computed per local user; a configured local_user_id is
    // therefore mandatory even for a dry run.
    let (_, _, local_user_id) = load_anilist_settings(&state.pool).await?;
    let local_user_id = local_user_id
        .ok_or_else(|| ApiError::bad_request("AniList local user not configured — please select a user in settings"))?;
    let links = sqlx::query(
        r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
ORDER BY l.name, asl.series_name
"#,
    )
    .fetch_all(&state.pool)
    .await?;
    let mut items: Vec<AnilistSyncPreviewItem> = Vec::new();
    // NOTE(review): one stats query per linked series (N+1). Acceptable for
    // a small number of links; consider a single grouped query if this grows.
    for link in &links {
        let library_id: Uuid = link.get("library_id");
        let series_name: String = link.get("series_name");
        let anilist_id: i32 = link.get("anilist_id");
        let anilist_title: Option<String> = link.get("anilist_title");
        let anilist_url: Option<String> = link.get("anilist_url");
        let stats = sqlx::query(
            r#"
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
"#,
        )
        .bind(library_id)
        .bind(&series_name)
        .bind(local_user_id)
        .fetch_one(&state.pool)
        .await;
        let (book_count, books_read, total_volumes) = match stats {
            Ok(row) => {
                let bc: i64 = row.get("book_count");
                let br: i64 = row.get("books_read");
                let tv: Option<i32> = row.get("total_volumes");
                (bc, br, tv)
            }
            // A failed stats query silently drops the series from the preview.
            Err(_) => continue,
        };
        if book_count == 0 {
            continue;
        }
        // Same status derivation as sync_to_anilist: COMPLETED only when the
        // read count reaches the known series total; otherwise CURRENT if any
        // book is read, else PLANNING with zero progress.
        let (status, progress_volumes) = if books_read > 0 && total_volumes.is_some_and(|tv| books_read >= tv as i64) {
            ("COMPLETED".to_string(), books_read as i32)
        } else if books_read > 0 {
            ("CURRENT".to_string(), books_read as i32)
        } else {
            ("PLANNING".to_string(), 0i32)
        };
        items.push(AnilistSyncPreviewItem {
            series_name,
            anilist_id,
            anilist_title,
            anilist_url,
            status,
            progress_volumes,
            books_read,
            book_count,
        });
    }
    Ok(Json(items))
}
/// Sync local reading progress to AniList for all enabled libraries
#[utoipa::path(
    post,
    path = "/anilist/sync",
    tag = "anilist",
    responses(
        (status = 200, body = AnilistSyncReport),
        (status = 400, description = "AniList not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn sync_to_anilist(
    State(state): State<AppState>,
) -> Result<Json<AnilistSyncReport>, ApiError> {
    let (token, _, local_user_id) = load_anilist_settings(&state.pool).await?;
    let local_user_id = local_user_id
        .ok_or_else(|| ApiError::bad_request("AniList local user not configured — please select a user in settings"))?;
    // Get all series that have AniList links in enabled libraries
    let links = sqlx::query(
        r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
"#,
    )
    .fetch_all(&state.pool)
    .await?;
    let mut synced = 0i32;
    let mut skipped = 0i32;
    let mut errors: Vec<String> = Vec::new();
    let mut items: Vec<AnilistSyncItem> = Vec::new();
    // SaveMediaListEntry upserts the entry on the AniList side.
    let gql_update = r#"
mutation SaveEntry($mediaId: Int, $status: MediaListStatus, $progressVolumes: Int) {
SaveMediaListEntry(mediaId: $mediaId, status: $status, progressVolumes: $progressVolumes) {
id
status
progressVolumes
}
}
"#;
    // Per-series loop: compute local progress, push to AniList, then record
    // the outcome ('synced'/'error') on the link row. Errors on one series do
    // not abort the batch; they accumulate in `errors`.
    for link in &links {
        let library_id: Uuid = link.get("library_id");
        let series_name: String = link.get("series_name");
        let anilist_id: i32 = link.get("anilist_id");
        let anilist_title: Option<String> = link.get("anilist_title");
        let anilist_url: Option<String> = link.get("anilist_url");
        // Get reading progress + total_volumes from series metadata
        let stats = sqlx::query(
            r#"
SELECT
COUNT(*) as book_count,
COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read,
(SELECT sm.total_volumes FROM series_metadata sm WHERE sm.library_id = $1 AND sm.name = $2 LIMIT 1) as total_volumes
FROM books b
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND brp.user_id = $3
WHERE b.library_id = $1 AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
"#,
        )
        .bind(library_id)
        .bind(&series_name)
        .bind(local_user_id)
        .fetch_one(&state.pool)
        .await;
        let (book_count, books_read, total_volumes) = match stats {
            Ok(row) => {
                let bc: i64 = row.get("book_count");
                let br: i64 = row.get("books_read");
                let tv: Option<i32> = row.get("total_volumes");
                (bc, br, tv)
            }
            Err(e) => {
                errors.push(format!("{series_name}: DB error: {e}"));
                continue;
            }
        };
        // COMPLETED only if books_read reaches the known total_volumes
        // — never auto-complete based solely on owned books
        let (status, progress_volumes) = if book_count == 0 {
            skipped += 1;
            continue;
        } else if books_read > 0 && total_volumes.is_some_and(|tv| books_read >= tv as i64) {
            ("COMPLETED", books_read as i32)
        } else if books_read > 0 {
            ("CURRENT", books_read as i32)
        } else {
            ("PLANNING", 0i32)
        };
        let vars = serde_json::json!({
            "mediaId": anilist_id,
            "status": status,
            "progressVolumes": progress_volumes,
        });
        match anilist_graphql(&token, gql_update, vars).await {
            Ok(_) => {
                // Update synced_at
                // Bookkeeping failures are deliberately ignored (best-effort):
                // the AniList write already succeeded.
                let _ = sqlx::query(
                    "UPDATE anilist_series_links SET status = 'synced', synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
                )
                .bind(library_id)
                .bind(&series_name)
                .execute(&state.pool)
                .await;
                items.push(AnilistSyncItem {
                    series_name: series_name.clone(),
                    anilist_title,
                    anilist_url,
                    status: status.to_string(),
                    progress_volumes,
                });
                synced += 1;
            }
            Err(e) => {
                // Mark the link as errored so the UI can surface it; again
                // best-effort on the local write.
                let _ = sqlx::query(
                    "UPDATE anilist_series_links SET status = 'error' WHERE library_id = $1 AND series_name = $2",
                )
                .bind(library_id)
                .bind(&series_name)
                .execute(&state.pool)
                .await;
                errors.push(format!("{series_name}: {}", e.message));
            }
        }
    }
    Ok(Json(AnilistSyncReport { synced, skipped, errors, items }))
}
/// Pull reading list from AniList and update local reading progress
#[utoipa::path(
    post,
    path = "/anilist/pull",
    tag = "anilist",
    responses(
        (status = 200, body = AnilistPullReport),
        (status = 400, description = "AniList not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn pull_from_anilist(
    State(state): State<AppState>,
) -> Result<Json<AnilistPullReport>, ApiError> {
    // Needs both the AniList numeric user id (to query the list) and the
    // local user UUID (whose reading progress rows get written).
    let (token, user_id, local_user_id) = load_anilist_settings(&state.pool).await?;
    let user_id = user_id
        .ok_or_else(|| ApiError::bad_request("AniList user_id not configured — please test the connection in settings"))?;
    let local_user_id = local_user_id
        .ok_or_else(|| ApiError::bad_request("AniList local user not configured — please select a user in settings"))?;
    let gql = r#"
query GetUserMangaList($userId: Int) {
MediaListCollection(userId: $userId, type: MANGA) {
lists {
entries {
media { id siteUrl }
status
progressVolumes
}
}
}
}
"#;
    let data = anilist_graphql(&token, gql, serde_json::json!({ "userId": user_id })).await?;
    let lists = data["MediaListCollection"]["lists"]
        .as_array()
        .cloned()
        .unwrap_or_default();
    // Build flat list of (anilist_id, status, progressVolumes)
    let mut entries: Vec<(i32, String, i32)> = Vec::new();
    for list in &lists {
        if let Some(list_entries) = list["entries"].as_array() {
            for entry in list_entries {
                let media_id = entry["media"]["id"].as_i64().unwrap_or(0) as i32;
                let status = entry["status"].as_str().unwrap_or("").to_string();
                let progress = entry["progressVolumes"].as_i64().unwrap_or(0) as i32;
                entries.push((media_id, status, progress));
            }
        }
    }
    // Find local series linked to these anilist IDs (in enabled libraries)
    let link_rows = sqlx::query(
        r#"
SELECT asl.library_id, asl.series_name, asl.anilist_id, asl.anilist_title, asl.anilist_url
FROM anilist_series_links asl
JOIN libraries l ON l.id = asl.library_id
WHERE l.reading_status_provider = 'anilist'
"#,
    )
    .fetch_all(&state.pool)
    .await?;
    // Build map: anilist_id → (library_id, series_name, anilist_title, anilist_url)
    // NOTE(review): if two libraries link the same anilist_id, the later row
    // silently wins here — confirm that is acceptable.
    let mut link_map: std::collections::HashMap<i32, (Uuid, String, Option<String>, Option<String>)> =
        std::collections::HashMap::new();
    for row in &link_rows {
        let aid: i32 = row.get("anilist_id");
        let lib: Uuid = row.get("library_id");
        let name: String = row.get("series_name");
        let title: Option<String> = row.get("anilist_title");
        let url: Option<String> = row.get("anilist_url");
        link_map.insert(aid, (lib, name, title, url));
    }
    let mut updated = 0i32;
    let mut skipped = 0i32;
    let mut errors: Vec<String> = Vec::new();
    let mut items: Vec<AnilistPullItem> = Vec::new();
    for (anilist_id, anilist_status, progress_volumes) in &entries {
        // AniList entries with no local link are counted as skipped.
        let Some((library_id, series_name, anilist_title, anilist_url)) = link_map.get(anilist_id) else {
            skipped += 1;
            continue;
        };
        // Map AniList status → local reading status
        let local_status = match anilist_status.as_str() {
            "COMPLETED" => "read",
            "CURRENT" | "REPEATING" => "reading",
            "PLANNING" | "PAUSED" | "DROPPED" => "unread",
            _ => {
                skipped += 1;
                continue;
            }
        };
        // Get all book IDs for this series, ordered by volume
        let book_rows = sqlx::query(
            "SELECT id, volume FROM books WHERE library_id = $1 AND COALESCE(NULLIF(series, ''), 'unclassified') = $2 ORDER BY volume NULLS LAST",
        )
        .bind(library_id)
        .bind(series_name)
        .fetch_all(&state.pool)
        .await;
        let book_rows = match book_rows {
            Ok(r) => r,
            Err(e) => {
                errors.push(format!("{series_name}: {e}"));
                continue;
            }
        };
        if book_rows.is_empty() {
            skipped += 1;
            continue;
        }
        let total_books = book_rows.len() as i32;
        // Cap AniList's volume progress at the number of books we own.
        let volumes_done = (*progress_volumes).min(total_books);
        for (idx, book_row) in book_rows.iter().enumerate() {
            let book_id: Uuid = book_row.get("id");
            // Per-book status: everything before `volumes_done` is read; the
            // book at `volumes_done` is 'reading' when the series is CURRENT;
            // COMPLETED series mark every book read regardless of progress.
            let book_status = if local_status == "read" || (idx as i32) < volumes_done {
                "read"
            } else if local_status == "reading" && idx as i32 == volumes_done {
                "reading"
            } else {
                "unread"
            };
            // Upsert, but only touch rows whose status actually changes.
            // Placeholder order is intentional: $1=book_id, $2=status, $3=user_id.
            let _ = sqlx::query(
                r#"
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
VALUES ($1, $3, $2, NULL, NOW(), NOW())
ON CONFLICT (book_id, user_id) DO UPDATE
SET status = EXCLUDED.status, updated_at = NOW()
WHERE book_reading_progress.status != EXCLUDED.status
"#,
            )
            .bind(book_id)
            .bind(book_status)
            .bind(local_user_id)
            .execute(&state.pool)
            .await;
        }
        // NOTE(review): `books_updated` reports the series' total book count,
        // not the number of rows the upsert actually changed — confirm intent.
        items.push(AnilistPullItem {
            series_name: series_name.clone(),
            anilist_title: anilist_title.clone(),
            anilist_url: anilist_url.clone(),
            anilist_status: anilist_status.clone(),
            books_updated: total_books,
        });
        updated += 1;
    }
    Ok(Json(AnilistPullReport { updated, skipped, errors, items }))
}
/// List all AniList series links
#[utoipa::path(
    get,
    path = "/anilist/links",
    tag = "anilist",
    responses(
        (status = 200, body = Vec<AnilistSeriesLinkResponse>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_links(
    State(state): State<AppState>,
) -> Result<Json<Vec<AnilistSeriesLinkResponse>>, ApiError> {
    // Unpaginated: returns every link, newest first. Fine while the link
    // table stays small; revisit if it grows.
    let rows = sqlx::query(
        "SELECT library_id, series_name, anilist_id, anilist_title, anilist_url, status, linked_at, synced_at
FROM anilist_series_links
ORDER BY linked_at DESC",
    )
    .fetch_all(&state.pool)
    .await?;
    let links: Vec<AnilistSeriesLinkResponse> = rows
        .iter()
        .map(|row| AnilistSeriesLinkResponse {
            library_id: row.get("library_id"),
            series_name: row.get("series_name"),
            anilist_id: row.get("anilist_id"),
            anilist_title: row.get("anilist_title"),
            anilist_url: row.get("anilist_url"),
            status: row.get("status"),
            linked_at: row.get("linked_at"),
            synced_at: row.get("synced_at"),
        })
        .collect();
    Ok(Json(links))
}

View File

@@ -10,10 +10,15 @@ use sqlx::Row;
use crate::{error::ApiError, state::AppState};
#[derive(Clone, Debug)]
pub struct AuthUser {
pub user_id: uuid::Uuid,
}
#[derive(Clone, Debug)]
pub enum Scope {
Admin,
Read,
Read { user_id: uuid::Uuid },
}
pub async fn require_admin(
@@ -40,6 +45,20 @@ pub async fn require_read(
let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
let scope = authenticate(&state, token).await?;
if let Scope::Read { user_id } = &scope {
req.extensions_mut().insert(AuthUser { user_id: *user_id });
} else if matches!(scope, Scope::Admin) {
// Admin peut s'impersonifier via le header X-As-User
if let Some(as_user_id) = req
.headers()
.get("X-As-User")
.and_then(|v| v.to_str().ok())
.and_then(|v| uuid::Uuid::parse_str(v).ok())
{
req.extensions_mut().insert(AuthUser { user_id: as_user_id });
}
}
req.extensions_mut().insert(scope);
Ok(next.run(req).await)
}
@@ -60,8 +79,7 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
let maybe_row = sqlx::query(
r#"
SELECT id, token_hash, scope
FROM api_tokens
SELECT id, token_hash, scope, user_id FROM api_tokens
WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW())
"#,
)
@@ -88,7 +106,12 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
let scope: String = row.try_get("scope").map_err(|_| ApiError::unauthorized("invalid token"))?;
match scope.as_str() {
"admin" => Ok(Scope::Admin),
"read" => Ok(Scope::Read),
"read" => {
let user_id: uuid::Uuid = row
.try_get("user_id")
.map_err(|_| ApiError::unauthorized("read token missing user_id"))?;
Ok(Scope::Read { user_id })
}
_ => Err(ApiError::unauthorized("invalid token scope")),
}
}

178
apps/api/src/authors.rs Normal file
View File

@@ -0,0 +1,178 @@
use axum::{extract::{Query, State}, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
/// Query parameters for `GET /authors`.
#[derive(Deserialize, ToSchema)]
pub struct ListAuthorsQuery {
    // Substring match on author name (ILIKE), optional.
    #[schema(value_type = Option<String>, example = "batman")]
    pub q: Option<String>,
    // 1-based page number; defaults to 1 in the handler.
    #[schema(value_type = Option<i64>, example = 1)]
    pub page: Option<i64>,
    // Page size; handler clamps to 1..=100, default 20.
    #[schema(value_type = Option<i64>, example = 20)]
    pub limit: Option<i64>,
    /// Sort order: "name" (default), "books" (most books first)
    #[schema(value_type = Option<String>, example = "books")]
    pub sort: Option<String>,
}

/// One author row with aggregate counts.
#[derive(Serialize, ToSchema)]
pub struct AuthorItem {
    pub name: String,
    pub book_count: i64,
    pub series_count: i64,
}

/// Paginated envelope for the authors listing.
#[derive(Serialize, ToSchema)]
pub struct AuthorsPageResponse {
    pub items: Vec<AuthorItem>,
    pub total: i64,
    pub page: i64,
    pub limit: i64,
}
/// List all unique authors with book/series counts
#[utoipa::path(
    get,
    path = "/authors",
    tag = "authors",
    params(
        ("q" = Option<String>, Query, description = "Search by author name"),
        ("page" = Option<i64>, Query, description = "Page number (1-based)"),
        ("limit" = Option<i64>, Query, description = "Items per page (max 100)"),
        ("sort" = Option<String>, Query, description = "Sort: name (default) or books"),
    ),
    responses(
        (status = 200, body = AuthorsPageResponse),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_authors(
    State(state): State<AppState>,
    Query(query): Query<ListAuthorsQuery>,
) -> Result<Json<AuthorsPageResponse>, ApiError> {
    let page = query.page.unwrap_or(1).max(1);
    let limit = query.limit.unwrap_or(20).clamp(1, 100);
    let offset = (page - 1) * limit;
    let sort = query.sort.as_deref().unwrap_or("name");
    // The ORDER BY fragment is interpolated into the SQL via format!, but it
    // is drawn from this fixed whitelist only — no user input reaches the SQL
    // text, so there is no injection risk here.
    let order_clause = match sort {
        "books" => "book_count DESC, name ASC",
        _ => "name ASC",
    };
    // NOTE(review): `%` and `_` inside `q` are not escaped, so users can pass
    // their own ILIKE wildcards — confirm this is intended behavior.
    let q_pattern = query.q.as_deref()
        .filter(|s| !s.trim().is_empty())
        .map(|s| format!("%{s}%"));
    // Aggregate unique authors from books.authors + books.author + series_metadata.authors
    // ({{}} renders as the literal '{}' — an empty Postgres array — after format!.)
    let sql = format!(
        r#"
WITH all_authors AS (
SELECT DISTINCT UNNEST(
COALESCE(
NULLIF(authors, '{{}}'),
CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
)
) AS name
FROM books
UNION
SELECT DISTINCT UNNEST(authors) AS name
FROM series_metadata
WHERE authors != '{{}}'
),
filtered AS (
SELECT name FROM all_authors
WHERE ($1::text IS NULL OR name ILIKE $1)
),
book_counts AS (
SELECT
f.name AS author_name,
COUNT(DISTINCT b.id) AS book_count
FROM filtered f
LEFT JOIN books b ON (
f.name = ANY(
COALESCE(
NULLIF(b.authors, '{{}}'),
CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END
)
)
)
GROUP BY f.name
),
series_counts AS (
SELECT
f.name AS author_name,
COUNT(DISTINCT (sm.library_id, sm.name)) AS series_count
FROM filtered f
LEFT JOIN series_metadata sm ON (
f.name = ANY(sm.authors) AND sm.authors != '{{}}'
)
GROUP BY f.name
)
SELECT
f.name,
COALESCE(bc.book_count, 0) AS book_count,
COALESCE(sc.series_count, 0) AS series_count
FROM filtered f
LEFT JOIN book_counts bc ON bc.author_name = f.name
LEFT JOIN series_counts sc ON sc.author_name = f.name
ORDER BY {order_clause}
LIMIT $2 OFFSET $3
"#
    );
    // Separate total count over the same author universe and filter.
    let count_sql = r#"
WITH all_authors AS (
SELECT DISTINCT UNNEST(
COALESCE(
NULLIF(authors, '{}'),
CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
)
) AS name
FROM books
UNION
SELECT DISTINCT UNNEST(authors) AS name
FROM series_metadata
WHERE authors != '{}'
)
SELECT COUNT(*) AS total
FROM all_authors
WHERE ($1::text IS NULL OR name ILIKE $1)
"#;
    // Page query and count query run concurrently.
    let (rows, count_row) = tokio::join!(
        sqlx::query(&sql)
            .bind(q_pattern.as_deref())
            .bind(limit)
            .bind(offset)
            .fetch_all(&state.pool),
        sqlx::query(count_sql)
            .bind(q_pattern.as_deref())
            .fetch_one(&state.pool)
    );
    let rows = rows.map_err(|e| ApiError::internal(format!("authors query failed: {e}")))?;
    let total: i64 = count_row
        .map_err(|e| ApiError::internal(format!("authors count failed: {e}")))?
        .get("total");
    let items: Vec<AuthorItem> = rows
        .iter()
        .map(|r| AuthorItem {
            name: r.get("name"),
            book_count: r.get("book_count"),
            series_count: r.get("series_count"),
        })
        .collect();
    Ok(Json(AuthorsPageResponse {
        items,
        total,
        page,
        limit,
    }))
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,675 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use tracing::{info, warn};
use utoipa::ToSchema;
use uuid::Uuid;
use crate::{error::ApiError, prowlarr, state::AppState};
// ---------------------------------------------------------------------------
// DTOs
// ---------------------------------------------------------------------------
/// Body of `POST /download-detection/start`.
#[derive(Deserialize, ToSchema)]
pub struct StartDownloadDetectionRequest {
    // When None, the handler fans out one job per library.
    pub library_id: Option<String>,
}

/// Summary counters for a download-detection job.
#[derive(Serialize, ToSchema)]
pub struct DownloadDetectionReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    pub status: String,
    pub total_series: i64,
    pub found: i64,
    pub not_found: i64,
    pub no_missing: i64,
    pub no_metadata: i64,
    pub errors: i64,
}

/// Per-series outcome of a download-detection run.
#[derive(Serialize, ToSchema)]
pub struct DownloadDetectionResultDto {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub series_name: String,
    /// 'found' | 'not_found' | 'no_missing' | 'no_metadata' | 'error'
    pub status: String,
    pub missing_count: i32,
    pub available_releases: Option<Vec<AvailableReleaseDto>>,
    pub error_message: Option<String>,
}

/// One candidate release returned by the indexer search.
#[derive(Serialize, Deserialize, ToSchema)]
pub struct AvailableReleaseDto {
    pub title: String,
    // Release size in bytes (as reported by the indexer).
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    // Missing volume numbers this release appears to cover.
    pub matched_missing_volumes: Vec<i32>,
}
// ---------------------------------------------------------------------------
// POST /download-detection/start
// ---------------------------------------------------------------------------
#[utoipa::path(
post,
path = "/download-detection/start",
tag = "download_detection",
request_body = StartDownloadDetectionRequest,
responses(
(status = 200, description = "Job created"),
(status = 400, description = "Bad request"),
),
security(("Bearer" = []))
)]
pub async fn start_detection(
State(state): State<AppState>,
Json(body): Json<StartDownloadDetectionRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
// All libraries case
if body.library_id.is_none() {
prowlarr::check_prowlarr_configured(&state.pool).await?;
let library_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT id FROM libraries ORDER BY name"
)
.fetch_all(&state.pool)
.await?;
let mut last_job_id: Option<Uuid> = None;
for library_id in library_ids {
let existing: Option<Uuid> = sqlx::query_scalar(
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'download_detection' AND status IN ('pending', 'running') LIMIT 1",
)
.bind(library_id)
.fetch_optional(&state.pool)
.await?;
if existing.is_some() { continue; }
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'download_detection', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
let pool = state.pool.clone();
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_download_detection(&pool, job_id, library_id).await {
warn!("[DOWNLOAD_DETECTION] job {job_id} failed: {e}");
let _ = sqlx::query(
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
)
.bind(job_id)
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool,
notifications::NotificationEvent::DownloadDetectionFailed {
library_name,
error: e.to_string(),
},
);
}
});
last_job_id = Some(job_id);
}
return Ok(Json(serde_json::json!({
"id": last_job_id.map(|id| id.to_string()),
"status": "started",
})));
}
let library_id: Uuid = body
.library_id
.unwrap()
.parse()
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
// Verify library exists
sqlx::query("SELECT id FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await?
.ok_or_else(|| ApiError::not_found("library not found"))?;
// Verify Prowlarr is configured
prowlarr::check_prowlarr_configured(&state.pool).await?;
// Check no existing running job for this library
let existing: Option<Uuid> = sqlx::query_scalar(
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'download_detection' AND status IN ('pending', 'running') LIMIT 1",
)
.bind(library_id)
.fetch_optional(&state.pool)
.await?;
if let Some(existing_id) = existing {
return Ok(Json(serde_json::json!({
"id": existing_id.to_string(),
"status": "already_running",
})));
}
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'download_detection', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
let pool = state.pool.clone();
let library_name: Option<String> =
sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_download_detection(&pool, job_id, library_id).await {
warn!("[DOWNLOAD_DETECTION] job {job_id} failed: {e}");
let _ = sqlx::query(
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
)
.bind(job_id)
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool,
notifications::NotificationEvent::DownloadDetectionFailed {
library_name,
error: e.to_string(),
},
);
}
});
Ok(Json(serde_json::json!({
"id": job_id.to_string(),
"status": "running",
})))
}
// ---------------------------------------------------------------------------
// GET /download-detection/:id/report
// ---------------------------------------------------------------------------
#[utoipa::path(
    get,
    path = "/download-detection/{id}/report",
    tag = "download_detection",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = DownloadDetectionReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
/// Build the per-status summary for one detection job: the job's own status
/// plus counts aggregated from `download_detection_results`.
pub async fn get_detection_report(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
) -> Result<Json<DownloadDetectionReportDto>, ApiError> {
    // 404 when the id is unknown or belongs to another job type.
    let job_row = sqlx::query(
        "SELECT status, total_files FROM index_jobs WHERE id = $1 AND type = 'download_detection'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;
    let status: String = job_row.get("status");
    let total: Option<i32> = job_row.get("total_files");

    // Aggregate one count per result status for this job.
    let count_rows = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM download_detection_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(&state.pool)
    .await?;
    let (mut found, mut not_found, mut no_missing, mut no_metadata, mut errors) =
        (0i64, 0i64, 0i64, 0i64, 0i64);
    for row in &count_rows {
        let cnt: i64 = row.get("cnt");
        match row.get::<String, _>("status").as_str() {
            "found" => found = cnt,
            "not_found" => not_found = cnt,
            "no_missing" => no_missing = cnt,
            "no_metadata" => no_metadata = cnt,
            "error" => errors = cnt,
            _ => {}
        }
    }

    Ok(Json(DownloadDetectionReportDto {
        job_id,
        status,
        total_series: i64::from(total.unwrap_or(0)),
        found,
        not_found,
        no_missing,
        no_metadata,
        errors,
    }))
}
// ---------------------------------------------------------------------------
// GET /download-detection/:id/results
// ---------------------------------------------------------------------------
/// Query-string parameters for the results endpoint.
#[derive(Deserialize)]
pub struct ResultsQuery {
    pub status: Option<String>,
}
#[utoipa::path(
    get,
    path = "/download-detection/{id}/results",
    tag = "download_detection",
    params(
        ("id" = String, Path, description = "Job UUID"),
        ("status" = Option<String>, Query, description = "Filter by status"),
    ),
    responses(
        (status = 200, body = Vec<DownloadDetectionResultDto>),
    ),
    security(("Bearer" = []))
)]
/// List per-series results for a detection job, optionally filtered by status.
pub async fn get_detection_results(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
    axum::extract::Query(query): axum::extract::Query<ResultsQuery>,
) -> Result<Json<Vec<DownloadDetectionResultDto>>, ApiError> {
    // With a status filter, order by series name; without one, group by
    // status first so the caller gets results in status sections.
    let rows = match &query.status {
        Some(status_filter) => {
            sqlx::query(
                "SELECT id, series_name, status, missing_count, available_releases, error_message
             FROM download_detection_results
             WHERE job_id = $1 AND status = $2
             ORDER BY series_name",
            )
            .bind(job_id)
            .bind(status_filter)
            .fetch_all(&state.pool)
            .await?
        }
        None => {
            sqlx::query(
                "SELECT id, series_name, status, missing_count, available_releases, error_message
             FROM download_detection_results
             WHERE job_id = $1
             ORDER BY status, series_name",
            )
            .bind(job_id)
            .fetch_all(&state.pool)
            .await?
        }
    };

    let mut results = Vec::with_capacity(rows.len());
    for row in &rows {
        // available_releases is stored as JSON; a payload that no longer
        // deserializes degrades to None instead of failing the request.
        let available_releases: Option<Vec<AvailableReleaseDto>> = row
            .get::<Option<serde_json::Value>, _>("available_releases")
            .and_then(|v| serde_json::from_value(v).ok());
        results.push(DownloadDetectionResultDto {
            id: row.get("id"),
            series_name: row.get("series_name"),
            status: row.get("status"),
            missing_count: row.get("missing_count"),
            available_releases,
            error_message: row.get("error_message"),
        });
    }
    Ok(Json(results))
}
// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------
/// Run one download-detection pass for a library, writing per-series results
/// to `download_detection_results` and progress/stats to the `index_jobs` row.
///
/// For each distinct series: record 'no_metadata' when there is no approved
/// external metadata link, 'no_missing' when every metadata volume has a
/// matching book, otherwise search Prowlarr and record 'found' / 'not_found'
/// / 'error'. Cancellation is checked between series.
///
/// Returns `(total_series, found_count)`; errors are stringified so callers
/// can store them in `index_jobs.error_opt`.
pub(crate) async fn process_download_detection(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(i32, i64), String> {
    // Prowlarr base URL, API key and search categories from settings.
    let (prowlarr_url, prowlarr_api_key, categories) =
        prowlarr::load_prowlarr_config_internal(pool)
            .await
            .map_err(|e| e.message)?;
    // Fetch all distinct series names in the library; empty series names
    // collapse to the 'unclassified' sentinel.
    let all_series: Vec<String> = sqlx::query_scalar(
        r#"
        SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
        FROM books
        WHERE library_id = $1
        ORDER BY 1
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;
    let total = all_series.len() as i32;
    // total_files doubles as "total series" for this job type.
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;
    // Fetch approved metadata links for this library (series_name -> link_id)
    let links: Vec<(String, Uuid)> = sqlx::query(
        "SELECT series_name, id FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?
    .into_iter()
    .map(|row| {
        let series_name: String = row.get("series_name");
        let link_id: Uuid = row.get("id");
        (series_name, link_id)
    })
    .collect();
    let link_map: std::collections::HashMap<String, Uuid> = links.into_iter().collect();
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;
    let mut processed = 0i32;
    for series_name in &all_series {
        // Honour cooperative cancellation between series.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok((total, 0));
        }
        processed += 1;
        // total.max(1) guards against division by zero on an empty library.
        let progress = (processed * 100 / total.max(1)).min(100);
        // Progress write is best-effort (.ok()): a failed update never aborts the job.
        sqlx::query(
            "UPDATE index_jobs SET processed_files = $2, progress_percent = $3, current_file = $4 WHERE id = $1",
        )
        .bind(job_id)
        .bind(processed)
        .bind(progress)
        .bind(series_name)
        .execute(pool)
        .await
        .ok();
        // Skip unclassified
        if series_name == "unclassified" {
            insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
            continue;
        }
        // Check if this series has an approved metadata link
        let link_id = match link_map.get(series_name) {
            Some(id) => *id,
            None => {
                insert_result(pool, job_id, library_id, series_name, "no_metadata", 0, None, None).await;
                continue;
            }
        };
        // Fetch missing books for this series (metadata rows with no matched book)
        let missing_rows = sqlx::query(
            "SELECT volume_number FROM external_book_metadata WHERE link_id = $1 AND book_id IS NULL ORDER BY volume_number NULLS LAST",
        )
        .bind(link_id)
        .fetch_all(pool)
        .await
        .map_err(|e| e.to_string())?;
        if missing_rows.is_empty() {
            insert_result(pool, job_id, library_id, series_name, "no_missing", 0, None, None).await;
            continue;
        }
        // NOTE(review): rows whose volume_number is NULL are counted in
        // missing_count but dropped here, so they can never match a release
        // title — confirm this is intended.
        let missing_volumes: Vec<i32> = missing_rows
            .iter()
            .filter_map(|row| row.get::<Option<i32>, _>("volume_number"))
            .collect();
        let missing_count = missing_rows.len() as i32;
        // Search Prowlarr
        match search_prowlarr_for_series(
            &client,
            &prowlarr_url,
            &prowlarr_api_key,
            &categories,
            series_name,
            &missing_volumes,
        )
        .await
        {
            Ok(matched_releases) if !matched_releases.is_empty() => {
                // At least one release mentions a missing volume: store the
                // matched releases as JSON alongside the result row.
                let releases_json = serde_json::to_value(&matched_releases).ok();
                insert_result(
                    pool,
                    job_id,
                    library_id,
                    series_name,
                    "found",
                    missing_count,
                    releases_json,
                    None,
                )
                .await;
            }
            Ok(_) => {
                insert_result(pool, job_id, library_id, series_name, "not_found", missing_count, None, None).await;
            }
            Err(e) => {
                // Per-series search failure is recorded, not fatal to the job.
                warn!("[DOWNLOAD_DETECTION] series '{series_name}': {e}");
                insert_result(pool, job_id, library_id, series_name, "error", missing_count, None, Some(&e)).await;
            }
        }
    }
    // Build final stats by re-aggregating the result rows just written.
    let counts = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM download_detection_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;
    let mut count_found = 0i64;
    let mut count_not_found = 0i64;
    let mut count_no_missing = 0i64;
    let mut count_no_metadata = 0i64;
    let mut count_errors = 0i64;
    for row in &counts {
        let s: String = row.get("status");
        let c: i64 = row.get("cnt");
        match s.as_str() {
            "found" => count_found = c,
            "not_found" => count_not_found = c,
            "no_missing" => count_no_missing = c,
            "no_metadata" => count_no_metadata = c,
            "error" => count_errors = c,
            _ => {}
        }
    }
    let stats = serde_json::json!({
        "total_series": total as i64,
        "found": count_found,
        "not_found": count_not_found,
        "no_missing": count_no_missing,
        "no_metadata": count_no_metadata,
        "errors": count_errors,
    });
    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, progress_percent = 100 WHERE id = $1",
    )
    .bind(job_id)
    .bind(&stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    info!(
        "[DOWNLOAD_DETECTION] job={job_id} completed: {total} series, found={count_found}, not_found={count_not_found}, no_missing={count_no_missing}, no_metadata={count_no_metadata}, errors={count_errors}"
    );
    // Best-effort name lookup for the completion notification.
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::DownloadDetectionCompleted {
            library_name,
            total_series: total,
            found: count_found,
        },
    );
    Ok((total, count_found))
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Query Prowlarr for releases matching `series_name` and keep only those
/// whose title mentions at least one of the missing volume numbers.
async fn search_prowlarr_for_series(
    client: &reqwest::Client,
    url: &str,
    api_key: &str,
    categories: &[i32],
    series_name: &str,
    missing_volumes: &[i32],
) -> Result<Vec<AvailableReleaseDto>, String> {
    // Quote the series name so it is searched as a phrase.
    let mut params: Vec<(&str, String)> = Vec::with_capacity(2 + categories.len());
    params.push(("query", format!("\"{}\"", series_name)));
    params.push(("type", "search".to_string()));
    params.extend(categories.iter().map(|cat| ("categories", cat.to_string())));

    let resp = client
        .get(format!("{url}/api/v1/search"))
        .query(&params)
        .header("X-Api-Key", api_key)
        .send()
        .await
        .map_err(|e| format!("Prowlarr request failed: {e}"))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Prowlarr returned {status}: {text}"));
    }
    let raw_releases: Vec<prowlarr::ProwlarrRawRelease> = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Prowlarr response: {e}"))?;

    // Keep a release only when its title references at least one missing volume.
    let mut matched = Vec::new();
    for release in raw_releases {
        let matched_vols: Vec<i32> = prowlarr::extract_volumes_from_title_pub(&release.title)
            .into_iter()
            .filter(|v| missing_volumes.contains(v))
            .collect();
        if !matched_vols.is_empty() {
            matched.push(AvailableReleaseDto {
                title: release.title,
                size: release.size,
                download_url: release.download_url,
                indexer: release.indexer,
                seeders: release.seeders,
                matched_missing_volumes: matched_vols,
            });
        }
    }
    Ok(matched)
}
/// Persist one per-series outcome row for a detection job.
///
/// Best-effort by design: the INSERT's result is discarded so a bookkeeping
/// failure can never abort the surrounding job loop.
#[allow(clippy::too_many_arguments)]
async fn insert_result(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    status: &str,
    missing_count: i32,
    available_releases: Option<serde_json::Value>,
    error_message: Option<&str>,
) {
    let statement = sqlx::query(
        r#"
        INSERT INTO download_detection_results
        (job_id, library_id, series_name, status, missing_count, available_releases, error_message)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        "#,
    )
    .bind(job_id)
    .bind(library_id)
    .bind(series_name)
    .bind(status)
    .bind(missing_count)
    .bind(&available_releases)
    .bind(error_message);
    // Intentionally ignore the outcome (see doc comment above).
    let _ = statement.execute(pool).await;
}
/// Best-effort check of whether the job row has been flipped to 'cancelled'.
/// Query failures or a missing row are treated as "not cancelled".
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
    let current: Option<String> =
        sqlx::query_scalar("SELECT status FROM index_jobs WHERE id = $1")
            .bind(job_id)
            .fetch_optional(pool)
            .await
            .ok()
            .flatten();
    matches!(current.as_deref(), Some("cancelled"))
}

View File

@@ -16,6 +16,10 @@ pub struct RebuildRequest {
pub library_id: Option<Uuid>,
#[schema(value_type = Option<bool>, example = false)]
pub full: Option<bool>,
/// Deep rescan: clears directory mtimes to force re-walking all directories,
/// discovering newly supported formats without deleting existing data.
#[schema(value_type = Option<bool>, example = false)]
pub rescan: Option<bool>,
}
#[derive(Serialize, ToSchema)]
@@ -117,9 +121,38 @@ pub async fn enqueue_rebuild(
) -> Result<Json<IndexJobResponse>, ApiError> {
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
let is_full = payload.as_ref().and_then(|p| p.0.full).unwrap_or(false);
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
let id = Uuid::new_v4();
let is_rescan = payload.as_ref().and_then(|p| p.0.rescan).unwrap_or(false);
let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
// When no library specified, create one job per library
if library_id.is_none() {
let library_ids: Vec<Uuid> = sqlx::query_scalar("SELECT id FROM libraries ORDER BY name")
.fetch_all(&state.pool)
.await?;
let mut last_id: Option<Uuid> = None;
for lib_id in library_ids {
let id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, $3, 'pending')",
)
.bind(id)
.bind(lib_id)
.bind(job_type)
.execute(&state.pool)
.await?;
last_id = Some(id);
}
let last_id = last_id.ok_or_else(|| ApiError::bad_request("No libraries found"))?;
let row = sqlx::query(
"SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
)
.bind(last_id)
.fetch_one(&state.pool)
.await?;
return Ok(Json(map_row(row)));
}
let id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, $3, 'pending')",
)

169
apps/api/src/job_poller.rs Normal file
View File

@@ -0,0 +1,169 @@
use std::time::Duration;
use sqlx::{PgPool, Row};
use tracing::{error, info, trace};
use uuid::Uuid;
use crate::{download_detection, metadata_batch, metadata_refresh, reading_status_push};
/// Poll for pending API-only jobs (see `API_JOB_TYPES`) and process them.
/// This mirrors the indexer's worker loop but for job types handled by the
/// API: each claimed job runs on its own spawned task so a slow job never
/// blocks claiming the next one.
pub async fn run_job_poller(pool: PgPool, interval_seconds: u64) {
    let idle_delay = Duration::from_secs(interval_seconds.max(1));
    loop {
        // Claim one job; on "nothing pending" or claim error, sleep and retry.
        let (job_id, job_type, library_id) = match claim_next_api_job(&pool).await {
            Ok(Some(job)) => job,
            Ok(None) => {
                trace!("[JOB_POLLER] No pending API jobs, waiting...");
                tokio::time::sleep(idle_delay).await;
                continue;
            }
            Err(err) => {
                error!("[JOB_POLLER] Error claiming job: {err}");
                tokio::time::sleep(idle_delay).await;
                continue;
            }
        };
        info!("[JOB_POLLER] Claimed {job_type} job {job_id} library={library_id}");
        // Resolve the library name up front so the failure notification can
        // include it; lookup failure degrades to None.
        let library_name: Option<String> =
            sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
                .bind(library_id)
                .fetch_optional(&pool)
                .await
                .ok()
                .flatten();
        let worker_pool = pool.clone();
        tokio::spawn(async move {
            // Dispatch to the processor matching the claimed job type.
            let outcome = match job_type.as_str() {
                "metadata_refresh" => {
                    metadata_refresh::process_metadata_refresh(&worker_pool, job_id, library_id)
                        .await
                }
                "metadata_batch" => {
                    metadata_batch::process_metadata_batch(&worker_pool, job_id, library_id).await
                }
                "reading_status_push" => {
                    reading_status_push::process_reading_status_push(
                        &worker_pool,
                        job_id,
                        library_id,
                    )
                    .await
                }
                "download_detection" => {
                    download_detection::process_download_detection(
                        &worker_pool,
                        job_id,
                        library_id,
                    )
                    .await
                    .map(|_| ())
                }
                _ => Err(format!("Unknown API job type: {job_type}")),
            };
            if let Err(e) = outcome {
                error!("[JOB_POLLER] {job_type} job {job_id} failed: {e}");
                // Mark the job failed; best-effort, the poller keeps running.
                let _ = sqlx::query(
                    "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
                )
                .bind(job_id)
                .bind(e.to_string())
                .execute(&worker_pool)
                .await;
                // Select the failure event matching the job type, then emit it.
                let event = match job_type.as_str() {
                    "metadata_refresh" => {
                        Some(notifications::NotificationEvent::MetadataRefreshFailed {
                            library_name,
                            error: e.to_string(),
                        })
                    }
                    "metadata_batch" => {
                        Some(notifications::NotificationEvent::MetadataBatchFailed {
                            library_name,
                            error: e.to_string(),
                        })
                    }
                    "reading_status_push" => {
                        Some(notifications::NotificationEvent::ReadingStatusPushFailed {
                            library_name,
                            error: e.to_string(),
                        })
                    }
                    "download_detection" => {
                        Some(notifications::NotificationEvent::DownloadDetectionFailed {
                            library_name,
                            error: e.to_string(),
                        })
                    }
                    _ => None,
                };
                if let Some(event) = event {
                    notifications::notify(worker_pool, event);
                }
            }
        });
    }
}
/// Job types the API process (not the indexer) is responsible for.
const API_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh", "reading_status_push", "download_detection"];

/// Atomically claim the oldest pending API job: select it with
/// `FOR UPDATE SKIP LOCKED` inside a transaction (so concurrent pollers never
/// grab the same row), flip it to 'running', and return
/// `(id, type, library_id)`. Returns `Ok(None)` when nothing is pending.
async fn claim_next_api_job(pool: &PgPool) -> Result<Option<(Uuid, String, Uuid)>, sqlx::Error> {
    let mut tx = pool.begin().await?;
    let claimed = sqlx::query(
        r#"
        SELECT id, type, library_id
        FROM index_jobs
        WHERE status = 'pending'
          AND type = ANY($1)
          AND library_id IS NOT NULL
        ORDER BY created_at ASC
        FOR UPDATE SKIP LOCKED
        LIMIT 1
        "#,
    )
    .bind(API_JOB_TYPES)
    .fetch_optional(&mut *tx)
    .await?;

    match claimed {
        None => {
            tx.commit().await?;
            Ok(None)
        }
        Some(row) => {
            let job_id: Uuid = row.get("id");
            sqlx::query(
                "UPDATE index_jobs SET status = 'running', started_at = NOW(), error_opt = NULL WHERE id = $1",
            )
            .bind(job_id)
            .execute(&mut *tx)
            .await?;
            tx.commit().await?;
            Ok(Some((job_id, row.get("type"), row.get("library_id"))))
        }
    }
}

View File

@@ -38,6 +38,8 @@ pub struct KomgaSyncRequest {
pub url: String,
pub username: String,
pub password: String,
#[schema(value_type = String)]
pub user_id: Uuid,
}
#[derive(Serialize, ToSchema)]
@@ -45,6 +47,8 @@ pub struct KomgaSyncResponse {
#[schema(value_type = String)]
pub id: Uuid,
pub komga_url: String,
#[schema(value_type = Option<String>)]
pub user_id: Option<Uuid>,
pub total_komga_read: i64,
pub matched: i64,
pub already_read: i64,
@@ -61,6 +65,8 @@ pub struct KomgaSyncReportSummary {
#[schema(value_type = String)]
pub id: Uuid,
pub komga_url: String,
#[schema(value_type = Option<String>)]
pub user_id: Option<Uuid>,
pub total_komga_read: i64,
pub matched: i64,
pub already_read: i64,
@@ -154,10 +160,11 @@ pub async fn sync_komga_read_books(
.fetch_all(&state.pool)
.await?;
type BookEntry = (Uuid, String, String);
// Primary: (series_lower, title_lower) -> Vec<(Uuid, title, series)>
let mut primary_map: HashMap<(String, String), Vec<(Uuid, String, String)>> = HashMap::new();
let mut primary_map: HashMap<(String, String), Vec<BookEntry>> = HashMap::new();
// Secondary: title_lower -> Vec<(Uuid, title, series)>
let mut secondary_map: HashMap<String, Vec<(Uuid, String, String)>> = HashMap::new();
let mut secondary_map: HashMap<String, Vec<BookEntry>> = HashMap::new();
for row in &rows {
let id: Uuid = row.get("id");
@@ -214,11 +221,12 @@ pub async fn sync_komga_read_books(
let mut already_read_ids: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
if !matched_ids.is_empty() {
// Get already-read book IDs
// Get already-read book IDs for this user
let ar_rows = sqlx::query(
"SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND status = 'read'",
"SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND user_id = $2 AND status = 'read'",
)
.bind(&matched_ids)
.bind(body.user_id)
.fetch_all(&state.pool)
.await?;
@@ -227,12 +235,12 @@ pub async fn sync_komga_read_books(
}
already_read = already_read_ids.len() as i64;
// Bulk upsert all matched books as read
// Bulk upsert all matched books as read for this user
sqlx::query(
r#"
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
SELECT unnest($1::uuid[]), 'read', NULL, NOW(), NOW()
ON CONFLICT (book_id) DO UPDATE
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
SELECT unnest($1::uuid[]), $2, 'read', NULL, NOW(), NOW()
ON CONFLICT (book_id, user_id) DO UPDATE
SET status = 'read',
current_page = NULL,
last_read_at = NOW(),
@@ -241,6 +249,7 @@ pub async fn sync_komga_read_books(
"#,
)
.bind(&matched_ids)
.bind(body.user_id)
.execute(&state.pool)
.await?;
}
@@ -272,12 +281,13 @@ pub async fn sync_komga_read_books(
let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default();
let report_row = sqlx::query(
r#"
INSERT INTO komga_sync_reports (komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
INSERT INTO komga_sync_reports (komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING id, created_at
"#,
)
.bind(&url)
.bind(body.user_id)
.bind(total_komga_read)
.bind(matched)
.bind(already_read)
@@ -291,6 +301,7 @@ pub async fn sync_komga_read_books(
Ok(Json(KomgaSyncResponse {
id: report_row.get("id"),
komga_url: url,
user_id: Some(body.user_id),
total_komga_read,
matched,
already_read,
@@ -318,7 +329,7 @@ pub async fn list_sync_reports(
) -> Result<Json<Vec<KomgaSyncReportSummary>>, ApiError> {
let rows = sqlx::query(
r#"
SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked,
SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked,
jsonb_array_length(unmatched) as unmatched_count, created_at
FROM komga_sync_reports
ORDER BY created_at DESC
@@ -333,6 +344,7 @@ pub async fn list_sync_reports(
.map(|row| KomgaSyncReportSummary {
id: row.get("id"),
komga_url: row.get("komga_url"),
user_id: row.get("user_id"),
total_komga_read: row.get("total_komga_read"),
matched: row.get("matched"),
already_read: row.get("already_read"),
@@ -364,7 +376,7 @@ pub async fn get_sync_report(
) -> Result<Json<KomgaSyncResponse>, ApiError> {
let row = sqlx::query(
r#"
SELECT id, komga_url, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
FROM komga_sync_reports
WHERE id = $1
"#,
@@ -385,6 +397,7 @@ pub async fn get_sync_report(
Ok(Json(KomgaSyncResponse {
id: row.get("id"),
komga_url: row.get("komga_url"),
user_id: row.get("user_id"),
total_komga_read: row.get("total_komga_read"),
matched: row.get("matched"),
already_read: row.get("already_read"),

View File

@@ -23,6 +23,20 @@ pub struct LibraryResponse {
pub watcher_enabled: bool,
pub metadata_provider: Option<String>,
pub fallback_metadata_provider: Option<String>,
pub metadata_refresh_mode: String,
#[schema(value_type = Option<String>)]
pub next_metadata_refresh_at: Option<chrono::DateTime<chrono::Utc>>,
pub series_count: i64,
/// First book IDs from up to 5 distinct series (for thumbnail fan display)
#[schema(value_type = Vec<String>)]
pub thumbnail_book_ids: Vec<Uuid>,
pub reading_status_provider: Option<String>,
pub reading_status_push_mode: String,
#[schema(value_type = Option<String>)]
pub next_reading_status_push_at: Option<chrono::DateTime<chrono::Utc>>,
pub download_detection_mode: String,
#[schema(value_type = Option<String>)]
pub next_download_detection_at: Option<chrono::DateTime<chrono::Utc>>,
}
#[derive(Deserialize, ToSchema)]
@@ -41,14 +55,27 @@ pub struct CreateLibraryRequest {
responses(
(status = 200, body = Vec<LibraryResponse>),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
),
security(("Bearer" = []))
)]
pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryResponse>>, ApiError> {
let rows = sqlx::query(
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider,
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count
"SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at, l.reading_status_provider, l.reading_status_push_mode, l.next_reading_status_push_at, l.download_detection_mode, l.next_download_detection_at,
(SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
(SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count,
COALESCE((
SELECT ARRAY_AGG(first_id ORDER BY series_name)
FROM (
SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified'))
COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name,
b.id as first_id
FROM books b
WHERE b.library_id = l.id
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'),
b.volume NULLS LAST, b.title ASC
LIMIT 5
) sub
), ARRAY[]::uuid[]) as thumbnail_book_ids
FROM libraries l ORDER BY l.created_at DESC"
)
.fetch_all(&state.pool)
@@ -62,12 +89,21 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<Li
root_path: row.get("root_path"),
enabled: row.get("enabled"),
book_count: row.get("book_count"),
series_count: row.get("series_count"),
monitor_enabled: row.get("monitor_enabled"),
scan_mode: row.get("scan_mode"),
next_scan_at: row.get("next_scan_at"),
watcher_enabled: row.get("watcher_enabled"),
metadata_provider: row.get("metadata_provider"),
fallback_metadata_provider: row.get("fallback_metadata_provider"),
metadata_refresh_mode: row.get("metadata_refresh_mode"),
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
thumbnail_book_ids: row.get("thumbnail_book_ids"),
reading_status_provider: row.get("reading_status_provider"),
reading_status_push_mode: row.get("reading_status_push_mode"),
next_reading_status_push_at: row.get("next_reading_status_push_at"),
download_detection_mode: row.get("download_detection_mode"),
next_download_detection_at: row.get("next_download_detection_at"),
})
.collect();
@@ -115,12 +151,21 @@ pub async fn create_library(
root_path,
enabled: true,
book_count: 0,
series_count: 0,
monitor_enabled: false,
scan_mode: "manual".to_string(),
next_scan_at: None,
watcher_enabled: false,
metadata_provider: None,
fallback_metadata_provider: None,
metadata_refresh_mode: "manual".to_string(),
next_metadata_refresh_at: None,
thumbnail_book_ids: vec![],
reading_status_provider: None,
reading_status_push_mode: "manual".to_string(),
next_reading_status_push_at: None,
download_detection_mode: "manual".to_string(),
next_download_detection_at: None,
}))
}
@@ -192,7 +237,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest};
(status = 200, body = IndexJobResponse),
(status = 404, description = "Library not found"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
),
security(("Bearer" = []))
)]
@@ -212,7 +256,8 @@ pub async fn scan_library(
}
let is_full = payload.as_ref().and_then(|p| p.full).unwrap_or(false);
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
let is_rescan = payload.as_ref().and_then(|p| p.rescan).unwrap_or(false);
let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
// Create indexing job for this library
let job_id = Uuid::new_v4();
@@ -241,6 +286,10 @@ pub struct UpdateMonitoringRequest {
#[schema(value_type = String, example = "hourly")]
pub scan_mode: String, // 'manual', 'hourly', 'daily', 'weekly'
pub watcher_enabled: Option<bool>,
#[schema(value_type = Option<String>, example = "daily")]
pub metadata_refresh_mode: Option<String>, // 'manual', 'hourly', 'daily', 'weekly'
#[schema(value_type = Option<String>, example = "daily")]
pub download_detection_mode: Option<String>, // 'manual', 'hourly', 'daily', 'weekly'
}
/// Update monitoring settings for a library
@@ -271,6 +320,18 @@ pub async fn update_monitoring(
return Err(ApiError::bad_request("scan_mode must be one of: manual, hourly, daily, weekly"));
}
// Validate metadata_refresh_mode
let metadata_refresh_mode = input.metadata_refresh_mode.as_deref().unwrap_or("manual");
if !valid_modes.contains(&metadata_refresh_mode) {
return Err(ApiError::bad_request("metadata_refresh_mode must be one of: manual, hourly, daily, weekly"));
}
// Validate download_detection_mode
let download_detection_mode = input.download_detection_mode.as_deref().unwrap_or("manual");
if !valid_modes.contains(&download_detection_mode) {
return Err(ApiError::bad_request("download_detection_mode must be one of: manual, hourly, daily, weekly"));
}
// Calculate next_scan_at if monitoring is enabled
let next_scan_at = if input.monitor_enabled {
let interval_minutes = match input.scan_mode.as_str() {
@@ -284,16 +345,46 @@ pub async fn update_monitoring(
None
};
// Calculate next_metadata_refresh_at
let next_metadata_refresh_at = if metadata_refresh_mode != "manual" {
let interval_minutes = match metadata_refresh_mode {
"hourly" => 60,
"daily" => 1440,
"weekly" => 10080,
_ => 1440,
};
Some(chrono::Utc::now() + chrono::Duration::minutes(interval_minutes))
} else {
None
};
// Calculate next_download_detection_at
let next_download_detection_at = if download_detection_mode != "manual" {
let interval_minutes = match download_detection_mode {
"hourly" => 60,
"daily" => 1440,
"weekly" => 10080,
_ => 1440,
};
Some(chrono::Utc::now() + chrono::Duration::minutes(interval_minutes))
} else {
None
};
let watcher_enabled = input.watcher_enabled.unwrap_or(false);
let result = sqlx::query(
"UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider"
"UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5, metadata_refresh_mode = $6, next_metadata_refresh_at = $7, download_detection_mode = $8, next_download_detection_at = $9 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at, reading_status_provider, reading_status_push_mode, next_reading_status_push_at, download_detection_mode, next_download_detection_at"
)
.bind(library_id)
.bind(input.monitor_enabled)
.bind(input.scan_mode)
.bind(next_scan_at)
.bind(watcher_enabled)
.bind(metadata_refresh_mode)
.bind(next_metadata_refresh_at)
.bind(download_detection_mode)
.bind(next_download_detection_at)
.fetch_optional(&state.pool)
.await?;
@@ -306,18 +397,43 @@ pub async fn update_monitoring(
.fetch_one(&state.pool)
.await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
.bind(library_id)
.fetch_one(&state.pool)
.await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b
WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5"
)
.bind(library_id)
.fetch_all(&state.pool)
.await
.unwrap_or_default();
Ok(Json(LibraryResponse {
id: row.get("id"),
name: row.get("name"),
root_path: row.get("root_path"),
enabled: row.get("enabled"),
book_count,
series_count,
monitor_enabled: row.get("monitor_enabled"),
scan_mode: row.get("scan_mode"),
next_scan_at: row.get("next_scan_at"),
watcher_enabled: row.get("watcher_enabled"),
metadata_provider: row.get("metadata_provider"),
fallback_metadata_provider: row.get("fallback_metadata_provider"),
metadata_refresh_mode: row.get("metadata_refresh_mode"),
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
thumbnail_book_ids,
reading_status_provider: row.get("reading_status_provider"),
reading_status_push_mode: row.get("reading_status_push_mode"),
next_reading_status_push_at: row.get("next_reading_status_push_at"),
download_detection_mode: row.get("download_detection_mode"),
next_download_detection_at: row.get("next_download_detection_at"),
}))
}
@@ -353,7 +469,7 @@ pub async fn update_metadata_provider(
let fallback = input.fallback_metadata_provider.as_deref().filter(|s| !s.is_empty());
let result = sqlx::query(
"UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider"
"UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at, reading_status_provider, reading_status_push_mode, next_reading_status_push_at, download_detection_mode, next_download_detection_at"
)
.bind(library_id)
.bind(provider)
@@ -370,17 +486,107 @@ pub async fn update_metadata_provider(
.fetch_one(&state.pool)
.await?;
let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
.bind(library_id)
.fetch_one(&state.pool)
.await?;
let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT b.id FROM books b
WHERE b.library_id = $1
ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
LIMIT 5"
)
.bind(library_id)
.fetch_all(&state.pool)
.await
.unwrap_or_default();
Ok(Json(LibraryResponse {
id: row.get("id"),
name: row.get("name"),
root_path: row.get("root_path"),
enabled: row.get("enabled"),
book_count,
series_count,
monitor_enabled: row.get("monitor_enabled"),
scan_mode: row.get("scan_mode"),
next_scan_at: row.get("next_scan_at"),
watcher_enabled: row.get("watcher_enabled"),
metadata_provider: row.get("metadata_provider"),
fallback_metadata_provider: row.get("fallback_metadata_provider"),
metadata_refresh_mode: row.get("metadata_refresh_mode"),
next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
thumbnail_book_ids,
reading_status_provider: row.get("reading_status_provider"),
reading_status_push_mode: row.get("reading_status_push_mode"),
next_reading_status_push_at: row.get("next_reading_status_push_at"),
download_detection_mode: row.get("download_detection_mode"),
next_download_detection_at: row.get("next_download_detection_at"),
}))
}
// Request body for PATCH /libraries/{id}/reading-status-provider.
#[derive(Deserialize, ToSchema)]
pub struct UpdateReadingStatusProviderRequest {
    // Provider identifier; None or an empty string clears the provider (stored as NULL).
    pub reading_status_provider: Option<String>,
    // Push cadence: "manual", "hourly", "daily" or "weekly"; absent defaults to "manual".
    pub reading_status_push_mode: Option<String>,
}
/// Update the reading status provider for a library
#[utoipa::path(
    patch,
    path = "/libraries/{id}/reading-status-provider",
    tag = "libraries",
    params(("id" = String, Path, description = "Library UUID")),
    request_body = UpdateReadingStatusProviderRequest,
    responses(
        (status = 200, description = "Updated"),
        (status = 404, description = "Library not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_reading_status_provider(
    State(state): State<AppState>,
    AxumPath(library_id): AxumPath<Uuid>,
    Json(input): Json<UpdateReadingStatusProviderRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // An absent or empty provider string means "no provider" — normalize to NULL.
    let provider = input.reading_status_provider.as_deref().filter(|s| !s.is_empty());

    // Missing push mode defaults to manual; anything else must be a known cadence.
    let push_mode = input.reading_status_push_mode.as_deref().unwrap_or("manual");
    let interval_minutes: Option<i64> = match push_mode {
        "manual" => None,
        "hourly" => Some(60),
        "daily" => Some(1440),
        "weekly" => Some(10080),
        _ => {
            return Err(ApiError::bad_request("reading_status_push_mode must be one of: manual, hourly, daily, weekly"));
        }
    };

    // Schedule the next automatic push only for non-manual modes.
    let next_push_at = interval_minutes.map(|mins| chrono::Utc::now() + chrono::Duration::minutes(mins));

    let result = sqlx::query(
        "UPDATE libraries SET reading_status_provider = $2, reading_status_push_mode = $3, next_reading_status_push_at = $4 WHERE id = $1"
    )
    .bind(library_id)
    .bind(provider)
    .bind(push_mode)
    .bind(next_push_at)
    .execute(&state.pool)
    .await?;

    // rows_affected == 0 means no library row matched the given UUID.
    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("library not found"));
    }

    Ok(Json(serde_json::json!({
        "reading_status_provider": provider,
        "reading_status_push_mode": push_mode,
    })))
}

View File

@@ -1,8 +1,12 @@
mod anilist;
mod auth;
mod authors;
mod books;
mod download_detection;
mod error;
mod handlers;
mod index_jobs;
mod job_poller;
mod komga;
mod libraries;
mod metadata;
@@ -12,13 +16,20 @@ mod metadata_providers;
mod api_middleware;
mod openapi;
mod pages;
mod prowlarr;
mod qbittorrent;
mod reading_progress;
mod reading_status_match;
mod reading_status_push;
mod search;
mod series;
mod settings;
mod state;
mod stats;
mod telegram;
mod thumbnails;
mod tokens;
mod users;
use std::sync::Arc;
use std::time::Instant;
@@ -83,14 +94,14 @@ async fn main() -> anyhow::Result<()> {
};
let admin_routes = Router::new()
.route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
.route("/libraries", axum::routing::post(libraries::create_library))
.route("/libraries/:id", delete(libraries::delete_library))
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
.route("/libraries/:id/reading-status-provider", axum::routing::patch(libraries::update_reading_status_provider))
.route("/books/:id", axum::routing::patch(books::update_book))
.route("/books/:id/convert", axum::routing::post(books::convert_book))
.route("/libraries/:library_id/series/:name", axum::routing::patch(books::update_series))
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
@@ -101,12 +112,30 @@ async fn main() -> anyhow::Result<()> {
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
.route("/folders", get(index_jobs::list_folders))
.route("/admin/users", get(users::list_users).post(users::create_user))
.route("/admin/users/:id", delete(users::delete_user).patch(users::update_user))
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
.route("/admin/tokens/:id", delete(tokens::revoke_token))
.route("/admin/tokens/:id", delete(tokens::revoke_token).patch(tokens::update_token))
.route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token))
.route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr))
.route("/prowlarr/test", get(prowlarr::test_prowlarr))
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
.route("/telegram/test", get(telegram::test_telegram))
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
.route("/komga/reports", get(komga::list_sync_reports))
.route("/komga/reports/:id", get(komga::get_sync_report))
.route("/anilist/status", get(anilist::get_status))
.route("/anilist/search", axum::routing::post(anilist::search_manga))
.route("/anilist/unlinked", get(anilist::list_unlinked))
.route("/anilist/sync/preview", get(anilist::preview_sync))
.route("/anilist/sync", axum::routing::post(anilist::sync_to_anilist))
.route("/anilist/pull", axum::routing::post(anilist::pull_from_anilist))
.route("/anilist/links", get(anilist::list_links))
.route("/anilist/libraries/:id", axum::routing::patch(anilist::toggle_library))
.route("/anilist/series/:library_id/:series_name", get(anilist::get_series_link))
.route("/anilist/series/:library_id/:series_name/link", axum::routing::post(anilist::link_series))
.route("/anilist/series/:library_id/:series_name/unlink", delete(anilist::unlink_series))
.route("/metadata/search", axum::routing::post(metadata::search_metadata))
.route("/metadata/match", axum::routing::post(metadata::create_metadata_match))
.route("/metadata/approve/:id", axum::routing::post(metadata::approve_metadata))
@@ -119,6 +148,15 @@ async fn main() -> anyhow::Result<()> {
.route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
.route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
.route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
.route("/reading-status/match", axum::routing::post(reading_status_match::start_match))
.route("/reading-status/match/:id/report", get(reading_status_match::get_match_report))
.route("/reading-status/match/:id/results", get(reading_status_match::get_match_results))
.route("/reading-status/push", axum::routing::post(reading_status_push::start_push))
.route("/reading-status/push/:id/report", get(reading_status_push::get_push_report))
.route("/reading-status/push/:id/results", get(reading_status_push::get_push_results))
.route("/download-detection/start", axum::routing::post(download_detection::start_detection))
.route("/download-detection/:id/report", get(download_detection::get_detection_report))
.route("/download-detection/:id/results", get(download_detection::get_detection_results))
.merge(settings::settings_routes())
.route_layer(middleware::from_fn_with_state(
state.clone(),
@@ -126,18 +164,22 @@ async fn main() -> anyhow::Result<()> {
));
let read_routes = Router::new()
.route("/libraries", get(libraries::list_libraries))
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
.route("/books", get(books::list_books))
.route("/books/ongoing", get(books::ongoing_books))
.route("/books/ongoing", get(series::ongoing_books))
.route("/books/:id", get(books::get_book))
.route("/books/:id/thumbnail", get(books::get_thumbnail))
.route("/books/:id/pages/:n", get(pages::get_page))
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
.route("/libraries/:library_id/series", get(books::list_series))
.route("/libraries/:library_id/series/:name/metadata", get(books::get_series_metadata))
.route("/series", get(books::list_all_series))
.route("/series/ongoing", get(books::ongoing_series))
.route("/series/statuses", get(books::series_statuses))
.route("/libraries/:library_id/series", get(series::list_series))
.route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
.route("/series", get(series::list_all_series))
.route("/series/ongoing", get(series::ongoing_series))
.route("/series/statuses", get(series::series_statuses))
.route("/series/provider-statuses", get(series::provider_statuses))
.route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
.route("/authors", get(authors::list_authors))
.route("/stats", get(stats::get_stats))
.route("/search", get(search::search_books))
.route_layer(middleware::from_fn_with_state(state.clone(), api_middleware::read_rate_limit))
@@ -146,6 +188,9 @@ async fn main() -> anyhow::Result<()> {
auth::require_read,
));
// Clone pool before state is moved into the router
let poller_pool = state.pool.clone();
let app = Router::new()
.route("/health", get(handlers::health))
.route("/ready", get(handlers::ready))
@@ -157,6 +202,11 @@ async fn main() -> anyhow::Result<()> {
.layer(middleware::from_fn_with_state(state.clone(), api_middleware::request_counter))
.with_state(state);
// Start background poller for API-only jobs (metadata_batch, metadata_refresh)
tokio::spawn(async move {
job_poller::run_job_poller(poller_pool, 5).await;
});
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "api listening");
axum::serve(listener, app).await?;

View File

@@ -369,6 +369,26 @@ pub async fn approve_metadata(
.await?;
}
// Notify via Telegram (with first book thumbnail if available)
let provider_for_notif: String = row.get("provider");
let thumbnail_path: Option<String> = sqlx::query_scalar(
"SELECT thumbnail_path FROM books WHERE library_id = $1 AND series_name = $2 AND thumbnail_path IS NOT NULL ORDER BY sort_order LIMIT 1",
)
.bind(library_id)
.bind(&series_name)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
notifications::notify(
state.pool.clone(),
notifications::NotificationEvent::MetadataApproved {
series_name: series_name.clone(),
provider: provider_for_notif,
thumbnail_path,
},
);
Ok(Json(ApproveResponse {
status: "approved".to_string(),
report,
@@ -693,10 +713,11 @@ pub(crate) async fn sync_series_metadata(
.get("start_year")
.and_then(|y| y.as_i64())
.map(|y| y as i32);
let status = metadata_json
.get("status")
.and_then(|s| s.as_str())
.map(normalize_series_status);
let status = if let Some(raw) = metadata_json.get("status").and_then(|s| s.as_str()) {
Some(normalize_series_status(&state.pool, raw).await)
} else {
None
};
// Fetch existing state before upsert
let existing = sqlx::query(
@@ -775,7 +796,7 @@ pub(crate) async fn sync_series_metadata(
let fields = vec![
FieldDef {
name: "description",
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("description")).map(|s| serde_json::Value::String(s)),
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("description")).map(serde_json::Value::String),
new: description.map(|s| serde_json::Value::String(s.to_string())),
},
FieldDef {
@@ -800,8 +821,8 @@ pub(crate) async fn sync_series_metadata(
},
FieldDef {
name: "status",
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("status")).map(|s| serde_json::Value::String(s)),
new: status.as_ref().map(|s| serde_json::Value::String(s.clone())),
old: existing.as_ref().and_then(|r| r.get::<Option<String>, _>("status")).map(serde_json::Value::String),
new: status.as_ref().map(|s: &String| serde_json::Value::String(s.clone())),
},
];
@@ -825,25 +846,35 @@ pub(crate) async fn sync_series_metadata(
Ok(report)
}
/// Normalize provider-specific status strings to a standard set:
/// "ongoing", "ended", "hiatus", "cancelled", or the original lowercase value
fn normalize_series_status(raw: &str) -> String {
/// Normalize provider-specific status strings using the status_mappings table.
/// Falls back to the lowercased provider status when no mapping is found.
pub(crate) async fn normalize_series_status(pool: &sqlx::PgPool, raw: &str) -> String {
let lower = raw.to_lowercase();
match lower.as_str() {
// AniList
"finished" => "ended".to_string(),
"releasing" => "ongoing".to_string(),
"not_yet_released" => "upcoming".to_string(),
"cancelled" => "cancelled".to_string(),
"hiatus" => "hiatus".to_string(),
// Bédéthèque
_ if lower.contains("finie") || lower.contains("terminée") => "ended".to_string(),
_ if lower.contains("en cours") => "ongoing".to_string(),
_ if lower.contains("hiatus") || lower.contains("suspendue") => "hiatus".to_string(),
_ if lower.contains("annulée") || lower.contains("arrêtée") => "cancelled".to_string(),
// Fallback
_ => lower,
// Try exact match first (only mapped entries)
if let Ok(Some(row)) = sqlx::query_scalar::<_, String>(
"SELECT mapped_status FROM status_mappings WHERE provider_status = $1 AND mapped_status IS NOT NULL",
)
.bind(&lower)
.fetch_optional(pool)
.await
{
return row;
}
// Try substring match (for Bédéthèque-style statuses like "Série finie")
if let Ok(Some(row)) = sqlx::query_scalar::<_, String>(
"SELECT mapped_status FROM status_mappings WHERE $1 LIKE '%' || provider_status || '%' AND mapped_status IS NOT NULL LIMIT 1",
)
.bind(&lower)
.fetch_optional(pool)
.await
{
return row;
}
// No mapping found — return the provider status as-is (lowercased)
lower
}
pub(crate) async fn sync_books_metadata(

View File

@@ -16,7 +16,7 @@ use crate::{error::ApiError, metadata_providers, state::AppState};
#[derive(Deserialize, ToSchema)]
pub struct MetadataBatchRequest {
pub library_id: String,
pub library_id: Option<String>,
}
#[derive(Serialize, ToSchema)]
@@ -76,8 +76,67 @@ pub async fn start_batch(
State(state): State<AppState>,
Json(body): Json<MetadataBatchRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
// All libraries case
if body.library_id.is_none() {
let library_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT id FROM libraries WHERE metadata_provider IS DISTINCT FROM 'none' ORDER BY name"
)
.fetch_all(&state.pool)
.await?;
let mut last_job_id: Option<Uuid> = None;
for library_id in library_ids {
let existing: Option<Uuid> = sqlx::query_scalar(
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_batch' AND status IN ('pending', 'running') LIMIT 1",
)
.bind(library_id)
.fetch_optional(&state.pool)
.await?;
if existing.is_some() { continue; }
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_batch', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
let pool = state.pool.clone();
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_metadata_batch(&pool, job_id, library_id).await {
warn!("[METADATA_BATCH] job {job_id} failed: {e}");
let _ = sqlx::query(
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
)
.bind(job_id)
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataBatchFailed {
library_name,
error: e.to_string(),
},
);
}
});
last_job_id = Some(job_id);
}
return Ok(Json(serde_json::json!({
"id": last_job_id.map(|id| id.to_string()),
"status": "started",
})));
}
let library_id: Uuid = body
.library_id
.unwrap()
.parse()
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
@@ -115,15 +174,21 @@ pub async fn start_batch(
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_batch', 'pending')",
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_batch', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
// Spawn the background processing task
// Spawn the background processing task (status already 'running' to avoid poller race)
let pool = state.pool.clone();
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_metadata_batch(&pool, job_id, library_id).await {
warn!("[METADATA_BATCH] job {job_id} failed: {e}");
@@ -134,6 +199,13 @@ pub async fn start_batch(
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataBatchFailed {
library_name,
error: e.to_string(),
},
);
}
});
@@ -300,7 +372,7 @@ pub async fn get_batch_results(
// Background processing
// ---------------------------------------------------------------------------
async fn process_metadata_batch(
pub(crate) async fn process_metadata_batch(
pool: &PgPool,
job_id: Uuid,
library_id: Uuid,
@@ -389,17 +461,19 @@ async fn process_metadata_batch(
update_progress(pool, job_id, processed, total, series_name).await;
insert_result(
pool,
&InsertResultParams {
job_id,
library_id,
series_name,
"already_linked",
None,
false,
0,
None,
None,
None,
Some("Unclassified series skipped"),
status: "already_linked",
provider_used: None,
fallback_used: false,
candidates_count: 0,
best_confidence: None,
best_candidate_json: None,
link_id: None,
error_message: Some("Unclassified series skipped"),
},
)
.await;
continue;
@@ -411,17 +485,19 @@ async fn process_metadata_batch(
update_progress(pool, job_id, processed, total, series_name).await;
insert_result(
pool,
&InsertResultParams {
job_id,
library_id,
series_name,
"already_linked",
None,
false,
0,
None,
None,
None,
None,
status: "already_linked",
provider_used: None,
fallback_used: false,
candidates_count: 0,
best_confidence: None,
best_candidate_json: None,
link_id: None,
error_message: None,
},
)
.await;
continue;
@@ -577,17 +653,19 @@ async fn process_metadata_batch(
insert_result(
pool,
&InsertResultParams {
job_id,
library_id,
series_name,
result_status,
provider_used.as_deref(),
status: result_status,
provider_used: provider_used.as_deref(),
fallback_used,
candidates_count,
best_confidence,
best_candidate.as_ref(),
best_candidate_json: best_candidate.as_ref(),
link_id,
error_msg.as_deref(),
error_message: error_msg.as_deref(),
},
)
.await;
@@ -615,6 +693,21 @@ async fn process_metadata_batch(
info!("[METADATA_BATCH] job={job_id} completed: {processed}/{total} series processed");
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(pool)
.await
.ok()
.flatten();
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataBatchCompleted {
library_name,
total_series: total,
processed,
},
);
Ok(())
}
@@ -765,9 +858,12 @@ async fn sync_series_from_candidate(
let publishers = &candidate.publishers;
let start_year = candidate.start_year;
let total_volumes = candidate.total_volumes;
let status = candidate.metadata_json
.get("status")
.and_then(|s| s.as_str());
let status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
Some(crate::metadata::normalize_series_status(pool, raw).await)
} else {
None
};
let status = status.as_deref();
sqlx::query(
r#"
@@ -1070,20 +1166,21 @@ pub(crate) async fn update_progress(pool: &PgPool, job_id: Uuid, processed: i32,
.await;
}
async fn insert_result(
pool: &PgPool,
struct InsertResultParams<'a> {
job_id: Uuid,
library_id: Uuid,
series_name: &str,
status: &str,
provider_used: Option<&str>,
series_name: &'a str,
status: &'a str,
provider_used: Option<&'a str>,
fallback_used: bool,
candidates_count: i32,
best_confidence: Option<f32>,
best_candidate_json: Option<&serde_json::Value>,
best_candidate_json: Option<&'a serde_json::Value>,
link_id: Option<Uuid>,
error_message: Option<&str>,
) {
error_message: Option<&'a str>,
}
async fn insert_result(pool: &PgPool, params: &InsertResultParams<'_>) {
let _ = sqlx::query(
r#"
INSERT INTO metadata_batch_results
@@ -1091,17 +1188,17 @@ async fn insert_result(
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
"#,
)
.bind(job_id)
.bind(library_id)
.bind(series_name)
.bind(status)
.bind(provider_used)
.bind(fallback_used)
.bind(candidates_count)
.bind(best_confidence)
.bind(best_candidate_json)
.bind(link_id)
.bind(error_message)
.bind(params.job_id)
.bind(params.library_id)
.bind(params.series_name)
.bind(params.status)
.bind(params.provider_used)
.bind(params.fallback_used)
.bind(params.candidates_count)
.bind(params.best_confidence)
.bind(params.best_candidate_json)
.bind(params.link_id)
.bind(params.error_message)
.execute(pool)
.await;
}

View File

@@ -128,7 +128,7 @@ async fn search_series_impl(
let mut candidates: Vec<SeriesCandidate> = media
.iter()
.filter_map(|m| {
let id = m.get("id").and_then(|id| id.as_i64())? as i64;
let id = m.get("id").and_then(|id| id.as_i64())?;
let title_obj = m.get("title")?;
let title = title_obj
.get("english")

View File

@@ -497,6 +497,13 @@ async fn get_series_books_impl(
}))
.collect();
static RE_TOME: std::sync::LazyLock<regex::Regex> =
std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)-Tome-\d+-").unwrap());
static RE_BOOK_ID: std::sync::LazyLock<regex::Regex> =
std::sync::LazyLock::new(|| regex::Regex::new(r"-(\d+)\.html").unwrap());
static RE_VOLUME: std::sync::LazyLock<regex::Regex> =
std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)Tome-(\d+)-").unwrap());
for (idx, album_el) in doc.select(&album_sel).enumerate() {
// Title from <a class="titre" title="..."> — the title attribute is clean
let title_sel = Selector::parse("a.titre").ok();
@@ -513,16 +520,21 @@ async fn get_series_books_impl(
// External book ID from album URL (e.g. "...-1063.html")
let album_url = title_el.and_then(|el| el.value().attr("href")).unwrap_or("");
let external_book_id = regex::Regex::new(r"-(\d+)\.html")
.ok()
.and_then(|re| re.captures(album_url))
// Only keep main tomes — their URLs contain "Tome-{N}-"
// Skip hors-série (HS), intégrales (INT/INTFL), romans, coffrets, etc.
if !RE_TOME.is_match(album_url) {
continue;
}
let external_book_id = RE_BOOK_ID
.captures(album_url)
.map(|c| c[1].to_string())
.unwrap_or_default();
// Volume number from URL pattern "Tome-{N}-" or from itemprop name
let volume_number = regex::Regex::new(r"(?i)Tome-(\d+)-")
.ok()
.and_then(|re| re.captures(album_url))
let volume_number = RE_VOLUME
.captures(album_url)
.and_then(|c| c[1].parse::<i32>().ok())
.or_else(|| extract_volume_from_title(&title));
@@ -640,13 +652,13 @@ fn compute_confidence(title: &str, query: &str) -> f32 {
return 1.0;
}
if title_lower.starts_with(&query_lower) || query_lower.starts_with(&title_lower) {
if title_lower.starts_with(&query_lower) || query_lower.starts_with(&title_lower)
|| title_norm.starts_with(&query_norm) || query_norm.starts_with(&title_norm)
{
0.85
} else if title_norm.starts_with(&query_norm) || query_norm.starts_with(&title_norm) {
0.85
} else if title_lower.contains(&query_lower) || query_lower.contains(&title_lower) {
0.7
} else if title_norm.contains(&query_norm) || query_norm.contains(&title_norm) {
} else if title_lower.contains(&query_lower) || query_lower.contains(&title_lower)
|| title_norm.contains(&query_norm) || query_norm.contains(&title_norm)
{
0.7
} else {
let common: usize = query_lower

View File

@@ -86,11 +86,11 @@ async fn search_series_impl(
.iter()
.filter_map(|vol| {
let name = vol.get("name").and_then(|n| n.as_str())?.to_string();
let id = vol.get("id").and_then(|id| id.as_i64())? as i64;
let id = vol.get("id").and_then(|id| id.as_i64())?;
let description = vol
.get("description")
.and_then(|d| d.as_str())
.map(|d| strip_html(d));
.map(strip_html);
let publisher = vol
.get("publisher")
.and_then(|p| p.get("name"))
@@ -180,7 +180,7 @@ async fn get_series_books_impl(
let books: Vec<BookCandidate> = results
.iter()
.filter_map(|issue| {
let id = issue.get("id").and_then(|id| id.as_i64())? as i64;
let id = issue.get("id").and_then(|id| id.as_i64())?;
let name = issue
.get("name")
.and_then(|n| n.as_str())
@@ -194,7 +194,7 @@ async fn get_series_books_impl(
let description = issue
.get("description")
.and_then(|d| d.as_str())
.map(|d| strip_html(d));
.map(strip_html);
let cover_url = issue
.get("image")
.and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))

View File

@@ -295,7 +295,7 @@ async fn get_series_books_impl(
let mut books: Vec<BookCandidate> = items
.iter()
.map(|item| volume_to_book_candidate(item))
.map(volume_to_book_candidate)
.collect();
// Sort by volume number

View File

@@ -144,11 +144,11 @@ async fn search_series_impl(
entry.publishers.push(p.clone());
}
}
if entry.start_year.is_none() || first_publish_year.map_or(false, |y| entry.start_year.unwrap() > y) {
if first_publish_year.is_some() {
if (entry.start_year.is_none() || first_publish_year.is_some_and(|y| entry.start_year.unwrap() > y))
&& first_publish_year.is_some()
{
entry.start_year = first_publish_year;
}
}
if entry.cover_url.is_none() {
entry.cover_url = cover_url;
}

View File

@@ -17,7 +17,7 @@ use crate::metadata_batch::{load_provider_config_from_pool, is_job_cancelled, up
#[derive(Deserialize, ToSchema)]
pub struct MetadataRefreshRequest {
pub library_id: String,
pub library_id: Option<String>,
}
/// A single field change: old → new
@@ -83,8 +83,82 @@ pub async fn start_refresh(
State(state): State<AppState>,
Json(body): Json<MetadataRefreshRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
// All libraries case
if body.library_id.is_none() {
let library_ids: Vec<Uuid> = sqlx::query_scalar(
"SELECT id FROM libraries WHERE metadata_provider IS DISTINCT FROM 'none' ORDER BY name"
)
.fetch_all(&state.pool)
.await?;
let mut last_job_id: Option<Uuid> = None;
for library_id in library_ids {
let link_count: i64 = sqlx::query_scalar(
r#"
SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
"#,
)
.bind(library_id)
.fetch_one(&state.pool)
.await
.unwrap_or(0);
if link_count == 0 { continue; }
let existing: Option<Uuid> = sqlx::query_scalar(
"SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_refresh' AND status IN ('pending', 'running') LIMIT 1",
)
.bind(library_id)
.fetch_optional(&state.pool)
.await?;
if existing.is_some() { continue; }
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_refresh', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
let pool = state.pool.clone();
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
let _ = sqlx::query(
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
)
.bind(job_id)
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataRefreshFailed {
library_name,
error: e.to_string(),
},
);
}
});
last_job_id = Some(job_id);
}
return Ok(Json(serde_json::json!({
"id": last_job_id.map(|id| id.to_string()),
"status": "started",
})));
}
let library_id: Uuid = body
.library_id
.unwrap()
.parse()
.map_err(|_| ApiError::bad_request("invalid library_id"))?;
@@ -110,9 +184,16 @@ pub async fn start_refresh(
})));
}
// Check there are approved links to refresh
// Check there are approved links to refresh (only ongoing series)
let link_count: i64 = sqlx::query_scalar(
"SELECT COUNT(*) FROM external_metadata_links WHERE library_id = $1 AND status = 'approved'",
r#"
SELECT COUNT(*) FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
"#,
)
.bind(library_id)
.fetch_one(&state.pool)
@@ -124,15 +205,21 @@ pub async fn start_refresh(
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_refresh', 'pending')",
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_refresh', 'running', NOW())",
)
.bind(job_id)
.bind(library_id)
.execute(&state.pool)
.await?;
// Spawn the background processing task
// Spawn the background processing task (status already 'running' to avoid poller race)
let pool = state.pool.clone();
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(&state.pool)
.await
.ok()
.flatten();
tokio::spawn(async move {
if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
@@ -143,6 +230,13 @@ pub async fn start_refresh(
.bind(e.to_string())
.execute(&pool)
.await;
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataRefreshFailed {
library_name,
error: e.to_string(),
},
);
}
});
@@ -209,7 +303,7 @@ pub async fn get_refresh_report(
// Background processing
// ---------------------------------------------------------------------------
async fn process_metadata_refresh(
pub(crate) async fn process_metadata_refresh(
pool: &PgPool,
job_id: Uuid,
library_id: Uuid,
@@ -221,13 +315,17 @@ async fn process_metadata_refresh(
.await
.map_err(|e| e.to_string())?;
// Get all approved links for this library
// Get approved links for this library, only for ongoing series (not ended/cancelled)
let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
r#"
SELECT id, series_name, provider, external_id
FROM external_metadata_links
WHERE library_id = $1 AND status = 'approved'
ORDER BY series_name
SELECT eml.id, eml.series_name, eml.provider, eml.external_id
FROM external_metadata_links eml
LEFT JOIN series_metadata sm
ON sm.library_id = eml.library_id AND sm.name = eml.series_name
WHERE eml.library_id = $1
AND eml.status = 'approved'
AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
ORDER BY eml.series_name
"#,
)
.bind(library_id)
@@ -319,6 +417,22 @@ async fn process_metadata_refresh(
info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_optional(pool)
.await
.ok()
.flatten();
notifications::notify(
pool.clone(),
notifications::NotificationEvent::MetadataRefreshCompleted {
library_name,
refreshed,
unchanged,
errors,
},
);
Ok(())
}
@@ -574,9 +688,12 @@ async fn sync_series_with_diff(
let new_publishers = &candidate.publishers;
let new_start_year = candidate.start_year;
let new_total_volumes = candidate.total_volumes;
let new_status = candidate.metadata_json
.get("status")
.and_then(|s| s.as_str());
let new_status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
Some(crate::metadata::normalize_series_status(pool, raw).await)
} else {
None
};
let new_status = new_status.as_deref();
// Fetch existing series metadata for diffing
let existing = sqlx::query(

View File

@@ -10,14 +10,14 @@ use utoipa::OpenApi;
crate::reading_progress::update_reading_progress,
crate::reading_progress::mark_series_read,
crate::books::get_thumbnail,
crate::books::list_series,
crate::books::list_all_series,
crate::books::ongoing_series,
crate::books::ongoing_books,
crate::series::list_series,
crate::series::list_all_series,
crate::series::ongoing_series,
crate::series::ongoing_books,
crate::books::convert_book,
crate::books::update_book,
crate::books::get_series_metadata,
crate::books::update_series,
crate::series::get_series_metadata,
crate::series::update_series,
crate::pages::get_page,
crate::search::search_books,
crate::index_jobs::enqueue_rebuild,
@@ -35,10 +35,12 @@ use utoipa::OpenApi;
crate::libraries::delete_library,
crate::libraries::scan_library,
crate::libraries::update_monitoring,
crate::libraries::update_metadata_provider,
crate::tokens::list_tokens,
crate::tokens::create_token,
crate::tokens::revoke_token,
crate::tokens::delete_token,
crate::authors::list_authors,
crate::stats::get_stats,
crate::settings::get_settings,
crate::settings::get_setting,
@@ -53,6 +55,46 @@ use utoipa::OpenApi;
crate::metadata::get_metadata_links,
crate::metadata::get_missing_books,
crate::metadata::delete_metadata_link,
crate::series::series_statuses,
crate::series::provider_statuses,
crate::settings::list_status_mappings,
crate::settings::upsert_status_mapping,
crate::settings::delete_status_mapping,
crate::prowlarr::search_prowlarr,
crate::prowlarr::test_prowlarr,
crate::qbittorrent::add_torrent,
crate::qbittorrent::test_qbittorrent,
crate::metadata_batch::start_batch,
crate::metadata_batch::get_batch_report,
crate::metadata_batch::get_batch_results,
crate::metadata_refresh::start_refresh,
crate::metadata_refresh::get_refresh_report,
crate::komga::sync_komga_read_books,
crate::komga::list_sync_reports,
crate::komga::get_sync_report,
crate::users::list_users,
crate::users::create_user,
crate::users::update_user,
crate::users::delete_user,
crate::tokens::update_token,
crate::libraries::update_reading_status_provider,
crate::reading_status_match::start_match,
crate::reading_status_match::get_match_report,
crate::reading_status_match::get_match_results,
crate::reading_status_push::start_push,
crate::reading_status_push::get_push_report,
crate::reading_status_push::get_push_results,
crate::anilist::get_status,
crate::anilist::search_manga,
crate::anilist::get_series_link,
crate::anilist::link_series,
crate::anilist::unlink_series,
crate::anilist::toggle_library,
crate::anilist::list_unlinked,
crate::anilist::preview_sync,
crate::anilist::sync_to_anilist,
crate::anilist::pull_from_anilist,
crate::anilist::list_links,
),
components(
schemas(
@@ -64,14 +106,14 @@ use utoipa::OpenApi;
crate::reading_progress::UpdateReadingProgressRequest,
crate::reading_progress::MarkSeriesReadRequest,
crate::reading_progress::MarkSeriesReadResponse,
crate::books::SeriesItem,
crate::books::SeriesPage,
crate::books::ListAllSeriesQuery,
crate::books::OngoingQuery,
crate::series::SeriesItem,
crate::series::SeriesPage,
crate::series::ListAllSeriesQuery,
crate::series::OngoingQuery,
crate::books::UpdateBookRequest,
crate::books::SeriesMetadata,
crate::books::UpdateSeriesRequest,
crate::books::UpdateSeriesResponse,
crate::series::SeriesMetadata,
crate::series::UpdateSeriesRequest,
crate::series::UpdateSeriesResponse,
crate::pages::PageQuery,
crate::search::SearchQuery,
crate::search::SearchResponse,
@@ -86,6 +128,7 @@ use utoipa::OpenApi;
crate::libraries::LibraryResponse,
crate::libraries::CreateLibraryRequest,
crate::libraries::UpdateMonitoringRequest,
crate::libraries::UpdateMetadataProviderRequest,
crate::tokens::CreateTokenRequest,
crate::tokens::TokenResponse,
crate::tokens::CreatedTokenResponse,
@@ -93,6 +136,11 @@ use utoipa::OpenApi;
crate::settings::ClearCacheResponse,
crate::settings::CacheStats,
crate::settings::ThumbnailStats,
crate::settings::StatusMappingDto,
crate::settings::UpsertStatusMappingRequest,
crate::authors::ListAuthorsQuery,
crate::authors::AuthorItem,
crate::authors::AuthorsPageResponse,
crate::stats::StatsResponse,
crate::stats::StatsOverview,
crate::stats::ReadingStatusStats,
@@ -101,6 +149,13 @@ use utoipa::OpenApi;
crate::stats::LibraryStats,
crate::stats::TopSeries,
crate::stats::MonthlyAdditions,
crate::stats::MetadataStats,
crate::stats::ProviderCount,
crate::stats::CurrentlyReadingItem,
crate::stats::RecentlyReadItem,
crate::stats::MonthlyReading,
crate::stats::UserMonthlyReading,
crate::stats::JobTimePoint,
crate::metadata::ApproveRequest,
crate::metadata::ApproveResponse,
crate::metadata::SyncReport,
@@ -113,6 +168,44 @@ use utoipa::OpenApi;
crate::metadata::ExternalMetadataLinkDto,
crate::metadata::MissingBooksDto,
crate::metadata::MissingBookItem,
crate::qbittorrent::QBittorrentAddRequest,
crate::qbittorrent::QBittorrentAddResponse,
crate::qbittorrent::QBittorrentTestResponse,
crate::prowlarr::ProwlarrSearchRequest,
crate::prowlarr::ProwlarrRelease,
crate::prowlarr::ProwlarrCategory,
crate::prowlarr::ProwlarrSearchResponse,
crate::prowlarr::MissingVolumeInput,
crate::prowlarr::ProwlarrTestResponse,
crate::metadata_batch::MetadataBatchRequest,
crate::metadata_batch::MetadataBatchReportDto,
crate::metadata_batch::MetadataBatchResultDto,
crate::metadata_refresh::MetadataRefreshRequest,
crate::metadata_refresh::MetadataRefreshReportDto,
crate::komga::KomgaSyncRequest,
crate::komga::KomgaSyncResponse,
crate::komga::KomgaSyncReportSummary,
crate::users::UserResponse,
crate::users::CreateUserRequest,
crate::tokens::UpdateTokenRequest,
crate::libraries::UpdateReadingStatusProviderRequest,
crate::reading_status_match::ReadingStatusMatchRequest,
crate::reading_status_match::ReadingStatusMatchReportDto,
crate::reading_status_match::ReadingStatusMatchResultDto,
crate::reading_status_push::ReadingStatusPushRequest,
crate::reading_status_push::ReadingStatusPushReportDto,
crate::reading_status_push::ReadingStatusPushResultDto,
crate::anilist::AnilistStatusResponse,
crate::anilist::AnilistMediaResult,
crate::anilist::AnilistSeriesLinkResponse,
crate::anilist::AnilistSyncPreviewItem,
crate::anilist::AnilistSyncItem,
crate::anilist::AnilistSyncReport,
crate::anilist::AnilistPullItem,
crate::anilist::AnilistPullReport,
crate::anilist::AnilistSearchRequest,
crate::anilist::AnilistLinkRequest,
crate::anilist::AnilistLibraryToggleRequest,
ErrorResponse,
)
),
@@ -120,12 +213,23 @@ use utoipa::OpenApi;
("Bearer" = [])
),
tags(
(name = "books", description = "Read-only endpoints for browsing and searching books"),
(name = "books", description = "Book browsing, details and management"),
(name = "series", description = "Series browsing, filtering and management"),
(name = "search", description = "Full-text search across books and series"),
(name = "reading-progress", description = "Reading progress tracking per book"),
(name = "libraries", description = "Library management endpoints (Admin only)"),
(name = "authors", description = "Author browsing and listing"),
(name = "stats", description = "Collection statistics and dashboard data"),
(name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"),
(name = "indexing", description = "Search index management and job control (Admin only)"),
(name = "metadata", description = "External metadata providers and matching (Admin only)"),
(name = "komga", description = "Komga read-status sync (Admin only)"),
(name = "tokens", description = "API token management (Admin only)"),
(name = "settings", description = "Application settings and cache management (Admin only)"),
(name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
(name = "qbittorrent", description = "qBittorrent download client integration (Admin only)"),
(name = "users", description = "Reader user management (Admin only)"),
(name = "reading_status", description = "Reading status match and push jobs (Admin only)"),
(name = "anilist", description = "AniList integration for reading status sync (Admin only)"),
),
modifiers(&SecurityAddon)
)]

View File

@@ -277,7 +277,17 @@ pub async fn get_page(
let cache_dir2 = cache_dir_path.clone();
let format2 = format;
tokio::spawn(async move {
prefetch_page(state2, book_id, &abs_path2, next_page, format2, quality, width, filter, timeout_secs, &cache_dir2).await;
prefetch_page(state2, &PrefetchParams {
book_id,
abs_path: &abs_path2,
page: next_page,
format: format2,
quality,
width,
filter,
timeout_secs,
cache_dir: &cache_dir2,
}).await;
});
}
@@ -290,19 +300,30 @@ pub async fn get_page(
}
}
/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
async fn prefetch_page(
state: AppState,
struct PrefetchParams<'a> {
book_id: Uuid,
abs_path: &str,
abs_path: &'a str,
page: u32,
format: OutputFormat,
quality: u8,
width: u32,
filter: image::imageops::FilterType,
timeout_secs: u64,
cache_dir: &Path,
) {
cache_dir: &'a Path,
}
/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
async fn prefetch_page(state: AppState, params: &PrefetchParams<'_>) {
let book_id = params.book_id;
let page = params.page;
let format = params.format;
let quality = params.quality;
let width = params.width;
let filter = params.filter;
let timeout_secs = params.timeout_secs;
let abs_path = params.abs_path;
let cache_dir = params.cache_dir;
let mem_key = format!("{book_id}:{page}:{}:{quality}:{width}", format.extension());
// Already in memory cache?
if state.page_cache.lock().await.contains(&mem_key) {
@@ -330,6 +351,7 @@ async fn prefetch_page(
Some(ref e) if e == "cbz" => "cbz",
Some(ref e) if e == "cbr" => "cbr",
Some(ref e) if e == "pdf" => "pdf",
Some(ref e) if e == "epub" => "epub",
_ => return,
}
.to_string();
@@ -458,6 +480,7 @@ fn render_page(
"cbz" => parsers::BookFormat::Cbz,
"cbr" => parsers::BookFormat::Cbr,
"pdf" => parsers::BookFormat::Pdf,
"epub" => parsers::BookFormat::Epub,
_ => return Err(ApiError::bad_request("unsupported source format")),
};

538
apps/api/src/prowlarr.rs Normal file
View File

@@ -0,0 +1,538 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
// ─── Types ──────────────────────────────────────────────────────────────────
/// A missing-volume hint sent by the client so releases can be tagged
/// with the volumes they cover.
#[derive(Deserialize, ToSchema)]
pub struct MissingVolumeInput {
    /// Volume number to look for in release titles (None = unknown).
    pub volume_number: Option<i32>,
    // Accepted in the payload but currently unused (hence the allow).
    #[allow(dead_code)]
    pub title: Option<String>,
}
/// Request body for `POST /prowlarr/search`.
#[derive(Deserialize, ToSchema)]
pub struct ProwlarrSearchRequest {
    /// Series name; quoted verbatim in the generated Prowlarr query.
    pub series_name: String,
    /// Optional single volume number appended to the query.
    pub volume_number: Option<i32>,
    /// When set, used as the query instead of series name + volume.
    pub custom_query: Option<String>,
    /// Missing volumes used to tag matching releases in the response.
    pub missing_volumes: Option<Vec<MissingVolumeInput>>,
}
/// Release entry as deserialized from Prowlarr's `/api/v1/search` response
/// (camelCase JSON), before any missing-volume matching is applied.
#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRawRelease {
    pub guid: String,
    pub title: String,
    /// Raw size reported by the indexer — presumably bytes; TODO confirm.
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
}
/// Release as returned to our API clients: the raw Prowlarr fields plus an
/// optional list of missing volumes the title matched.
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
    /// Missing volume numbers covered by this release's title; omitted from
    /// the JSON when no matching was requested or nothing matched.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub matched_missing_volumes: Option<Vec<i32>>,
}
/// Indexer category attached to a release by Prowlarr.
#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrCategory {
    pub id: i32,
    pub name: Option<String>,
}
/// Response of `POST /prowlarr/search`.
#[derive(Serialize, ToSchema)]
pub struct ProwlarrSearchResponse {
    /// Releases in the order Prowlarr returned them.
    pub results: Vec<ProwlarrRelease>,
    /// The query string that was actually sent to Prowlarr (for display/debug).
    pub query: String,
}
/// Result of the `GET /prowlarr/test` connectivity check.
#[derive(Serialize, ToSchema)]
pub struct ProwlarrTestResponse {
    pub success: bool,
    /// Human-readable outcome (indexer count on success, error details otherwise).
    pub message: String,
    /// Number of indexers visible on the Prowlarr side; None on failure.
    pub indexer_count: Option<i32>,
}
// ─── Config helper ──────────────────────────────────────────────────────────
/// Shape of the JSON stored under the `prowlarr` key in `app_settings`.
#[derive(Deserialize)]
struct ProwlarrConfig {
    url: String,
    api_key: String,
    /// Category ids to search; defaults to [7030, 7020] when absent.
    categories: Option<Vec<i32>>,
}
/// Crate-visible wrapper around [`load_prowlarr_config`] so sibling modules
/// can obtain the validated `(url, api_key, categories)` tuple.
pub(crate) async fn load_prowlarr_config_internal(
    pool: &sqlx::PgPool,
) -> Result<(String, String, Vec<i32>), ApiError> {
    load_prowlarr_config(pool).await
}
/// Succeeds iff a complete Prowlarr configuration exists in `app_settings`;
/// the loaded values themselves are discarded.
pub(crate) async fn check_prowlarr_configured(pool: &sqlx::PgPool) -> Result<(), ApiError> {
    let _ = load_prowlarr_config(pool).await?;
    Ok(())
}
/// Crate-visible re-export of [`extract_volumes_from_title`] for use by
/// sibling modules (the implementation itself stays private).
pub(crate) fn extract_volumes_from_title_pub(title: &str) -> Vec<i32> {
    extract_volumes_from_title(title)
}
/// Load and validate the Prowlarr configuration stored under the `prowlarr`
/// key of `app_settings`.
///
/// Returns `(base_url_without_trailing_slash, api_key, categories)`.
/// Errors: 400 when unconfigured or url/api_key empty, 500 when the stored
/// JSON does not match [`ProwlarrConfig`].
async fn load_prowlarr_config(
    pool: &sqlx::PgPool,
) -> Result<(String, String, Vec<i32>), ApiError> {
    let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'prowlarr'")
        .fetch_optional(pool)
        .await?;
    let row = row.ok_or_else(|| ApiError::bad_request("Prowlarr is not configured"))?;
    let value: serde_json::Value = row.get("value");
    let config: ProwlarrConfig = serde_json::from_value(value)
        .map_err(|e| ApiError::internal(format!("invalid prowlarr config: {e}")))?;
    if config.url.is_empty() || config.api_key.is_empty() {
        return Err(ApiError::bad_request(
            "Prowlarr URL and API key must be configured in settings",
        ));
    }
    // Normalise the base URL so callers can append paths safely.
    let url = config.url.trim_end_matches('/').to_string();
    // Default category ids when none configured — TODO confirm 7030/7020 mapping.
    let categories = config.categories.unwrap_or_else(|| vec![7030, 7020]);
    Ok((url, config.api_key, categories))
}
// ─── Volume matching ─────────────────────────────────────────────────────────
/// Extract volume numbers from a release title.
///
/// Handles individual volumes (T01, Tome 01, Vol. 01, v01, #01) and also
/// **range packs** like `T01.T15`, `[T001.T104]`, `T01-T15`, `Tome 01 à Tome 15`
/// — the range is expanded so every volume in [start..=end] is returned.
///
/// Returns volumes in discovery order, deduplicated.
fn extract_volumes_from_title(title: &str) -> Vec<i32> {
    let lower = title.to_lowercase();
    let chars: Vec<char> = lower.chars().collect();
    let mut volumes = Vec::new();
    // Pass 1 — range expansion: PREFIX NUMBER (SEP) PREFIX NUMBER
    // Separator: '.' | '-' | 'à'
    let mut i = 0;
    while i < chars.len() {
        if let Some((n1, after1)) = read_vol_prefix_number(&chars, i) {
            let mut j = after1;
            while j < chars.len() && chars[j] == ' ' {
                j += 1;
            }
            // Accept '.', '-' or French 'à' (U+00E0, "Tome 01 à Tome 15") as
            // range separators.
            let after_sep = if j < chars.len()
                && (chars[j] == '.' || chars[j] == '-' || chars[j] == '\u{00e0}')
            {
                Some(j + 1)
            } else {
                None
            };
            if let Some(sep_end) = after_sep {
                let mut k = sep_end;
                while k < chars.len() && chars[k] == ' ' {
                    k += 1;
                }
                if let Some((n2, _)) = read_vol_prefix_number(&chars, k) {
                    // Sanity cap: refuse absurd ranges (> 500 volumes).
                    if n1 < n2 && n2 - n1 <= 500 {
                        for v in n1..=n2 {
                            if !volumes.contains(&v) {
                                volumes.push(v);
                            }
                        }
                        i = after1;
                        continue;
                    }
                }
            }
        }
        i += 1;
    }
    // Pass 2 — individual volumes not already captured by range expansion.
    //
    // BUGFIX: the previous implementation searched `lower` with `str::find`
    // (BYTE offsets) and then indexed the CHAR vector with those byte
    // offsets. For titles containing multi-byte characters (e.g. "Astérix")
    // the two indexings diverge, producing wrong slices or an out-of-bounds
    // panic. This pass now scans the char vector exclusively.
    let prefixes = ["tome", "vol.", "vol ", "t", "v", "#"];
    let len = chars.len();
    for prefix in &prefixes {
        let pat: Vec<char> = prefix.chars().collect();
        let plen = pat.len();
        let mut pos = 0usize;
        while pos + plen <= len {
            if chars[pos..pos + plen] != pat[..] {
                pos += 1;
                continue;
            }
            // For single-char prefixes (t, v), require a word boundary so the
            // 't' inside e.g. "start" is not treated as a volume marker.
            if plen == 1 && *prefix != "#" && pos > 0 && chars[pos - 1].is_alphanumeric() {
                pos += 1;
                continue;
            }
            let after = pos + plen;
            // Skip optional spaces after the prefix, then read digits.
            let mut i = after;
            while i < len && chars[i] == ' ' {
                i += 1;
            }
            let digit_start = i;
            while i < len && chars[i].is_ascii_digit() {
                i += 1;
            }
            if i > digit_start {
                let digits: String = chars[digit_start..i].iter().collect();
                if let Ok(num) = digits.parse::<i32>() {
                    if !volumes.contains(&num) {
                        volumes.push(num);
                    }
                }
            }
            // Resume scanning just past the matched prefix (same stride as before).
            pos = after;
        }
    }
    volumes
}
/// Try to read a vol-prefixed number starting at `pos` in the `chars` slice.
/// Returns `(number, position_after_last_digit)` or `None`.
/// Prefixes recognised (longest first to avoid "t" matching "tome"):
/// `tome`, `vol.`, `vol `, `t`, `v`, `#`.
fn read_vol_prefix_number(chars: &[char], pos: usize) -> Option<(i32, usize)> {
    if pos >= chars.len() {
        return None;
    }
    // Bounded look-ahead: the longest prefixes ("tome", "vol.", "vol ") are 4
    // chars, so 4 chars suffice to test every prefix. (The previous version
    // collected the ENTIRE tail into a String on every call, which made the
    // caller's per-position scan quadratic in the title length.)
    let lookahead: String = chars[pos..].iter().take(4).collect();
    // (prefix, needs_word_boundary)
    const PREFIXES: &[(&str, bool)] = &[
        ("tome", false),
        ("vol.", false),
        ("vol ", false),
        ("t", true),
        ("v", true),
        ("#", false),
    ];
    let mut prefix_char_count = 0usize;
    for (p, needs_boundary) in PREFIXES {
        if lookahead.starts_with(p) {
            // Single-letter prefixes must sit at a word boundary so that e.g.
            // the trailing 't' of "salt05" is not taken as a volume marker.
            if *needs_boundary && pos > 0 && chars[pos - 1].is_alphanumeric() {
                continue;
            }
            prefix_char_count = p.chars().count();
            break;
        }
    }
    if prefix_char_count == 0 {
        return None;
    }
    // Skip optional spaces between prefix and digits, then read the number.
    let mut i = pos + prefix_char_count;
    while i < chars.len() && chars[i] == ' ' {
        i += 1;
    }
    let digit_start = i;
    while i < chars.len() && chars[i].is_ascii_digit() {
        i += 1;
    }
    if i == digit_start {
        return None;
    }
    let n: i32 = chars[digit_start..i]
        .iter()
        .collect::<String>()
        .parse()
        .ok()?;
    Some((n, i))
}
/// Annotate each release with the missing volume numbers its title covers.
///
/// When `missing` contains no usable volume numbers, every release is passed
/// through with `matched_missing_volumes: None`.
fn match_missing_volumes(
    releases: Vec<ProwlarrRawRelease>,
    missing: &[MissingVolumeInput],
) -> Vec<ProwlarrRelease> {
    let wanted: Vec<i32> = missing.iter().filter_map(|m| m.volume_number).collect();
    let mut out = Vec::with_capacity(releases.len());
    for raw in releases {
        let mut matched: Option<Vec<i32>> = None;
        if !wanted.is_empty() {
            // Keep only the title's volumes that are actually missing.
            let hits: Vec<i32> = extract_volumes_from_title(&raw.title)
                .into_iter()
                .filter(|v| wanted.contains(v))
                .collect();
            if !hits.is_empty() {
                matched = Some(hits);
            }
        }
        out.push(ProwlarrRelease {
            guid: raw.guid,
            title: raw.title,
            size: raw.size,
            download_url: raw.download_url,
            indexer: raw.indexer,
            seeders: raw.seeders,
            leechers: raw.leechers,
            publish_date: raw.publish_date,
            protocol: raw.protocol,
            info_url: raw.info_url,
            categories: raw.categories,
            matched_missing_volumes: matched,
        });
    }
    out
}
// ─── Handlers ───────────────────────────────────────────────────────────────
/// Search for releases on Prowlarr
#[utoipa::path(
post,
path = "/prowlarr/search",
tag = "prowlarr",
request_body = ProwlarrSearchRequest,
responses(
(status = 200, body = ProwlarrSearchResponse),
(status = 400, description = "Bad request or Prowlarr not configured"),
(status = 401, description = "Unauthorized"),
(status = 500, description = "Prowlarr connection error"),
),
security(("Bearer" = []))
)]
pub async fn search_prowlarr(
State(state): State<AppState>,
Json(body): Json<ProwlarrSearchRequest>,
) -> Result<Json<ProwlarrSearchResponse>, ApiError> {
let (url, api_key, categories) = load_prowlarr_config(&state.pool).await?;
let query = if let Some(custom) = &body.custom_query {
custom.clone()
} else if let Some(vol) = body.volume_number {
format!("\"{}\" {}", body.series_name, vol)
} else {
format!("\"{}\"", body.series_name)
};
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(30))
.build()
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
let mut params: Vec<(&str, String)> = vec![
("query", query.clone()),
("type", "search".to_string()),
];
for cat in &categories {
params.push(("categories", cat.to_string()));
}
let resp = client
.get(format!("{url}/api/v1/search"))
.query(&params)
.header("X-Api-Key", &api_key)
.send()
.await
.map_err(|e| ApiError::internal(format!("Prowlarr request failed: {e}")))?;
if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Err(ApiError::internal(format!(
"Prowlarr returned {status}: {text}"
)));
}
let raw_text = resp
.text()
.await
.map_err(|e| ApiError::internal(format!("Failed to read Prowlarr response: {e}")))?;
tracing::debug!("Prowlarr raw response length: {} chars", raw_text.len());
let raw_releases: Vec<ProwlarrRawRelease> = serde_json::from_str(&raw_text)
.map_err(|e| {
tracing::error!("Failed to parse Prowlarr response: {e}");
tracing::error!("Raw response (first 500 chars): {}", &raw_text[..raw_text.len().min(500)]);
ApiError::internal(format!("Failed to parse Prowlarr response: {e}"))
})?;
let results = if let Some(missing) = &body.missing_volumes {
match_missing_volumes(raw_releases, missing)
} else {
raw_releases
.into_iter()
.map(|r| ProwlarrRelease {
guid: r.guid,
title: r.title,
size: r.size,
download_url: r.download_url,
indexer: r.indexer,
seeders: r.seeders,
leechers: r.leechers,
publish_date: r.publish_date,
protocol: r.protocol,
info_url: r.info_url,
categories: r.categories,
matched_missing_volumes: None,
})
.collect()
};
Ok(Json(ProwlarrSearchResponse { results, query }))
}
/// Test connection to Prowlarr
///
/// Performs a lightweight `GET /api/v1/indexer` with the configured API key
/// and reports success plus the indexer count, or the failure details.
#[utoipa::path(
    get,
    path = "/prowlarr/test",
    tag = "prowlarr",
    responses(
        (status = 200, body = ProwlarrTestResponse),
        (status = 400, description = "Prowlarr not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_prowlarr(
    State(state): State<AppState>,
) -> Result<Json<ProwlarrTestResponse>, ApiError> {
    let (url, api_key, _categories) = load_prowlarr_config(&state.pool).await?;
    let http = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
    let outcome = http
        .get(format!("{url}/api/v1/indexer"))
        .header("X-Api-Key", &api_key)
        .send()
        .await;
    // Network-level failure: report as a non-success test result, not an error.
    let response = match outcome {
        Ok(r) => r,
        Err(e) => {
            return Ok(Json(ProwlarrTestResponse {
                success: false,
                message: format!("Connection failed: {e}"),
                indexer_count: None,
            }))
        }
    };
    if !response.status().is_success() {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        return Ok(Json(ProwlarrTestResponse {
            success: false,
            message: format!("Prowlarr returned {status}: {text}"),
            indexer_count: None,
        }));
    }
    // Success: count the indexers returned (body parse errors degrade to 0).
    let indexers: Vec<serde_json::Value> = response.json().await.unwrap_or_default();
    Ok(Json(ProwlarrTestResponse {
        success: true,
        message: format!("Connected successfully ({} indexers)", indexers.len()),
        indexer_count: Some(indexers.len() as i32),
    }))
}
#[cfg(test)]
mod tests {
    use super::extract_volumes_from_title;
    /// Sort helper so assertions don't depend on extraction order.
    fn sorted(mut v: Vec<i32>) -> Vec<i32> {
        v.sort_unstable();
        v
    }
    #[test]
    fn individual_volumes() {
        assert_eq!(sorted(extract_volumes_from_title("One Piece T05")), vec![5]);
        assert_eq!(sorted(extract_volumes_from_title("Naruto Tome 12")), vec![12]);
        assert_eq!(sorted(extract_volumes_from_title("Vol.03")), vec![3]);
        assert_eq!(sorted(extract_volumes_from_title("v07")), vec![7]);
    }
    #[test]
    fn range_dot_separator() {
        // T01.T15 → 1..=15
        let v = sorted(extract_volumes_from_title("One Piece T01.T15"));
        assert_eq!(v, (1..=15).collect::<Vec<_>>());
    }
    #[test]
    fn range_dot_with_brackets() {
        // [T001.T104] → 1..=104
        let v = sorted(extract_volumes_from_title("Naruto [T001.T104]"));
        assert_eq!(v.len(), 104);
        assert_eq!(v[0], 1);
        assert_eq!(v[103], 104);
    }
    #[test]
    fn range_dash_separator() {
        // T01-T15
        let v = sorted(extract_volumes_from_title("Dragon Ball T01-T10"));
        assert_eq!(v, (1..=10).collect::<Vec<_>>());
    }
    #[test]
    fn range_french_a_grave() {
        // Tome 01 à Tome 05
        let v = sorted(extract_volumes_from_title("Astérix Tome 01 à Tome 05"));
        assert_eq!(v, vec![1, 2, 3, 4, 5]);
    }
    #[test]
    fn range_long_prefix() {
        // Tome01.Tome15
        let v = sorted(extract_volumes_from_title("Naruto Tome01.Tome15"));
        assert_eq!(v, (1..=15).collect::<Vec<_>>());
    }
    #[test]
    fn no_false_positive_version_string() {
        // "v2.0" is a version string: only volume 2 may be reported, and the
        // ".0" half must not trigger range expansion. (The previous assertion
        // `!v.contains(&0) || v.len() == 1` was vacuous — it even accepted
        // the output `vec![0]`.)
        assert_eq!(extract_volumes_from_title("tool v2.0 release"), vec![2]);
    }
}

218
apps/api/src/qbittorrent.rs Normal file
View File

@@ -0,0 +1,218 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
// ─── Types ──────────────────────────────────────────────────────────────────
/// Request body for `POST /qbittorrent/add`.
#[derive(Deserialize, ToSchema)]
pub struct QBittorrentAddRequest {
    /// Torrent or magnet URL forwarded verbatim to qBittorrent.
    pub url: String,
}
/// Result of submitting a torrent to qBittorrent.
#[derive(Serialize, ToSchema)]
pub struct QBittorrentAddResponse {
    pub success: bool,
    /// Human-readable outcome (confirmation, or qBittorrent's error details).
    pub message: String,
}
/// Result of the `GET /qbittorrent/test` connectivity check.
#[derive(Serialize, ToSchema)]
pub struct QBittorrentTestResponse {
    pub success: bool,
    /// Human-readable outcome of the check.
    pub message: String,
    /// qBittorrent application version when reachable; None on failure.
    pub version: Option<String>,
}
// ─── Config helper ──────────────────────────────────────────────────────────
/// Shape of the JSON stored under the `qbittorrent` key in `app_settings`.
#[derive(Deserialize)]
struct QBittorrentConfig {
    url: String,
    username: String,
    // NOTE(review): may be empty — only url and username are validated on load.
    password: String,
}
/// Load and validate the qBittorrent configuration stored under the
/// `qbittorrent` key of `app_settings`.
///
/// Returns `(base_url_without_trailing_slash, username, password)`.
/// Errors: 400 when unconfigured or url/username empty (an empty password is
/// accepted), 500 when the stored JSON does not match [`QBittorrentConfig`].
async fn load_qbittorrent_config(
    pool: &sqlx::PgPool,
) -> Result<(String, String, String), ApiError> {
    let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'")
        .fetch_optional(pool)
        .await?;
    let row = row.ok_or_else(|| ApiError::bad_request("qBittorrent is not configured"))?;
    let value: serde_json::Value = row.get("value");
    let config: QBittorrentConfig = serde_json::from_value(value)
        .map_err(|e| ApiError::internal(format!("invalid qbittorrent config: {e}")))?;
    if config.url.is_empty() || config.username.is_empty() {
        return Err(ApiError::bad_request(
            "qBittorrent URL and username must be configured in settings",
        ));
    }
    // Normalise the base URL so callers can append API paths safely.
    let url = config.url.trim_end_matches('/').to_string();
    Ok((url, config.username, config.password))
}
// ─── Login helper ───────────────────────────────────────────────────────────
/// Authenticate against the qBittorrent WebUI API and return the `SID`
/// session cookie value.
///
/// NOTE: qBittorrent replies HTTP 200 even on bad credentials (body
/// "Fails.") and only sets the SID cookie on success, so a missing cookie
/// is reported together with the response body to make authentication
/// failures diagnosable instead of the previous opaque cookie error.
async fn qbittorrent_login(
    client: &reqwest::Client,
    base_url: &str,
    username: &str,
    password: &str,
) -> Result<String, ApiError> {
    let resp = client
        .post(format!("{base_url}/api/v2/auth/login"))
        .form(&[("username", username), ("password", password)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent login request failed: {e}")))?;
    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!(
            "qBittorrent login failed ({status}): {text}"
        )));
    }
    // Extract the SID from the Set-Cookie header; it is only present when
    // authentication actually succeeded.
    let sid: Option<String> = resp
        .headers()
        .get("set-cookie")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("")
        .split(';')
        .next()
        .and_then(|s| s.strip_prefix("SID="))
        .map(str::to_string);
    match sid {
        Some(sid) => Ok(sid),
        None => {
            // 200 without a cookie ⇒ credentials rejected ("Fails.") or an
            // unexpected proxy response; surface the body for diagnosis.
            let body = resp.text().await.unwrap_or_default();
            Err(ApiError::internal(format!(
                "Failed to get SID cookie from qBittorrent (response: {body})"
            )))
        }
    }
}
// ─── Handlers ───────────────────────────────────────────────────────────────
/// Add a torrent to qBittorrent via its Web API (`/api/v2/torrents/add`).
#[utoipa::path(
    post,
    path = "/qbittorrent/add",
    tag = "qbittorrent",
    request_body = QBittorrentAddRequest,
    responses(
        (status = 200, body = QBittorrentAddResponse),
        (status = 400, description = "Bad request or qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "qBittorrent connection error"),
    ),
    security(("Bearer" = []))
)]
pub async fn add_torrent(
    State(state): State<AppState>,
    Json(body): Json<QBittorrentAddRequest>,
) -> Result<Json<QBittorrentAddResponse>, ApiError> {
    // A magnet link or torrent URL is mandatory.
    if body.url.is_empty() {
        return Err(ApiError::bad_request("url is required"));
    }
    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;
    let http = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
    // Authenticate first: the add endpoint requires a valid SID session cookie.
    let sid = qbittorrent_login(&http, &base_url, &username, &password).await?;
    let response = http
        .post(format!("{base_url}/api/v2/torrents/add"))
        .header("Cookie", format!("SID={sid}"))
        .form(&[("urls", &body.url)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?;
    // A non-2xx reply is reported as a soft failure inside the 200 payload so
    // the UI can surface qBittorrent's own message, mirroring the test endpoint.
    let payload = if response.status().is_success() {
        QBittorrentAddResponse {
            success: true,
            message: "Torrent added to qBittorrent".to_string(),
        }
    } else {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        QBittorrentAddResponse {
            success: false,
            message: format!("qBittorrent returned {status}: {text}"),
        }
    };
    Ok(Json(payload))
}
/// Test connection to qBittorrent: log in, then fetch the app version as a
/// cheap authenticated round-trip. Connection/login problems are returned as
/// `success = false` inside a 200 payload (not as HTTP errors) so the settings
/// UI can display the message; only a missing config yields a 400.
#[utoipa::path(
    get,
    path = "/qbittorrent/test",
    tag = "qbittorrent",
    responses(
        (status = 200, body = QBittorrentTestResponse),
        (status = 400, description = "qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_qbittorrent(
    State(state): State<AppState>,
) -> Result<Json<QBittorrentTestResponse>, ApiError> {
    // Propagates a 400 when qBittorrent is not configured.
    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
    // Login failure is a soft failure (success=false), not an error response.
    let sid = match qbittorrent_login(&client, &base_url, &username, &password).await {
        Ok(sid) => sid,
        Err(e) => {
            return Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("Login failed: {}", e.message),
                version: None,
            }));
        }
    };
    // The version endpoint returns the version string as plain text.
    let resp = client
        .get(format!("{base_url}/api/v2/app/version"))
        .header("Cookie", format!("SID={sid}"))
        .send()
        .await;
    match resp {
        Ok(r) if r.status().is_success() => {
            let version = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: true,
                message: format!("Connected successfully ({})", version.trim()),
                version: Some(version.trim().to_string()),
            }))
        }
        // Authenticated but the server answered with an error status.
        Ok(r) => {
            let status = r.status();
            let text = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("qBittorrent returned {status}: {text}"),
                version: None,
            }))
        }
        // Transport-level failure (timeout, refused connection, DNS...).
        Err(e) => Ok(Json(QBittorrentTestResponse {
            success: false,
            message: format!("Connection failed: {e}"),
            version: None,
        })),
    }
}

View File

@@ -1,11 +1,11 @@
use axum::{extract::{Path, State}, Json};
use axum::{extract::{Extension, Path, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
use crate::{auth::AuthUser, error::ApiError, state::AppState};
#[derive(Serialize, ToSchema)]
pub struct ReadingProgressResponse {
@@ -42,8 +42,10 @@ pub struct UpdateReadingProgressRequest {
)]
pub async fn get_reading_progress(
State(state): State<AppState>,
user: Option<Extension<AuthUser>>,
Path(id): Path<Uuid>,
) -> Result<Json<ReadingProgressResponse>, ApiError> {
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
// Verify book exists
let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)")
.bind(id)
@@ -55,9 +57,10 @@ pub async fn get_reading_progress(
}
let row = sqlx::query(
"SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1",
"SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1 AND user_id = $2",
)
.bind(id)
.bind(auth_user.user_id)
.fetch_optional(&state.pool)
.await?;
@@ -96,9 +99,11 @@ pub async fn get_reading_progress(
)]
pub async fn update_reading_progress(
State(state): State<AppState>,
user: Option<Extension<AuthUser>>,
Path(id): Path<Uuid>,
Json(body): Json<UpdateReadingProgressRequest>,
) -> Result<Json<ReadingProgressResponse>, ApiError> {
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
// Validate status value
if !["unread", "reading", "read"].contains(&body.status.as_str()) {
return Err(ApiError::bad_request(format!(
@@ -143,9 +148,9 @@ pub async fn update_reading_progress(
let row = sqlx::query(
r#"
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
VALUES ($1, $2, $3, NOW(), NOW())
ON CONFLICT (book_id) DO UPDATE
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
VALUES ($1, $2, $3, $4, NOW(), NOW())
ON CONFLICT (book_id, user_id) DO UPDATE
SET status = EXCLUDED.status,
current_page = EXCLUDED.current_page,
last_read_at = NOW(),
@@ -154,6 +159,7 @@ pub async fn update_reading_progress(
"#,
)
.bind(id)
.bind(auth_user.user_id)
.bind(&body.status)
.bind(current_page)
.fetch_one(&state.pool)
@@ -194,8 +200,10 @@ pub struct MarkSeriesReadResponse {
)]
pub async fn mark_series_read(
State(state): State<AppState>,
user: Option<Extension<AuthUser>>,
Json(body): Json<MarkSeriesReadRequest>,
) -> Result<Json<MarkSeriesReadResponse>, ApiError> {
let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
if !["read", "unread"].contains(&body.status.as_str()) {
return Err(ApiError::bad_request(
"status must be 'read' or 'unread'",
@@ -209,24 +217,50 @@ pub async fn mark_series_read(
};
let sql = if body.status == "unread" {
// Delete progress records to reset to unread
// Delete progress records to reset to unread (scoped to this user)
if body.series == "unclassified" {
format!(
r#"
WITH target_books AS (
SELECT id FROM books WHERE {series_filter}
)
DELETE FROM book_reading_progress
WHERE book_id IN (SELECT id FROM target_books)
WHERE book_id IN (SELECT id FROM target_books) AND user_id = $1
"#
)
} else {
format!(
r#"
INSERT INTO book_reading_progress (book_id, status, current_page, last_read_at, updated_at)
SELECT id, 'read', NULL, NOW(), NOW()
WITH target_books AS (
SELECT id FROM books WHERE {series_filter}
)
DELETE FROM book_reading_progress
WHERE book_id IN (SELECT id FROM target_books) AND user_id = $2
"#
)
}
} else if body.series == "unclassified" {
format!(
r#"
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
SELECT id, $1, 'read', NULL, NOW(), NOW()
FROM books
WHERE {series_filter}
ON CONFLICT (book_id) DO UPDATE
ON CONFLICT (book_id, user_id) DO UPDATE
SET status = 'read',
current_page = NULL,
last_read_at = NOW(),
updated_at = NOW()
"#
)
} else {
format!(
r#"
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
SELECT id, $2, 'read', NULL, NOW(), NOW()
FROM books
WHERE {series_filter}
ON CONFLICT (book_id, user_id) DO UPDATE
SET status = 'read',
current_page = NULL,
last_read_at = NOW(),
@@ -236,9 +270,18 @@ pub async fn mark_series_read(
};
let result = if body.series == "unclassified" {
sqlx::query(&sql).execute(&state.pool).await?
// $1 = user_id (no series bind needed)
sqlx::query(&sql)
.bind(auth_user.user_id)
.execute(&state.pool)
.await?
} else {
sqlx::query(&sql).bind(&body.series).execute(&state.pool).await?
// $1 = series, $2 = user_id
sqlx::query(&sql)
.bind(&body.series)
.bind(auth_user.user_id)
.execute(&state.pool)
.await?
};
Ok(Json(MarkSeriesReadResponse {

View File

@@ -0,0 +1,722 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::time::Duration;
use tracing::{info, warn};
use utoipa::ToSchema;
use uuid::Uuid;
use crate::{anilist, error::ApiError, state::AppState};
// ---------------------------------------------------------------------------
// DTOs
// ---------------------------------------------------------------------------
/// Request body for POST /reading-status/match.
#[derive(Deserialize, ToSchema)]
pub struct ReadingStatusMatchRequest {
    // Target library UUID (string form). When absent, one job is created for
    // every library that has a reading_status_provider configured.
    pub library_id: Option<String>,
}
/// Aggregated report for one reading-status match job.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusMatchReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    // index_jobs.status for this job ('running', 'success', 'failed', 'cancelled', ...).
    pub status: String,
    // Distinct series examined; stored in index_jobs.total_files for this job type.
    pub total_series: i64,
    // Per-outcome counters, derived from reading_status_match_results.
    pub linked: i64,
    pub already_linked: i64,
    pub no_results: i64,
    pub ambiguous: i64,
    pub errors: i64,
}
/// One per-series outcome row of a match job.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusMatchResultDto {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub series_name: String,
    /// 'linked' | 'already_linked' | 'no_results' | 'ambiguous' | 'error'
    pub status: String,
    // AniList fields are only populated for 'linked' outcomes.
    pub anilist_id: Option<i32>,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    // Only populated for 'error' outcomes.
    pub error_message: Option<String>,
}
// ---------------------------------------------------------------------------
// POST /reading-status/match — Trigger a reading status match job
// ---------------------------------------------------------------------------
/// POST /reading-status/match — create AniList match job(s).
///
/// Without `library_id`, one job is spawned per library that has a
/// `reading_status_provider`; with `library_id`, a single job is spawned for
/// that library. Workers run as detached tokio tasks; the handler returns
/// immediately with the (last) spawned job id.
#[utoipa::path(
    post,
    path = "/reading-status/match",
    tag = "reading_status",
    request_body = ReadingStatusMatchRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_match(
    State(state): State<AppState>,
    Json(body): Json<ReadingStatusMatchRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // All libraries case
    if body.library_id.is_none() {
        // Fail early (400) if AniList credentials are missing.
        anilist::load_anilist_settings(&state.pool).await?;
        let library_ids: Vec<Uuid> = sqlx::query_scalar(
            "SELECT id FROM libraries WHERE reading_status_provider IS NOT NULL ORDER BY name"
        )
        .fetch_all(&state.pool)
        .await?;
        let mut last_job_id: Option<Uuid> = None;
        for library_id in library_ids {
            // Skip libraries that already have a match job queued or running.
            let existing: Option<Uuid> = sqlx::query_scalar(
                "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'reading_status_match' AND status IN ('pending', 'running') LIMIT 1",
            )
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await?;
            if existing.is_some() { continue; }
            let job_id = Uuid::new_v4();
            sqlx::query(
                "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'reading_status_match', 'running', NOW())",
            )
            .bind(job_id)
            .bind(library_id)
            .execute(&state.pool)
            .await?;
            let pool = state.pool.clone();
            // Library name only feeds the failure notification; lookup errors ignored.
            let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
                .bind(library_id)
                .fetch_optional(&state.pool)
                .await
                .ok()
                .flatten();
            // Fire-and-forget worker: on failure the job row is marked 'failed'
            // with partial stats and a notification is emitted.
            tokio::spawn(async move {
                if let Err(e) = process_reading_status_match(&pool, job_id, library_id).await {
                    warn!("[READING_STATUS_MATCH] job {job_id} failed: {e}");
                    let partial_stats = build_match_stats(&pool, job_id).await;
                    let _ = sqlx::query(
                        "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW(), stats_json = $3 WHERE id = $1",
                    )
                    .bind(job_id)
                    .bind(e.to_string())
                    .bind(&partial_stats)
                    .execute(&pool)
                    .await;
                    notifications::notify(
                        pool.clone(),
                        notifications::NotificationEvent::ReadingStatusMatchFailed {
                            library_name,
                            error: e.to_string(),
                        },
                    );
                }
            });
            last_job_id = Some(job_id);
        }
        // Only the LAST spawned job id is reported; id is null when every
        // qualifying library was skipped.
        return Ok(Json(serde_json::json!({
            "id": last_job_id.map(|id| id.to_string()),
            "status": "started",
        })));
    }
    // Single-library case: validate the id and the library's provider config.
    let library_id: Uuid = body
        .library_id
        .unwrap()
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;
    // Verify library exists and has a reading_status_provider configured
    let lib_row = sqlx::query("SELECT reading_status_provider FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;
    let provider: Option<String> = lib_row.get("reading_status_provider");
    if provider.is_none() {
        return Err(ApiError::bad_request(
            "This library has no reading status provider configured",
        ));
    }
    // Check AniList is configured globally
    anilist::load_anilist_settings(&state.pool).await?;
    // Check no existing running job for this library
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'reading_status_match' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;
    if let Some(existing_id) = existing {
        // Idempotent: report the in-flight job instead of starting another.
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }
    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'reading_status_match', 'running', NOW())",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;
    let pool = state.pool.clone();
    let library_name: Option<String> =
        sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await
            .ok()
            .flatten();
    // Same fire-and-forget worker as the all-libraries branch.
    tokio::spawn(async move {
        if let Err(e) = process_reading_status_match(&pool, job_id, library_id).await {
            warn!("[READING_STATUS_MATCH] job {job_id} failed: {e}");
            let partial_stats = build_match_stats(&pool, job_id).await;
            let _ = sqlx::query(
                "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW(), stats_json = $3 WHERE id = $1",
            )
            .bind(job_id)
            .bind(e.to_string())
            .bind(&partial_stats)
            .execute(&pool)
            .await;
            notifications::notify(
                pool.clone(),
                notifications::NotificationEvent::ReadingStatusMatchFailed {
                    library_name,
                    error: e.to_string(),
                },
            );
        }
    });
    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "running",
    })))
}
// ---------------------------------------------------------------------------
// GET /reading-status/match/:id/report
// ---------------------------------------------------------------------------
/// GET /reading-status/match/{id}/report — summarize a match job: job status
/// plus per-outcome counters aggregated from the results table.
#[utoipa::path(
    get,
    path = "/reading-status/match/{id}/report",
    tag = "reading_status",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = ReadingStatusMatchReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_match_report(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
) -> Result<Json<ReadingStatusMatchReportDto>, ApiError> {
    // The type filter guards against reading an unrelated job's row.
    let job = sqlx::query(
        "SELECT status, total_files FROM index_jobs WHERE id = $1 AND type = 'reading_status_match'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;
    let job_status: String = job.get("status");
    let total_files: Option<i32> = job.get("total_files");
    // One row per distinct outcome with its count.
    let grouped = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_match_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(&state.pool)
    .await?;
    let (mut linked, mut already_linked, mut no_results, mut ambiguous, mut errors) =
        (0i64, 0i64, 0i64, 0i64, 0i64);
    for record in &grouped {
        let bucket: String = record.get("status");
        let total: i64 = record.get("cnt");
        match bucket.as_str() {
            "linked" => linked = total,
            "already_linked" => already_linked = total,
            "no_results" => no_results = total,
            "ambiguous" => ambiguous = total,
            "error" => errors = total,
            // Unknown statuses are ignored rather than failing the report.
            _ => {}
        }
    }
    Ok(Json(ReadingStatusMatchReportDto {
        job_id,
        status: job_status,
        total_series: total_files.unwrap_or(0) as i64,
        linked,
        already_linked,
        no_results,
        ambiguous,
        errors,
    }))
}
// ---------------------------------------------------------------------------
// GET /reading-status/match/:id/results
// ---------------------------------------------------------------------------
/// GET /reading-status/match/{id}/results — list the per-series rows of a
/// match job, optionally filtered to a single outcome via `?status=`.
#[utoipa::path(
    get,
    path = "/reading-status/match/{id}/results",
    tag = "reading_status",
    params(
        ("id" = String, Path, description = "Job UUID"),
        ("status" = Option<String>, Query, description = "Filter by status"),
    ),
    responses(
        (status = 200, body = Vec<ReadingStatusMatchResultDto>),
    ),
    security(("Bearer" = []))
)]
pub async fn get_match_results(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
    axum::extract::Query(query): axum::extract::Query<ResultsQuery>,
) -> Result<Json<Vec<ReadingStatusMatchResultDto>>, ApiError> {
    // Filtered results sort by series name; the full list groups by status first.
    let rows = match &query.status {
        Some(status_filter) => {
            sqlx::query(
                "SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, error_message
             FROM reading_status_match_results
             WHERE job_id = $1 AND status = $2
             ORDER BY series_name",
            )
            .bind(job_id)
            .bind(status_filter)
            .fetch_all(&state.pool)
            .await?
        }
        None => {
            sqlx::query(
                "SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, error_message
             FROM reading_status_match_results
             WHERE job_id = $1
             ORDER BY status, series_name",
            )
            .bind(job_id)
            .fetch_all(&state.pool)
            .await?
        }
    };
    let results = rows
        .into_iter()
        .map(|record| ReadingStatusMatchResultDto {
            id: record.get("id"),
            series_name: record.get("series_name"),
            status: record.get("status"),
            anilist_id: record.get("anilist_id"),
            anilist_title: record.get("anilist_title"),
            anilist_url: record.get("anilist_url"),
            error_message: record.get("error_message"),
        })
        .collect();
    Ok(Json(results))
}
/// Query string for the results endpoint (`?status=` filter).
#[derive(Deserialize)]
pub struct ResultsQuery {
    // Optional exact-match filter on the result status column.
    pub status: Option<String>,
}
// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------
/// Background worker for a reading-status match job.
///
/// Iterates every distinct series name of the library, searches AniList for an
/// exact title match, and records one result row per series. Progress and
/// cancellation are tracked through the `index_jobs` row.
///
/// Returns `Err(String)` only for fatal conditions (DB errors, a second 429 in
/// a row); per-series API failures are recorded as 'error' results and the job
/// continues.
pub(crate) async fn process_reading_status_match(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    let (token, _, _) = anilist::load_anilist_settings(pool)
        .await
        .map_err(|e| e.message)?;
    // Books with a NULL/empty series collapse into the pseudo-series 'unclassified'.
    let series_names: Vec<String> = sqlx::query_scalar(
        r#"
    SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')
    FROM books
    WHERE library_id = $1
    ORDER BY 1
    "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;
    let total = series_names.len() as i32;
    // total_files doubles as "total series" for this job type.
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;
    // Pre-load already-linked series so they can be skipped without an API call.
    let already_linked: std::collections::HashSet<String> = sqlx::query_scalar(
        "SELECT series_name FROM anilist_series_links WHERE library_id = $1",
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?
    .into_iter()
    .collect();
    let mut processed = 0i32;
    for series_name in &series_names {
        // Cooperative cancellation: something else flips the row to 'cancelled'.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok(());
        }
        processed += 1;
        let progress = (processed * 100 / total.max(1)).min(100);
        // Progress updates are best-effort (`.ok()`), never fatal.
        sqlx::query(
            "UPDATE index_jobs SET processed_files = $2, progress_percent = $3, current_file = $4 WHERE id = $1",
        )
        .bind(job_id)
        .bind(processed)
        .bind(progress)
        .bind(series_name)
        .execute(pool)
        .await
        .ok();
        // The pseudo-series is never matched against AniList.
        if series_name == "unclassified" {
            insert_result(pool, job_id, library_id, series_name, "already_linked", None, None, None, None).await;
            continue;
        }
        if already_linked.contains(series_name) {
            insert_result(pool, job_id, library_id, series_name, "already_linked", None, None, None, None).await;
            continue;
        }
        match search_and_link(pool, library_id, series_name, &token).await {
            Ok(Outcome::Linked { anilist_id, anilist_title, anilist_url }) => {
                insert_result(pool, job_id, library_id, series_name, "linked", Some(anilist_id), anilist_title.as_deref(), anilist_url.as_deref(), None).await;
            }
            Ok(Outcome::NoResults) => {
                insert_result(pool, job_id, library_id, series_name, "no_results", None, None, None, None).await;
            }
            Ok(Outcome::Ambiguous) => {
                insert_result(pool, job_id, library_id, series_name, "ambiguous", None, None, None, None).await;
            }
            // Rate limiting: wait once and retry; a second 429 aborts the job.
            Err(e) if e.contains("429") || e.contains("Too Many Requests") => {
                warn!("[READING_STATUS_MATCH] rate limit hit for '{series_name}', waiting 10s before retry");
                tokio::time::sleep(Duration::from_secs(10)).await;
                match search_and_link(pool, library_id, series_name, &token).await {
                    Ok(Outcome::Linked { anilist_id, anilist_title, anilist_url }) => {
                        insert_result(pool, job_id, library_id, series_name, "linked", Some(anilist_id), anilist_title.as_deref(), anilist_url.as_deref(), None).await;
                    }
                    Ok(Outcome::NoResults) => {
                        insert_result(pool, job_id, library_id, series_name, "no_results", None, None, None, None).await;
                    }
                    Ok(Outcome::Ambiguous) => {
                        insert_result(pool, job_id, library_id, series_name, "ambiguous", None, None, None, None).await;
                    }
                    Err(e2) => {
                        return Err(format!(
                            "AniList rate limit exceeded (429) — job stopped after {processed}/{total} series: {e2}"
                        ));
                    }
                }
            }
            // Any other API error is recorded per-series; the job keeps going.
            Err(e) => {
                warn!("[READING_STATUS_MATCH] series '{series_name}': {e}");
                insert_result(pool, job_id, library_id, series_name, "error", None, None, None, Some(&e)).await;
            }
        }
        // Respect AniList rate limit (~90 req/min)
        tokio::time::sleep(Duration::from_millis(700)).await;
    }
    // Build stats from results table
    let counts = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_match_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;
    let mut count_linked = 0i64;
    let mut count_already_linked = 0i64;
    let mut count_no_results = 0i64;
    let mut count_ambiguous = 0i64;
    let mut count_errors = 0i64;
    for row in &counts {
        let s: String = row.get("status");
        let c: i64 = row.get("cnt");
        match s.as_str() {
            "linked" => count_linked = c,
            "already_linked" => count_already_linked = c,
            "no_results" => count_no_results = c,
            "ambiguous" => count_ambiguous = c,
            "error" => count_errors = c,
            _ => {}
        }
    }
    let stats = serde_json::json!({
        "total_series": total as i64,
        "linked": count_linked,
        "already_linked": count_already_linked,
        "no_results": count_no_results,
        "ambiguous": count_ambiguous,
        "errors": count_errors,
    });
    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, progress_percent = 100 WHERE id = $1",
    )
    .bind(job_id)
    .bind(&stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    info!(
        "[READING_STATUS_MATCH] job={job_id} completed: {}/{} series, linked={count_linked}, ambiguous={count_ambiguous}, no_results={count_no_results}, errors={count_errors}",
        processed, total
    );
    // Completion notification; the name lookup is best-effort.
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::ReadingStatusMatchCompleted {
            library_name,
            total_series: total,
            linked: count_linked as i32,
        },
    );
    Ok(())
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Record one per-series outcome row for a match job.
///
/// Insert errors are deliberately ignored (`let _ =`): result rows are
/// best-effort bookkeeping and must never abort the surrounding job loop.
#[allow(clippy::too_many_arguments)]
async fn insert_result(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    // 'linked' | 'already_linked' | 'no_results' | 'ambiguous' | 'error'
    status: &str,
    anilist_id: Option<i32>,
    anilist_title: Option<&str>,
    anilist_url: Option<&str>,
    error_message: Option<&str>,
) {
    let _ = sqlx::query(
        r#"
    INSERT INTO reading_status_match_results
        (job_id, library_id, series_name, status, anilist_id, anilist_title, anilist_url, error_message)
    VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
    "#,
    )
    .bind(job_id)
    .bind(library_id)
    .bind(series_name)
    .bind(status)
    .bind(anilist_id)
    .bind(anilist_title)
    .bind(anilist_url)
    .bind(error_message)
    .execute(pool)
    .await;
}
/// Outcome of a single series lookup against AniList.
enum Outcome {
    /// A unique credible candidate was found and a link row was inserted.
    Linked {
        anilist_id: i32,
        anilist_title: Option<String>,
        anilist_url: Option<String>,
    },
    /// AniList returned no media for the search term.
    NoResults,
    /// Multiple candidates matched and none could be chosen automatically.
    Ambiguous,
}
/// Search AniList for `series_name` and, when exactly one credible candidate
/// exists, persist a link in `anilist_series_links`.
///
/// Matching policy: a single exact (normalized) match on any of the
/// romaji/english/native titles wins; failing that, a lone search result is
/// accepted; anything else is `Outcome::Ambiguous`.
///
/// Fix: the previous `candidate["id"].as_i64().unwrap_or(0)` could silently
/// persist a link with `anilist_id = 0` when the API response lacked an id;
/// that case now returns an error (recorded as an 'error' result upstream).
async fn search_and_link(
    pool: &PgPool,
    library_id: Uuid,
    series_name: &str,
    token: &str,
) -> Result<Outcome, String> {
    let gql = r#"
    query SearchManga($search: String) {
        Page(perPage: 10) {
            media(search: $search, type: MANGA, sort: [SEARCH_MATCH]) {
                id
                title { romaji english native }
                siteUrl
            }
        }
    }
    "#;
    let data = anilist::anilist_graphql(token, gql, serde_json::json!({ "search": series_name }))
        .await
        .map_err(|e| e.message)?;
    let media: Vec<serde_json::Value> = match data["Page"]["media"].as_array() {
        Some(arr) => arr.clone(),
        None => return Ok(Outcome::NoResults),
    };
    if media.is_empty() {
        return Ok(Outcome::NoResults);
    }
    let normalized_query = normalize_title(series_name);
    // Keep only candidates whose normalized title equals the query exactly.
    let exact_matches: Vec<_> = media
        .iter()
        .filter(|m| {
            let romaji = m["title"]["romaji"].as_str().map(normalize_title);
            let english = m["title"]["english"].as_str().map(normalize_title);
            let native = m["title"]["native"].as_str().map(normalize_title);
            romaji.as_deref() == Some(&normalized_query)
                || english.as_deref() == Some(&normalized_query)
                || native.as_deref() == Some(&normalized_query)
        })
        .collect();
    let candidate = if exact_matches.len() == 1 {
        exact_matches[0]
    } else if exact_matches.is_empty() && media.len() == 1 {
        &media[0]
    } else {
        return Ok(Outcome::Ambiguous);
    };
    // Reject a missing or non-positive id instead of persisting a bogus link.
    let anilist_id = candidate["id"]
        .as_i64()
        .filter(|id| *id > 0)
        .ok_or_else(|| format!("AniList result for '{series_name}' has no valid id"))?
        as i32;
    let anilist_title = candidate["title"]["english"]
        .as_str()
        .or_else(|| candidate["title"]["romaji"].as_str())
        .map(String::from);
    let anilist_url = candidate["siteUrl"].as_str().map(String::from);
    // DO NOTHING on conflict: an existing link is never overwritten.
    sqlx::query(
        r#"
    INSERT INTO anilist_series_links (library_id, series_name, provider, anilist_id, anilist_title, anilist_url, status, linked_at)
    VALUES ($1, $2, 'anilist', $3, $4, $5, 'linked', NOW())
    ON CONFLICT (library_id, series_name, provider) DO NOTHING
    "#,
    )
    .bind(library_id)
    .bind(series_name)
    .bind(anilist_id)
    .bind(&anilist_title)
    .bind(&anilist_url)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    Ok(Outcome::Linked {
        anilist_id,
        anilist_title,
        anilist_url,
    })
}
/// Normalize a title for exact comparison: lowercase, replace common
/// punctuation with spaces, then collapse all whitespace runs to single
/// spaces (leading/trailing whitespace is dropped).
fn normalize_title(s: &str) -> String {
    const PUNCTUATION: [char; 9] = [':', '!', '?', '.', ',', '\'', '"', '-', '_'];
    let depunctuated = s.to_lowercase().replace(PUNCTUATION, " ");
    let mut normalized = String::with_capacity(depunctuated.len());
    for word in depunctuated.split_whitespace() {
        if !normalized.is_empty() {
            normalized.push(' ');
        }
        normalized.push_str(word);
    }
    normalized
}
/// Build the stats JSON blob for a match job from whatever result rows exist.
///
/// Used both on success and to attach partial stats when a job fails, so every
/// query error degrades to zero counts instead of propagating.
async fn build_match_stats(pool: &PgPool, job_id: Uuid) -> serde_json::Value {
    let total: Option<i32> = sqlx::query_scalar("SELECT total_files FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    let counts = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_match_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .unwrap_or_default();
    let mut linked = 0i64;
    let mut already_linked = 0i64;
    let mut no_results = 0i64;
    let mut ambiguous = 0i64;
    let mut errors = 0i64;
    for row in &counts {
        let s: String = row.get("status");
        let c: i64 = row.get("cnt");
        match s.as_str() {
            "linked" => linked = c,
            "already_linked" => already_linked = c,
            "no_results" => no_results = c,
            "ambiguous" => ambiguous = c,
            "error" => errors = c,
            // Unknown statuses are ignored.
            _ => {}
        }
    }
    serde_json::json!({
        "total_series": total.unwrap_or(0) as i64,
        "linked": linked,
        "already_linked": already_linked,
        "no_results": no_results,
        "ambiguous": ambiguous,
        "errors": errors,
    })
}
/// Return true when the job row's status has been set to 'cancelled'.
/// A query error or a missing row is treated as "not cancelled".
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
    let status = sqlx::query_scalar::<_, String>("SELECT status FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    matches!(status.as_deref(), Some("cancelled"))
}

View File

@@ -0,0 +1,752 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::time::Duration;
use tracing::{info, warn};
use utoipa::ToSchema;
use uuid::Uuid;
use crate::{anilist, error::ApiError, state::AppState};
// ---------------------------------------------------------------------------
// DTOs
// ---------------------------------------------------------------------------
/// Request body for POST /reading-status/push.
#[derive(Deserialize, ToSchema)]
pub struct ReadingStatusPushRequest {
    // Target library UUID (string form). When absent, one job is created for
    // every library whose reading_status_provider is 'anilist'.
    pub library_id: Option<String>,
}
/// Aggregated report for one reading-status push job.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusPushReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    // index_jobs.status for this job.
    pub status: String,
    // Distinct series considered for pushing.
    pub total_series: i64,
    // Per-outcome counters.
    pub pushed: i64,
    pub skipped: i64,
    pub no_books: i64,
    pub errors: i64,
}
/// One per-series outcome row of a push job.
#[derive(Serialize, ToSchema)]
pub struct ReadingStatusPushResultDto {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub series_name: String,
    /// 'pushed' | 'skipped' | 'no_books' | 'error'
    pub status: String,
    pub anilist_id: Option<i32>,
    pub anilist_title: Option<String>,
    pub anilist_url: Option<String>,
    /// PLANNING | CURRENT | COMPLETED
    pub anilist_status: Option<String>,
    // Number of volumes reported as read, when applicable.
    pub progress_volumes: Option<i32>,
    // Only populated for 'error' outcomes.
    pub error_message: Option<String>,
}
// ---------------------------------------------------------------------------
// POST /reading-status/push — Trigger a reading status push job
// ---------------------------------------------------------------------------
/// POST /reading-status/push — create push job(s) syncing local reading
/// progress to AniList.
///
/// Without `library_id`, one job is spawned per library whose provider is
/// 'anilist'; with `library_id`, a single job for that library. Requires a
/// configured AniList local_user_id in addition to the API token. Workers run
/// as detached tokio tasks; the handler returns immediately.
#[utoipa::path(
    post,
    path = "/reading-status/push",
    tag = "reading_status",
    request_body = ReadingStatusPushRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_push(
    State(state): State<AppState>,
    Json(body): Json<ReadingStatusPushRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // All libraries case
    if body.library_id.is_none() {
        // Push requires local_user_id on top of the AniList token.
        let (_, _, local_user_id) = anilist::load_anilist_settings(&state.pool).await?;
        if local_user_id.is_none() {
            return Err(ApiError::bad_request(
                "AniList local_user_id not configured — required for reading status push",
            ));
        }
        let library_ids: Vec<Uuid> = sqlx::query_scalar(
            "SELECT id FROM libraries WHERE reading_status_provider = 'anilist' ORDER BY name"
        )
        .fetch_all(&state.pool)
        .await?;
        let mut last_job_id: Option<Uuid> = None;
        for library_id in library_ids {
            // Skip libraries that already have a push job queued or running.
            let existing: Option<Uuid> = sqlx::query_scalar(
                "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'reading_status_push' AND status IN ('pending', 'running') LIMIT 1",
            )
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await?;
            if existing.is_some() { continue; }
            let job_id = Uuid::new_v4();
            sqlx::query(
                "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'reading_status_push', 'running', NOW())",
            )
            .bind(job_id)
            .bind(library_id)
            .execute(&state.pool)
            .await?;
            let pool = state.pool.clone();
            // Library name only feeds the failure notification; lookup errors ignored.
            let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
                .bind(library_id)
                .fetch_optional(&state.pool)
                .await
                .ok()
                .flatten();
            // Fire-and-forget worker: on failure the job row is marked 'failed'
            // with partial stats and a notification is emitted.
            tokio::spawn(async move {
                if let Err(e) = process_reading_status_push(&pool, job_id, library_id).await {
                    warn!("[READING_STATUS_PUSH] job {job_id} failed: {e}");
                    let partial_stats = build_push_stats(&pool, job_id).await;
                    let _ = sqlx::query(
                        "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW(), stats_json = $3 WHERE id = $1",
                    )
                    .bind(job_id)
                    .bind(e.to_string())
                    .bind(&partial_stats)
                    .execute(&pool)
                    .await;
                    notifications::notify(
                        pool.clone(),
                        notifications::NotificationEvent::ReadingStatusPushFailed {
                            library_name,
                            error: e.to_string(),
                        },
                    );
                }
            });
            last_job_id = Some(job_id);
        }
        // Only the LAST spawned job id is reported; id is null when every
        // qualifying library was skipped.
        return Ok(Json(serde_json::json!({
            "id": last_job_id.map(|id| id.to_string()),
            "status": "started",
        })));
    }
    // Single-library case: validate the id and the library's provider config.
    let library_id: Uuid = body
        .library_id
        .unwrap()
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;
    // Verify library exists and has AniList configured
    let lib_row = sqlx::query("SELECT reading_status_provider FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;
    let provider: Option<String> = lib_row.get("reading_status_provider");
    if provider.as_deref() != Some("anilist") {
        return Err(ApiError::bad_request(
            "This library has no AniList reading status provider configured",
        ));
    }
    // Check AniList is configured globally with a local_user_id
    let (_, _, local_user_id) = anilist::load_anilist_settings(&state.pool).await?;
    if local_user_id.is_none() {
        return Err(ApiError::bad_request(
            "AniList local_user_id not configured — required for reading status push",
        ));
    }
    // Check no existing running job for this library
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'reading_status_push' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;
    if let Some(existing_id) = existing {
        // Idempotent: report the in-flight job instead of starting another.
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }
    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'reading_status_push', 'running', NOW())",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;
    let pool = state.pool.clone();
    let library_name: Option<String> =
        sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
            .bind(library_id)
            .fetch_optional(&state.pool)
            .await
            .ok()
            .flatten();
    // Same fire-and-forget worker as the all-libraries branch.
    tokio::spawn(async move {
        if let Err(e) = process_reading_status_push(&pool, job_id, library_id).await {
            warn!("[READING_STATUS_PUSH] job {job_id} failed: {e}");
            let partial_stats = build_push_stats(&pool, job_id).await;
            let _ = sqlx::query(
                "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW(), stats_json = $3 WHERE id = $1",
            )
            .bind(job_id)
            .bind(e.to_string())
            .bind(&partial_stats)
            .execute(&pool)
            .await;
            notifications::notify(
                pool.clone(),
                notifications::NotificationEvent::ReadingStatusPushFailed {
                    library_name,
                    error: e.to_string(),
                },
            );
        }
    });
    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "running",
    })))
}
// ---------------------------------------------------------------------------
// GET /reading-status/push/:id/report
// ---------------------------------------------------------------------------
/// Aggregate report for one reading-status push job: overall job status plus
/// per-outcome counts derived from `reading_status_push_results`.
#[utoipa::path(
    get,
    path = "/reading-status/push/{id}/report",
    tag = "reading_status",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = ReadingStatusPushReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_push_report(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
) -> Result<Json<ReadingStatusPushReportDto>, ApiError> {
    // Fetch the job row first so an unknown or wrong-typed job yields 404.
    let job_row = sqlx::query(
        "SELECT status, total_files FROM index_jobs WHERE id = $1 AND type = 'reading_status_push'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;
    let status: String = job_row.get("status");
    // total_files holds the number of series scheduled for this job.
    let total: Option<i32> = job_row.get("total_files");
    // One row per distinct result status for this job.
    let count_rows = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_push_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(&state.pool)
    .await?;
    let (mut pushed, mut skipped, mut no_books, mut errors) = (0i64, 0i64, 0i64, 0i64);
    for row in &count_rows {
        let cnt: i64 = row.get("cnt");
        match row.get::<String, _>("status").as_str() {
            "pushed" => pushed = cnt,
            "skipped" => skipped = cnt,
            "no_books" => no_books = cnt,
            "error" => errors = cnt,
            _ => {} // unknown statuses are ignored
        }
    }
    Ok(Json(ReadingStatusPushReportDto {
        job_id,
        status,
        total_series: total.unwrap_or(0) as i64,
        pushed,
        skipped,
        no_books,
        errors,
    }))
}
// ---------------------------------------------------------------------------
// GET /reading-status/push/:id/results
// ---------------------------------------------------------------------------
/// Query parameters for `GET /reading-status/push/:id/results`.
#[derive(Deserialize)]
pub struct PushResultsQuery {
    // Optional exact-match filter on result status
    // (values seen in this file: "pushed", "skipped", "no_books", "error").
    pub status: Option<String>,
}
/// Per-series results of one push job, optionally filtered by outcome status.
#[utoipa::path(
    get,
    path = "/reading-status/push/{id}/results",
    tag = "reading_status",
    params(
        ("id" = String, Path, description = "Job UUID"),
        ("status" = Option<String>, Query, description = "Filter by status"),
    ),
    responses(
        (status = 200, body = Vec<ReadingStatusPushResultDto>),
    ),
    security(("Bearer" = []))
)]
pub async fn get_push_results(
    State(state): State<AppState>,
    axum::extract::Path(job_id): axum::extract::Path<Uuid>,
    axum::extract::Query(query): axum::extract::Query<PushResultsQuery>,
) -> Result<Json<Vec<ReadingStatusPushResultDto>>, ApiError> {
    // Row -> DTO projection shared by both query branches.
    let to_dto = |row: &sqlx::postgres::PgRow| ReadingStatusPushResultDto {
        id: row.get("id"),
        series_name: row.get("series_name"),
        status: row.get("status"),
        anilist_id: row.get("anilist_id"),
        anilist_title: row.get("anilist_title"),
        anilist_url: row.get("anilist_url"),
        anilist_status: row.get("anilist_status"),
        progress_volumes: row.get("progress_volumes"),
        error_message: row.get("error_message"),
    };
    // The two branches intentionally use different ORDER BY clauses:
    // filtered results sort by series only; unfiltered group by status first.
    let rows = match &query.status {
        Some(status_filter) => {
            sqlx::query(
                "SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message
                 FROM reading_status_push_results
                 WHERE job_id = $1 AND status = $2
                 ORDER BY series_name",
            )
            .bind(job_id)
            .bind(status_filter)
            .fetch_all(&state.pool)
            .await?
        }
        None => {
            sqlx::query(
                "SELECT id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message
                 FROM reading_status_push_results
                 WHERE job_id = $1
                 ORDER BY status, series_name",
            )
            .bind(job_id)
            .fetch_all(&state.pool)
            .await?
        }
    };
    Ok(Json(rows.iter().map(to_dto).collect()))
}
// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------
/// Minimal projection of an `anilist_series_links` row needed to push one
/// series' reading status to AniList.
struct SeriesInfo {
    // Library-local series name (matches books.series / 'unclassified').
    series_name: String,
    // AniList media id the series is linked to (query filters out NULLs).
    anilist_id: i32,
    anilist_title: Option<String>,
    anilist_url: Option<String>,
}
/// Background worker for a `reading_status_push` job.
///
/// For every AniList-linked series in `library_id` whose local progress is
/// newer than the last sync (differential selection below), computes a
/// reading status from the user's per-book progress and pushes it to AniList,
/// recording one `reading_status_push_results` row per series and updating
/// the `index_jobs` row (progress, stats) along the way.
///
/// Returns `Err(message)` only for fatal conditions (missing settings, DB
/// errors, or an AniList 429 that persists after one retry); per-series push
/// failures are recorded as `error` rows and do not abort the job.
pub async fn process_reading_status_push(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    // AniList token + the local user whose reading progress is pushed.
    let (token, _, local_user_id_opt) = anilist::load_anilist_settings(pool)
        .await
        .map_err(|e| e.message)?;
    let local_user_id = local_user_id_opt
        .ok_or_else(|| "AniList local_user_id not configured".to_string())?;
    // Find all linked series that need a push (differential):
    //  - never synced (synced_at IS NULL), or
    //  - this user's reading progress changed since the last sync, or
    //  - new books were added to the series since the last sync.
    let series_to_push: Vec<SeriesInfo> = sqlx::query(
        r#"
        SELECT
            asl.series_name,
            asl.anilist_id,
            asl.anilist_title,
            asl.anilist_url
        FROM anilist_series_links asl
        WHERE asl.library_id = $1
          AND asl.anilist_id IS NOT NULL
          AND (
            asl.synced_at IS NULL
            OR EXISTS (
                SELECT 1
                FROM book_reading_progress brp
                JOIN books b2 ON b2.id = brp.book_id
                WHERE b2.library_id = asl.library_id
                  AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
                  AND brp.user_id = $2
                  AND brp.updated_at > asl.synced_at
            )
            OR EXISTS (
                SELECT 1
                FROM books b2
                WHERE b2.library_id = asl.library_id
                  AND COALESCE(NULLIF(b2.series, ''), 'unclassified') = asl.series_name
                  AND b2.created_at > asl.synced_at
            )
          )
        ORDER BY asl.series_name
        "#,
    )
    .bind(library_id)
    .bind(local_user_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?
    .into_iter()
    .map(|row| SeriesInfo {
        series_name: row.get("series_name"),
        anilist_id: row.get("anilist_id"),
        anilist_title: row.get("anilist_title"),
        anilist_url: row.get("anilist_url"),
    })
    .collect();
    // total_files is repurposed as "total series" for this job type.
    let total = series_to_push.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;
    let mut processed = 0i32;
    for series in &series_to_push {
        // Cooperative cancellation: the job row's status is the flag.
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok(());
        }
        processed += 1;
        // Integer percentage; total.max(1) avoids division by zero.
        let progress = (processed * 100 / total.max(1)).min(100);
        // Progress updates are best-effort (.ok()): a failure here must not
        // abort the push.
        sqlx::query(
            "UPDATE index_jobs SET processed_files = $2, progress_percent = $3, current_file = $4 WHERE id = $1",
        )
        .bind(job_id)
        .bind(processed)
        .bind(progress)
        .bind(&series.series_name)
        .execute(pool)
        .await
        .ok();
        // Compute reading status for this series: total books vs books this
        // user has marked 'read'.
        let stats_row = sqlx::query(
            r#"
            SELECT
                COUNT(b.id) AS total_books,
                COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read
            FROM books b
            LEFT JOIN book_reading_progress brp
                ON brp.book_id = b.id AND brp.user_id = $3
            WHERE b.library_id = $1
              AND COALESCE(NULLIF(b.series, ''), 'unclassified') = $2
            "#,
        )
        .bind(library_id)
        .bind(&series.series_name)
        .bind(local_user_id)
        .fetch_one(pool)
        .await
        .map_err(|e| e.to_string())?;
        let total_books: i64 = stats_row.get("total_books");
        let books_read: i64 = stats_row.get("books_read");
        if total_books == 0 {
            // Linked series with no books on disk: record and move on.
            insert_push_result(
                pool, job_id, library_id, &series.series_name, "no_books",
                Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
                None, None, None,
            ).await;
            // NOTE(review): this sleep also fires on the no_books path even
            // though no AniList request was made — presumably harmless, but
            // could be skipped; confirm intent.
            tokio::time::sleep(Duration::from_millis(700)).await;
            continue;
        }
        // Map local progress to an AniList MediaListStatus.
        let anilist_status = if books_read == 0 {
            "PLANNING"
        } else if books_read >= total_books {
            "COMPLETED"
        } else {
            "CURRENT"
        };
        let progress_volumes = books_read as i32;
        match push_to_anilist(
            &token,
            series.anilist_id,
            anilist_status,
            progress_volumes,
        )
        .await
        {
            Ok(()) => {
                // Update synced_at so the differential query skips this
                // series next run (best-effort).
                let _ = sqlx::query(
                    "UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
                )
                .bind(library_id)
                .bind(&series.series_name)
                .execute(pool)
                .await;
                insert_push_result(
                    pool, job_id, library_id, &series.series_name, "pushed",
                    Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
                    Some(anilist_status), Some(progress_volumes), None,
                ).await;
            }
            // Rate limit: wait 10s and retry exactly once; a second 429 (or
            // any retry failure) aborts the whole job with a fatal error.
            Err(e) if e.contains("429") || e.contains("Too Many Requests") => {
                warn!("[READING_STATUS_PUSH] rate limit hit for '{}', waiting 10s before retry", series.series_name);
                tokio::time::sleep(Duration::from_secs(10)).await;
                match push_to_anilist(&token, series.anilist_id, anilist_status, progress_volumes).await {
                    Ok(()) => {
                        let _ = sqlx::query(
                            "UPDATE anilist_series_links SET synced_at = NOW() WHERE library_id = $1 AND series_name = $2",
                        )
                        .bind(library_id)
                        .bind(&series.series_name)
                        .execute(pool)
                        .await;
                        insert_push_result(
                            pool, job_id, library_id, &series.series_name, "pushed",
                            Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
                            Some(anilist_status), Some(progress_volumes), None,
                        ).await;
                    }
                    Err(e2) => {
                        return Err(format!(
                            "AniList rate limit exceeded (429) — job stopped after {processed}/{total} series: {e2}"
                        ));
                    }
                }
            }
            // Any other provider error: record per-series and continue.
            Err(e) => {
                warn!("[READING_STATUS_PUSH] series '{}': {e}", series.series_name);
                insert_push_result(
                    pool, job_id, library_id, &series.series_name, "error",
                    Some(series.anilist_id), series.anilist_title.as_deref(), series.anilist_url.as_deref(),
                    None, None, Some(&e),
                ).await;
            }
        }
        // Respect AniList rate limit (~90 req/min)
        tokio::time::sleep(Duration::from_millis(700)).await;
    }
    // Build final stats from the recorded per-series outcomes.
    let counts = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_push_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;
    let mut count_pushed = 0i64;
    let mut count_skipped = 0i64;
    let mut count_no_books = 0i64;
    let mut count_errors = 0i64;
    for row in &counts {
        let s: String = row.get("status");
        let c: i64 = row.get("cnt");
        match s.as_str() {
            "pushed" => count_pushed = c,
            "skipped" => count_skipped = c,
            "no_books" => count_no_books = c,
            "error" => count_errors = c,
            _ => {}
        }
    }
    let stats = serde_json::json!({
        "total_series": total as i64,
        "pushed": count_pushed,
        "skipped": count_skipped,
        "no_books": count_no_books,
        "errors": count_errors,
    });
    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, progress_percent = 100 WHERE id = $1",
    )
    .bind(job_id)
    .bind(&stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;
    info!(
        "[READING_STATUS_PUSH] job={job_id} completed: {}/{} series, pushed={count_pushed}, no_books={count_no_books}, errors={count_errors}",
        processed, total
    );
    // Library name for the notification; best-effort lookup.
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::ReadingStatusPushCompleted {
            library_name,
            total_series: total,
            pushed: count_pushed as i32,
        },
    );
    Ok(())
}
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Push one media entry's status/progress to AniList via the
/// `SaveMediaListEntry` GraphQL mutation.
///
/// `status` is an AniList `MediaListStatus` literal (this file sends
/// "PLANNING", "COMPLETED" or "CURRENT"); `progress` is the volume count.
/// On failure, returns the provider error message as a `String`.
async fn push_to_anilist(
    token: &str,
    anilist_id: i32,
    status: &str,
    progress: i32,
) -> Result<(), String> {
    let mutation = r#"
    mutation SaveMediaListEntry($mediaId: Int, $status: MediaListStatus, $progress: Int) {
        SaveMediaListEntry(mediaId: $mediaId, status: $status, progress: $progress) {
            id
            status
            progress
        }
    }
    "#;
    let variables = serde_json::json!({
        "mediaId": anilist_id,
        "status": status,
        "progress": progress,
    });
    // The mutation's response body is not inspected; success is enough.
    anilist::anilist_graphql(token, mutation, variables)
        .await
        .map(|_| ())
        .map_err(|e| e.message)
}
/// Record one per-series push outcome. Best-effort: the insert result is
/// deliberately discarded so a reporting failure never aborts the push job.
#[allow(clippy::too_many_arguments)]
async fn insert_push_result(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    status: &str,
    anilist_id: Option<i32>,
    anilist_title: Option<&str>,
    anilist_url: Option<&str>,
    anilist_status: Option<&str>,
    progress_volumes: Option<i32>,
    error_message: Option<&str>,
) {
    let sql = r#"
        INSERT INTO reading_status_push_results
            (job_id, library_id, series_name, status, anilist_id, anilist_title, anilist_url, anilist_status, progress_volumes, error_message)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
    "#;
    // Bind order mirrors the column list above exactly.
    let _ = sqlx::query(sql)
        .bind(job_id)
        .bind(library_id)
        .bind(series_name)
        .bind(status)
        .bind(anilist_id)
        .bind(anilist_title)
        .bind(anilist_url)
        .bind(anilist_status)
        .bind(progress_volumes)
        .bind(error_message)
        .execute(pool)
        .await;
}
/// Assemble the stats_json payload for a push job from whatever result rows
/// exist so far. Used both for final stats and for partial stats when a job
/// fails mid-run; all lookups are best-effort and default to zero.
async fn build_push_stats(pool: &PgPool, job_id: Uuid) -> serde_json::Value {
    let total: Option<i32> = sqlx::query_scalar("SELECT total_files FROM index_jobs WHERE id = $1")
        .bind(job_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    let rows = sqlx::query(
        "SELECT status, COUNT(*) as cnt FROM reading_status_push_results WHERE job_id = $1 GROUP BY status",
    )
    .bind(job_id)
    .fetch_all(pool)
    .await
    .unwrap_or_default();
    let (mut pushed, mut skipped, mut no_books, mut errors) = (0i64, 0i64, 0i64, 0i64);
    for row in &rows {
        let cnt: i64 = row.get("cnt");
        match row.get::<String, _>("status").as_str() {
            "pushed" => pushed = cnt,
            "skipped" => skipped = cnt,
            "no_books" => no_books = cnt,
            "error" => errors = cnt,
            _ => {}
        }
    }
    serde_json::json!({
        "total_series": total.unwrap_or(0) as i64,
        "pushed": pushed,
        "skipped": skipped,
        "no_books": no_books,
        "errors": errors,
    })
}
/// True iff the job row's status column reads 'cancelled'. Any DB error or
/// missing row is treated as "not cancelled" (best-effort check).
async fn is_job_cancelled(pool: &PgPool, job_id: Uuid) -> bool {
    let status: Option<String> =
        sqlx::query_scalar("SELECT status FROM index_jobs WHERE id = $1")
            .bind(job_id)
            .fetch_optional(pool)
            .await
            .ok()
            .flatten();
    matches!(status.as_deref(), Some("cancelled"))
}

View File

@@ -43,11 +43,11 @@ pub struct SearchResponse {
#[utoipa::path(
get,
path = "/search",
tag = "books",
tag = "search",
params(
("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
("library_id" = Option<String>, Query, description = "Filter by library ID"),
("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf)"),
("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf, epub)"),
("kind" = Option<String>, Query, description = "Filter by kind (alias for type)"),
("limit" = Option<usize>, Query, description = "Max results per type (max 100)"),
),

1078
apps/api/src/series.rs Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,11 +1,12 @@
use axum::{
extract::State,
routing::{get, post},
extract::{Path as AxumPath, State},
routing::{delete, get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;
use crate::{error::ApiError, state::{AppState, load_dynamic_settings}};
@@ -42,6 +43,14 @@ pub fn settings_routes() -> Router<AppState> {
.route("/settings/cache/clear", post(clear_cache))
.route("/settings/cache/stats", get(get_cache_stats))
.route("/settings/thumbnail/stats", get(get_thumbnail_stats))
.route(
"/settings/status-mappings",
get(list_status_mappings).post(upsert_status_mapping),
)
.route(
"/settings/status-mappings/:id",
delete(delete_status_mapping),
)
}
/// List all settings
@@ -324,3 +333,125 @@ pub async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<
Ok(Json(stats))
}
// ---------------------------------------------------------------------------
// Status Mappings
// ---------------------------------------------------------------------------
/// API representation of one provider-status → internal-status mapping row.
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub struct StatusMappingDto {
    // UUID serialized as string for the API.
    pub id: String,
    pub provider_status: String,
    // None = provider status is known but currently unmapped.
    pub mapped_status: Option<String>,
}
/// Body of `POST /settings/status-mappings` (upsert keyed on provider_status).
#[derive(Debug, Clone, Deserialize, ToSchema)]
pub struct UpsertStatusMappingRequest {
    pub provider_status: String,
    pub mapped_status: String,
}
/// List all status mappings
#[utoipa::path(
    get,
    path = "/settings/status-mappings",
    tag = "settings",
    responses(
        (status = 200, body = Vec<StatusMappingDto>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_status_mappings(
    State(state): State<AppState>,
) -> Result<Json<Vec<StatusMappingDto>>, ApiError> {
    // Unmapped entries (mapped_status IS NULL) sort last per the query.
    let rows = sqlx::query(
        "SELECT id, provider_status, mapped_status FROM status_mappings ORDER BY mapped_status NULLS LAST, provider_status",
    )
    .fetch_all(&state.pool)
    .await?;
    let mut mappings = Vec::with_capacity(rows.len());
    for row in &rows {
        mappings.push(StatusMappingDto {
            id: row.get::<Uuid, _>("id").to_string(),
            provider_status: row.get("provider_status"),
            mapped_status: row.get::<Option<String>, _>("mapped_status"),
        });
    }
    Ok(Json(mappings))
}
/// Create or update a status mapping
#[utoipa::path(
    post,
    path = "/settings/status-mappings",
    tag = "settings",
    request_body = UpsertStatusMappingRequest,
    responses(
        (status = 200, body = StatusMappingDto),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn upsert_status_mapping(
    State(state): State<AppState>,
    Json(body): Json<UpsertStatusMappingRequest>,
) -> Result<Json<StatusMappingDto>, ApiError> {
    // Keys are normalized to lowercase so the ON CONFLICT target matches
    // regardless of how the provider status was cased in the request.
    let provider_status = body.provider_status.to_lowercase();
    let saved = sqlx::query(
        r#"
        INSERT INTO status_mappings (provider_status, mapped_status)
        VALUES ($1, $2)
        ON CONFLICT (provider_status)
        DO UPDATE SET mapped_status = $2, updated_at = NOW()
        RETURNING id, provider_status, mapped_status
        "#,
    )
    .bind(&provider_status)
    .bind(&body.mapped_status)
    .fetch_one(&state.pool)
    .await?;
    let dto = StatusMappingDto {
        id: saved.get::<Uuid, _>("id").to_string(),
        provider_status: saved.get("provider_status"),
        mapped_status: saved.get::<Option<String>, _>("mapped_status"),
    };
    Ok(Json(dto))
}
/// Unmap a status mapping (sets mapped_status to NULL, keeps the provider status known)
#[utoipa::path(
    delete,
    path = "/settings/status-mappings/{id}",
    tag = "settings",
    params(("id" = String, Path, description = "Mapping UUID")),
    responses(
        (status = 200, body = StatusMappingDto),
        (status = 401, description = "Unauthorized"),
        (status = 404, description = "Not found"),
    ),
    security(("Bearer" = []))
)]
pub async fn delete_status_mapping(
    State(state): State<AppState>,
    AxumPath(id): AxumPath<Uuid>,
) -> Result<Json<StatusMappingDto>, ApiError> {
    // DELETE is a soft unmap: the row survives with mapped_status = NULL so
    // the provider status stays known to the system.
    let row = sqlx::query(
        "UPDATE status_mappings SET mapped_status = NULL, updated_at = NOW() WHERE id = $1 RETURNING id, provider_status, mapped_status",
    )
    .bind(id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("status mapping not found"))?;
    Ok(Json(StatusMappingDto {
        id: row.get::<Uuid, _>("id").to_string(),
        provider_status: row.get("provider_status"),
        mapped_status: row.get::<Option<String>, _>("mapped_status"),
    }))
}

View File

@@ -1,9 +1,18 @@
use axum::{extract::State, Json};
use serde::Serialize;
use axum::{
extract::{Extension, Query, State},
Json,
};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;
use utoipa::{IntoParams, ToSchema};
use crate::{error::ApiError, state::AppState};
use crate::{auth::AuthUser, error::ApiError, state::AppState};
#[derive(Deserialize, IntoParams)]
pub struct StatsQuery {
/// Granularity: "day", "week" or "month" (default: "month")
pub period: Option<String>,
}
#[derive(Serialize, ToSchema)]
pub struct StatsOverview {
@@ -74,23 +83,72 @@ pub struct ProviderCount {
pub count: i64,
}
#[derive(Serialize, ToSchema)]
pub struct CurrentlyReadingItem {
pub book_id: String,
pub title: String,
pub series: Option<String>,
pub current_page: i32,
pub page_count: i32,
pub username: Option<String>,
}
#[derive(Serialize, ToSchema)]
pub struct RecentlyReadItem {
pub book_id: String,
pub title: String,
pub series: Option<String>,
pub last_read_at: String,
pub username: Option<String>,
}
#[derive(Serialize, ToSchema)]
pub struct MonthlyReading {
pub month: String,
pub books_read: i64,
pub pages_read: i64,
}
#[derive(Serialize, ToSchema)]
pub struct UserMonthlyReading {
pub month: String,
pub username: String,
pub books_read: i64,
pub pages_read: i64,
}
#[derive(Serialize, ToSchema)]
pub struct JobTimePoint {
pub label: String,
pub scan: i64,
pub rebuild: i64,
pub thumbnail: i64,
pub other: i64,
}
#[derive(Serialize, ToSchema)]
pub struct StatsResponse {
pub overview: StatsOverview,
pub reading_status: ReadingStatusStats,
pub currently_reading: Vec<CurrentlyReadingItem>,
pub recently_read: Vec<RecentlyReadItem>,
pub reading_over_time: Vec<MonthlyReading>,
pub by_format: Vec<FormatCount>,
pub by_language: Vec<LanguageCount>,
pub by_library: Vec<LibraryStats>,
pub top_series: Vec<TopSeries>,
pub additions_over_time: Vec<MonthlyAdditions>,
pub jobs_over_time: Vec<JobTimePoint>,
pub metadata: MetadataStats,
pub users_reading_over_time: Vec<UserMonthlyReading>,
}
/// Get collection statistics for the dashboard
#[utoipa::path(
get,
path = "/stats",
tag = "books",
tag = "stats",
params(StatsQuery),
responses(
(status = 200, body = StatsResponse),
(status = 401, description = "Unauthorized"),
@@ -99,7 +157,11 @@ pub struct StatsResponse {
)]
pub async fn get_stats(
State(state): State<AppState>,
Query(query): Query<StatsQuery>,
user: Option<Extension<AuthUser>>,
) -> Result<Json<StatsResponse>, ApiError> {
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
let period = query.period.as_deref().unwrap_or("month");
// Overview + reading status in one query
let overview_row = sqlx::query(
r#"
@@ -117,9 +179,10 @@ pub async fn get_stats(
COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading,
COUNT(*) FILTER (WHERE brp.status = 'read') AS read
FROM books b
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
"#,
)
.bind(user_id)
.fetch_one(&state.pool)
.await?;
@@ -207,7 +270,7 @@ pub async fn get_stats(
COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread_count
FROM libraries l
LEFT JOIN books b ON b.library_id = l.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
LEFT JOIN LATERAL (
SELECT size_bytes FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
) bf ON TRUE
@@ -215,6 +278,7 @@ pub async fn get_stats(
ORDER BY book_count DESC
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?;
@@ -239,13 +303,14 @@ pub async fn get_stats(
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
FROM books b
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
WHERE b.series IS NOT NULL AND b.series != ''
GROUP BY b.series
ORDER BY book_count DESC
LIMIT 10
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?;
@@ -259,20 +324,74 @@ pub async fn get_stats(
})
.collect();
// Additions over time (last 12 months)
let additions_rows = sqlx::query(
// Additions over time (with gap filling)
let additions_rows = match period {
"day" => {
sqlx::query(
r#"
SELECT
TO_CHAR(DATE_TRUNC('month', created_at), 'YYYY-MM') AS month,
COUNT(*) AS books_added
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
COALESCE(cnt.books_added, 0) AS books_added
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
LEFT JOIN (
SELECT created_at::date AS dt, COUNT(*) AS books_added
FROM books
WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
GROUP BY DATE_TRUNC('month', created_at)
WHERE created_at >= CURRENT_DATE - INTERVAL '6 days'
GROUP BY created_at::date
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.fetch_all(&state.pool)
.await?;
.await?
}
"week" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
COALESCE(cnt.books_added, 0) AS books_added
FROM generate_series(
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
DATE_TRUNC('week', NOW()),
'1 week'
) AS d(dt)
LEFT JOIN (
SELECT DATE_TRUNC('week', created_at) AS dt, COUNT(*) AS books_added
FROM books
WHERE created_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
GROUP BY DATE_TRUNC('week', created_at)
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.fetch_all(&state.pool)
.await?
}
_ => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM') AS month,
COALESCE(cnt.books_added, 0) AS books_added
FROM generate_series(
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
DATE_TRUNC('month', NOW()),
'1 month'
) AS d(dt)
LEFT JOIN (
SELECT DATE_TRUNC('month', created_at) AS dt, COUNT(*) AS books_added
FROM books
WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
GROUP BY DATE_TRUNC('month', created_at)
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.fetch_all(&state.pool)
.await?
}
};
let additions_over_time: Vec<MonthlyAdditions> = additions_rows
.iter()
@@ -327,14 +446,396 @@ pub async fn get_stats(
by_provider,
};
// Currently reading books
let reading_rows = sqlx::query(
r#"
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1)
ORDER BY brp.updated_at DESC
LIMIT 20
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?;
let currently_reading: Vec<CurrentlyReadingItem> = reading_rows
.iter()
.map(|r| {
let id: uuid::Uuid = r.get("book_id");
CurrentlyReadingItem {
book_id: id.to_string(),
title: r.get("title"),
series: r.get("series"),
current_page: r.get::<Option<i32>, _>("current_page").unwrap_or(0),
page_count: r.get::<Option<i32>, _>("page_count").unwrap_or(0),
username: r.get("username"),
}
})
.collect();
// Recently read books
let recent_rows = sqlx::query(
r#"
SELECT b.id AS book_id, b.title, b.series,
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
u.username
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
LEFT JOIN users u ON u.id = brp.user_id
WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
AND ($1::uuid IS NULL OR brp.user_id = $1)
ORDER BY brp.last_read_at DESC
LIMIT 10
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?;
let recently_read: Vec<RecentlyReadItem> = recent_rows
.iter()
.map(|r| {
let id: uuid::Uuid = r.get("book_id");
RecentlyReadItem {
book_id: id.to_string(),
title: r.get("title"),
series: r.get("series"),
last_read_at: r.get::<Option<String>, _>("last_read_at").unwrap_or_default(),
username: r.get("username"),
}
})
.collect();
// Reading activity over time (with gap filling)
let reading_time_rows = match period {
"day" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
LEFT JOIN (
SELECT brp.last_read_at::date AS dt, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
AND ($1::uuid IS NULL OR brp.user_id = $1)
GROUP BY brp.last_read_at::date
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?
}
"week" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
DATE_TRUNC('week', NOW()),
'1 week'
) AS d(dt)
LEFT JOIN (
SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
AND ($1::uuid IS NULL OR brp.user_id = $1)
GROUP BY DATE_TRUNC('week', brp.last_read_at)
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?
}
_ => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM') AS month,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
DATE_TRUNC('month', NOW()),
'1 month'
) AS d(dt)
LEFT JOIN (
SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
AND ($1::uuid IS NULL OR brp.user_id = $1)
GROUP BY DATE_TRUNC('month', brp.last_read_at)
) cnt ON cnt.dt = d.dt
ORDER BY month ASC
"#,
)
.bind(user_id)
.fetch_all(&state.pool)
.await?
}
};
let reading_over_time: Vec<MonthlyReading> = reading_time_rows
.iter()
.map(|r| MonthlyReading {
month: r.get::<Option<String>, _>("month").unwrap_or_default(),
books_read: r.get("books_read"),
pages_read: r.get("pages_read"),
})
.collect();
// Per-user reading over time (admin view — always all users, no user_id filter)
let users_reading_time_rows = match period {
"day" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
u.username,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
CROSS JOIN users u
LEFT JOIN (
SELECT brp.last_read_at::date AS dt, brp.user_id, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
GROUP BY brp.last_read_at::date, brp.user_id
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
ORDER BY month ASC, u.username
"#,
)
.fetch_all(&state.pool)
.await?
}
"week" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
u.username,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
DATE_TRUNC('week', NOW()),
'1 week'
) AS d(dt)
CROSS JOIN users u
LEFT JOIN (
SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
GROUP BY DATE_TRUNC('week', brp.last_read_at), brp.user_id
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
ORDER BY month ASC, u.username
"#,
)
.fetch_all(&state.pool)
.await?
}
_ => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM') AS month,
u.username,
COALESCE(cnt.books_read, 0) AS books_read,
COALESCE(cnt.pages_read, 0) AS pages_read
FROM generate_series(
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
DATE_TRUNC('month', NOW()),
'1 month'
) AS d(dt)
CROSS JOIN users u
LEFT JOIN (
SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read,
COALESCE(SUM(b.page_count), 0)::BIGINT AS pages_read
FROM book_reading_progress brp
JOIN books b ON b.id = brp.book_id
WHERE brp.status = 'read'
AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
GROUP BY DATE_TRUNC('month', brp.last_read_at), brp.user_id
) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
ORDER BY month ASC, u.username
"#,
)
.fetch_all(&state.pool)
.await?
}
};
let users_reading_over_time: Vec<UserMonthlyReading> = users_reading_time_rows
.iter()
.map(|r| UserMonthlyReading {
month: r.get::<Option<String>, _>("month").unwrap_or_default(),
username: r.get("username"),
books_read: r.get("books_read"),
pages_read: r.get("pages_read"),
})
.collect();
// Jobs over time (with gap filling, grouped by type category)
let jobs_rows = match period {
"day" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
LEFT JOIN (
SELECT
finished_at::date AS dt,
CASE
WHEN type = 'scan' THEN 'scan'
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
ELSE 'other'
END AS cat,
COUNT(*) AS c
FROM index_jobs
WHERE status IN ('success', 'failed')
AND finished_at >= CURRENT_DATE - INTERVAL '6 days'
GROUP BY finished_at::date, cat
) cnt ON cnt.dt = d.dt
GROUP BY d.dt
ORDER BY label ASC
"#,
)
.fetch_all(&state.pool)
.await?
}
"week" => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
FROM generate_series(
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
DATE_TRUNC('week', NOW()),
'1 week'
) AS d(dt)
LEFT JOIN (
SELECT
DATE_TRUNC('week', finished_at) AS dt,
CASE
WHEN type = 'scan' THEN 'scan'
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
ELSE 'other'
END AS cat,
COUNT(*) AS c
FROM index_jobs
WHERE status IN ('success', 'failed')
AND finished_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
GROUP BY DATE_TRUNC('week', finished_at), cat
) cnt ON cnt.dt = d.dt
GROUP BY d.dt
ORDER BY label ASC
"#,
)
.fetch_all(&state.pool)
.await?
}
_ => {
sqlx::query(
r#"
SELECT
TO_CHAR(d.dt, 'YYYY-MM') AS label,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
FROM generate_series(
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
DATE_TRUNC('month', NOW()),
'1 month'
) AS d(dt)
LEFT JOIN (
SELECT
DATE_TRUNC('month', finished_at) AS dt,
CASE
WHEN type = 'scan' THEN 'scan'
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
ELSE 'other'
END AS cat,
COUNT(*) AS c
FROM index_jobs
WHERE status IN ('success', 'failed')
AND finished_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
GROUP BY DATE_TRUNC('month', finished_at), cat
) cnt ON cnt.dt = d.dt
GROUP BY d.dt
ORDER BY label ASC
"#,
)
.fetch_all(&state.pool)
.await?
}
};
let jobs_over_time: Vec<JobTimePoint> = jobs_rows
.iter()
.map(|r| JobTimePoint {
label: r.get("label"),
scan: r.get("scan"),
rebuild: r.get("rebuild"),
thumbnail: r.get("thumbnail"),
other: r.get("other"),
})
.collect();
Ok(Json(StatsResponse {
overview,
reading_status,
currently_reading,
recently_read,
reading_over_time,
by_format,
by_language,
by_library,
top_series,
additions_over_time,
jobs_over_time,
metadata,
users_reading_over_time,
}))
}

46
apps/api/src/telegram.rs Normal file
View File

@@ -0,0 +1,46 @@
use axum::{extract::State, Json};
use serde::Serialize;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
/// Result of a Telegram connectivity test. Always returned with HTTP 200;
/// delivery failure is reported via `success = false` rather than an error status.
#[derive(Serialize, ToSchema)]
pub struct TelegramTestResponse {
// Whether the test message was delivered to Telegram.
pub success: bool,
// Outcome detail: a confirmation string on success, or the send error text.
pub message: String,
}
/// Test Telegram connection by sending a test message
#[utoipa::path(
get,
path = "/telegram/test",
tag = "notifications",
responses(
(status = 200, body = TelegramTestResponse),
(status = 400, description = "Telegram not configured"),
(status = 401, description = "Unauthorized"),
),
security(("Bearer" = []))
)]
pub async fn test_telegram(
State(state): State<AppState>,
) -> Result<Json<TelegramTestResponse>, ApiError> {
let config = notifications::load_telegram_config(&state.pool)
.await
.ok_or_else(|| {
ApiError::bad_request(
"Telegram is not configured or disabled. Set bot_token, chat_id, and enable it.",
)
})?;
match notifications::send_test_message(&config).await {
Ok(()) => Ok(Json(TelegramTestResponse {
success: true,
message: "Test message sent successfully".to_string(),
})),
Err(e) => Ok(Json(TelegramTestResponse {
success: false,
message: format!("Failed to send: {e}"),
})),
}
}

View File

@@ -32,8 +32,32 @@ pub async fn start_thumbnails_rebuild(
payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
let job_id = Uuid::new_v4();
if library_id.is_none() {
let library_ids: Vec<Uuid> = sqlx::query_scalar("SELECT id FROM libraries ORDER BY name")
.fetch_all(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let mut last_row = None;
for lib_id in library_ids {
let job_id = Uuid::new_v4();
let row = sqlx::query(
r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
)
.bind(job_id)
.bind(lib_id)
.fetch_one(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
last_row = Some(row);
}
let row = last_row.ok_or_else(|| ApiError::bad_request("No libraries found"))?;
return Ok(Json(index_jobs::map_row(row)));
}
let job_id = Uuid::new_v4();
let row = sqlx::query(
r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
@@ -66,8 +90,32 @@ pub async fn start_thumbnails_regenerate(
payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
let job_id = Uuid::new_v4();
if library_id.is_none() {
let library_ids: Vec<Uuid> = sqlx::query_scalar("SELECT id FROM libraries ORDER BY name")
.fetch_all(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
let mut last_row = None;
for lib_id in library_ids {
let job_id = Uuid::new_v4();
let row = sqlx::query(
r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_regenerate', 'pending')
RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
)
.bind(job_id)
.bind(lib_id)
.fetch_one(&state.pool)
.await
.map_err(|e| ApiError::internal(e.to_string()))?;
last_row = Some(row);
}
let row = last_row.ok_or_else(|| ApiError::bad_request("No libraries found"))?;
return Ok(Json(index_jobs::map_row(row)));
}
let job_id = Uuid::new_v4();
let row = sqlx::query(
r#"INSERT INTO index_jobs (id, library_id, type, status)
VALUES ($1, $2, 'thumbnail_regenerate', 'pending')

View File

@@ -16,6 +16,8 @@ pub struct CreateTokenRequest {
pub name: String,
#[schema(value_type = Option<String>, example = "read")]
pub scope: Option<String>,
#[schema(value_type = Option<String>)]
pub user_id: Option<Uuid>,
}
#[derive(Serialize, ToSchema)]
@@ -26,6 +28,9 @@ pub struct TokenResponse {
pub scope: String,
pub prefix: String,
#[schema(value_type = Option<String>)]
pub user_id: Option<Uuid>,
pub username: Option<String>,
#[schema(value_type = Option<String>)]
pub last_used_at: Option<DateTime<Utc>>,
#[schema(value_type = Option<String>)]
pub revoked_at: Option<DateTime<Utc>>,
@@ -71,6 +76,10 @@ pub async fn create_token(
_ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")),
};
if scope == "read" && input.user_id.is_none() {
return Err(ApiError::bad_request("user_id is required for read-scoped tokens"));
}
let mut random = [0u8; 24];
OsRng.fill_bytes(&mut random);
let secret = URL_SAFE_NO_PAD.encode(random);
@@ -85,13 +94,14 @@ pub async fn create_token(
let id = Uuid::new_v4();
sqlx::query(
"INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)",
"INSERT INTO api_tokens (id, name, prefix, token_hash, scope, user_id) VALUES ($1, $2, $3, $4, $5, $6)",
)
.bind(id)
.bind(input.name.trim())
.bind(&prefix)
.bind(token_hash)
.bind(scope)
.bind(input.user_id)
.execute(&state.pool)
.await?;
@@ -118,7 +128,13 @@ pub async fn create_token(
)]
pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<TokenResponse>>, ApiError> {
let rows = sqlx::query(
"SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC",
r#"
SELECT t.id, t.name, t.scope, t.prefix, t.user_id, u.username,
t.last_used_at, t.revoked_at, t.created_at
FROM api_tokens t
LEFT JOIN users u ON u.id = t.user_id
ORDER BY t.created_at DESC
"#,
)
.fetch_all(&state.pool)
.await?;
@@ -130,6 +146,8 @@ pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<Token
name: row.get("name"),
scope: row.get("scope"),
prefix: row.get("prefix"),
user_id: row.get("user_id"),
username: row.get("username"),
last_used_at: row.get("last_used_at"),
revoked_at: row.get("revoked_at"),
created_at: row.get("created_at"),
@@ -171,6 +189,47 @@ pub async fn revoke_token(
Ok(Json(serde_json::json!({"revoked": true, "id": id})))
}
/// Request body for reassigning an API token to a reader user.
#[derive(Deserialize, ToSchema)]
pub struct UpdateTokenRequest {
// Target user id; sending null (or omitting the field) clears the assignment.
#[schema(value_type = Option<String>)]
pub user_id: Option<Uuid>,
}
/// Update a token's assigned user
///
/// Sets `api_tokens.user_id` for the given token. Passing a null `user_id`
/// unassigns the token. Responds with `{"updated": true, "id": ...}`.
#[utoipa::path(
patch,
path = "/admin/tokens/{id}",
tag = "tokens",
params(
("id" = String, Path, description = "Token UUID"),
),
request_body = UpdateTokenRequest,
responses(
(status = 200, description = "Token updated"),
(status = 404, description = "Token not found"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
),
security(("Bearer" = []))
)]
pub async fn update_token(
State(state): State<AppState>,
Path(id): Path<Uuid>,
Json(input): Json<UpdateTokenRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
// Unconditional UPDATE; rows_affected() == 0 distinguishes "no such token".
let result = sqlx::query("UPDATE api_tokens SET user_id = $1 WHERE id = $2")
.bind(input.user_id)
.bind(id)
.execute(&state.pool)
.await?;
if result.rows_affected() == 0 {
return Err(ApiError::not_found("token not found"));
}
Ok(Json(serde_json::json!({"updated": true, "id": id})))
}
/// Permanently delete a revoked API token
#[utoipa::path(
post,

195
apps/api/src/users.rs Normal file
View File

@@ -0,0 +1,195 @@
use axum::{extract::{Path, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;
use crate::{error::ApiError, state::AppState};
/// A reader user together with aggregate token/reading counters
/// (see `list_users` for how the counters are computed).
#[derive(Serialize, ToSchema)]
pub struct UserResponse {
#[schema(value_type = String)]
pub id: Uuid,
pub username: String,
// Number of non-revoked API tokens assigned to this user.
pub token_count: i64,
// Distinct books with reading status 'read'.
pub books_read: i64,
// Distinct books with reading status 'reading'.
pub books_reading: i64,
#[schema(value_type = String)]
pub created_at: DateTime<Utc>,
}
/// Body for creating a user; also reused by `update_user` to rename one.
#[derive(Deserialize, ToSchema)]
pub struct CreateUserRequest {
pub username: String,
}
/// List all reader users with their associated token count
///
/// One aggregate query per request: for each user, counts non-revoked API
/// tokens and distinct books in `read` / `reading` status. LEFT JOINs keep
/// users that have no tokens or no reading progress (counters come back 0).
/// Ordered by most recently created user first.
#[utoipa::path(
    get,
    path = "/admin/users",
    tag = "users",
    responses(
        (status = 200, body = Vec<UserResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_users(State(state): State<AppState>) -> Result<Json<Vec<UserResponse>>, ApiError> {
    let records = sqlx::query(
        r#"
        SELECT u.id, u.username, u.created_at,
        COUNT(DISTINCT t.id) AS token_count,
        COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read,
        COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'reading') AS books_reading
        FROM users u
        LEFT JOIN api_tokens t ON t.user_id = u.id AND t.revoked_at IS NULL
        LEFT JOIN book_reading_progress brp ON brp.user_id = u.id
        GROUP BY u.id, u.username, u.created_at
        ORDER BY u.created_at DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    // Materialize rows into the response DTO.
    let mut items = Vec::with_capacity(records.len());
    for record in records {
        items.push(UserResponse {
            id: record.get("id"),
            username: record.get("username"),
            token_count: record.get("token_count"),
            books_read: record.get("books_read"),
            books_reading: record.get("books_reading"),
            created_at: record.get("created_at"),
        });
    }
    Ok(Json(items))
}
/// Create a new reader user
#[utoipa::path(
post,
path = "/admin/users",
tag = "users",
request_body = CreateUserRequest,
responses(
(status = 200, body = UserResponse, description = "User created"),
(status = 400, description = "Invalid input"),
(status = 401, description = "Unauthorized"),
(status = 403, description = "Forbidden - Admin scope required"),
),
security(("Bearer" = []))
)]
pub async fn create_user(
State(state): State<AppState>,
Json(input): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
if input.username.trim().is_empty() {
return Err(ApiError::bad_request("username is required"));
}
let id = Uuid::new_v4();
let row = sqlx::query(
"INSERT INTO users (id, username) VALUES ($1, $2) RETURNING id, username, created_at",
)
.bind(id)
.bind(input.username.trim())
.fetch_one(&state.pool)
.await
.map_err(|e| {
if let sqlx::Error::Database(ref db_err) = e {
if db_err.constraint() == Some("users_username_key") {
return ApiError::bad_request("username already exists");
}
}
ApiError::from(e)
})?;
Ok(Json(UserResponse {
id: row.get("id"),
username: row.get("username"),
token_count: 0,
books_read: 0,
books_reading: 0,
created_at: row.get("created_at"),
}))
}
/// Update a reader user's username
///
/// Responds with `{"updated": true, "id": ...}` on success. Blank usernames
/// are rejected with 400, a duplicate username maps to 400 (unique index
/// `users_username_key`), and an unknown id yields 404.
#[utoipa::path(
    patch,
    path = "/admin/users/{id}",
    tag = "users",
    // Declare the path parameter, consistent with `delete_user` below.
    params(
        ("id" = String, Path, description = "User UUID"),
    ),
    request_body = CreateUserRequest,
    responses(
        // The handler returns a plain JSON acknowledgment, not a UserResponse,
        // so no body schema is advertised for the 200 case.
        (status = 200, description = "User updated"),
        (status = 400, description = "Invalid input"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(input): Json<CreateUserRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    // Trim once and reuse for both validation and the bind.
    let username = input.username.trim();
    if username.is_empty() {
        return Err(ApiError::bad_request("username is required"));
    }
    let result = sqlx::query("UPDATE users SET username = $1 WHERE id = $2")
        .bind(username)
        .bind(id)
        .execute(&state.pool)
        .await
        .map_err(|e| {
            // Same unique-violation translation as create_user.
            if let sqlx::Error::Database(ref db_err) = e {
                if db_err.constraint() == Some("users_username_key") {
                    return ApiError::bad_request("username already exists");
                }
            }
            ApiError::from(e)
        })?;
    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("user not found"));
    }
    Ok(Json(serde_json::json!({"updated": true, "id": id})))
}
/// Delete a reader user (cascades on tokens and reading progress)
///
/// Responds with `{"deleted": true, "id": ...}`; an unknown id yields 404.
#[utoipa::path(
    delete,
    path = "/admin/users/{id}",
    tag = "users",
    params(
        ("id" = String, Path, description = "User UUID"),
    ),
    responses(
        (status = 200, description = "User deleted"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn delete_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let outcome = sqlx::query("DELETE FROM users WHERE id = $1")
        .bind(id)
        .execute(&state.pool)
        .await?;
    // rows_affected() == 0 means no user carried this id.
    match outcome.rows_affected() {
        0 => Err(ApiError::not_found("user not found")),
        _ => Ok(Json(serde_json::json!({"deleted": true, "id": id}))),
    }
}

View File

@@ -1,4 +1,7 @@
API_BASE_URL=http://localhost:7080
API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token
API_BOOTSTRAP_TOKEN=change-me-in-production
NEXT_PUBLIC_API_BASE_URL=http://localhost:7080
NEXT_PUBLIC_API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token
NEXT_PUBLIC_API_BOOTSTRAP_TOKEN=change-me-in-production
ADMIN_USERNAME=admin
ADMIN_PASSWORD=changeme
SESSION_SECRET=change-me-in-production-use-32-chars-min

View File

@@ -0,0 +1,97 @@
"use client";
import { useEffect, useState } from "react";
import { useRouter } from "next/navigation";
/**
 * AniList implicit-grant OAuth callback page.
 *
 * Reads the `access_token` from the URL fragment, persists it in the AniList
 * settings (merging over the previously stored object so fields like
 * `client_id` survive), then calls /api/anilist/status to resolve and store
 * the user id. On success the user is redirected back to the settings tab.
 */
export default function AnilistCallbackPage() {
  const router = useRouter();
  const [status, setStatus] = useState<"loading" | "success" | "error">("loading");
  const [message, setMessage] = useState("");

  useEffect(() => {
    async function handleCallback() {
      // AniList's implicit grant puts the token in the URL fragment.
      const hash = window.location.hash.slice(1); // remove leading #
      const params = new URLSearchParams(hash);
      const accessToken = params.get("access_token");
      if (!accessToken) {
        setStatus("error");
        setMessage("Aucun token trouvé dans l'URL de callback.");
        return;
      }
      // Strip the token-bearing fragment from the address bar immediately so
      // the OAuth token does not linger in browser history or bookmarks.
      window.history.replaceState(null, "", window.location.pathname + window.location.search);
      try {
        // Read existing settings to preserve client_id and other fields.
        const existingResp = await fetch("/api/settings/anilist").catch(() => null);
        const existing = existingResp?.ok ? await existingResp.json().catch(() => ({})) : {};
        const save = (extra: Record<string, unknown>) =>
          fetch("/api/settings/anilist", {
            method: "POST",
            headers: { "Content-Type": "application/json" },
            body: JSON.stringify({ value: { ...existing, access_token: accessToken, ...extra } }),
          });
        const saveResp = await save({});
        if (!saveResp.ok) throw new Error("Impossible de sauvegarder le token");
        // Auto-fetch user info to populate user_id in the stored settings.
        const statusResp = await fetch("/api/anilist/status");
        if (statusResp.ok) {
          const data = await statusResp.json();
          if (data.user_id) {
            await save({ user_id: data.user_id });
          }
          setMessage(`Connecté en tant que ${data.username}`);
        } else {
          setMessage("Token sauvegardé.");
        }
        setStatus("success");
        // Short delay so the user can read the confirmation before redirect.
        setTimeout(() => router.push("/settings?tab=anilist"), 2000);
      } catch (e) {
        setStatus("error");
        setMessage(e instanceof Error ? e.message : "Erreur inconnue");
      }
    }
    // Fire-and-forget: all outcomes are surfaced through component state.
    void handleCallback();
  }, [router]);

  return (
    <div className="min-h-screen flex items-center justify-center bg-background">
      <div className="text-center space-y-4 p-8">
        {status === "loading" && (
          <>
            <div className="w-8 h-8 border-2 border-primary border-t-transparent rounded-full animate-spin mx-auto" />
            <p className="text-muted-foreground">Connexion AniList en cours</p>
          </>
        )}
        {status === "success" && (
          <>
            <div className="w-12 h-12 rounded-full bg-success/15 flex items-center justify-center mx-auto">
              <svg className="w-6 h-6 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M5 13l4 4L19 7" />
              </svg>
            </div>
            <p className="text-success font-medium">{message}</p>
            <p className="text-sm text-muted-foreground">Redirection vers les paramètres</p>
          </>
        )}
        {status === "error" && (
          <>
            <div className="w-12 h-12 rounded-full bg-destructive/15 flex items-center justify-center mx-auto">
              <svg className="w-6 h-6 text-destructive" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M6 18L18 6M6 6l12 12" />
              </svg>
            </div>
            <p className="text-destructive font-medium">{message}</p>
            <a href="/settings" className="text-sm text-primary hover:underline">
              Retour aux paramètres
            </a>
          </>
        )}
      </div>
    </div>
  );
}

View File

@@ -0,0 +1,135 @@
import { fetchBooks, fetchAllSeries, BooksPageDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { BooksGrid } from "@/app/components/BookCard";
import { OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";
export const dynamic = "force-dynamic";
/**
 * Author detail page: books and series by a single author.
 *
 * The author name arrives URL-encoded in the route segment. Books are
 * filtered server-side via the API; series are fetched in one page of up
 * to 200 entries. Both fetches degrade to empty pages on API errors.
 */
export default async function AuthorDetailPage({
  params,
  searchParams,
}: {
  params: Promise<{ name: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const { name: encodedName } = await params;
  const authorName = decodeURIComponent(encodedName);
  const searchParamsAwaited = await searchParams;
  // Coerce a search param into a positive integer, falling back when the
  // value is missing, non-numeric (?page=abc), or non-positive (?limit=0,
  // which would otherwise make Math.ceil(total / limit) Infinity).
  const toPositiveInt = (value: string | string[] | undefined, fallback: number): number => {
    const parsed = typeof value === "string" ? parseInt(value, 10) : NaN;
    return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
  };
  const page = toPositiveInt(searchParamsAwaited.page, 1);
  const limit = toPositiveInt(searchParamsAwaited.limit, 20);
  // Fetch books by this author (server-side filtering via API) and series by this author
  const [booksPage, seriesPage] = await Promise.all([
    fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
    ),
    fetchAllSeries(undefined, undefined, undefined, 1, 200, undefined, undefined, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
    ),
  ]);
  const totalPages = Math.ceil(booksPage.total / limit);
  const authorSeries = seriesPage.items;
  return (
    <>
      {/* Breadcrumb */}
      <nav className="flex items-center gap-2 text-sm text-muted-foreground mb-6">
        <Link href="/authors" className="hover:text-foreground transition-colors">
          {t("authors.title")}
        </Link>
        <span>/</span>
        <span className="text-foreground font-medium">{authorName}</span>
      </nav>
      {/* Author Header */}
      <div className="flex items-center gap-4 mb-8">
        <div className="w-16 h-16 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
          <span className="text-2xl font-bold text-accent-foreground">
            {authorName.charAt(0).toUpperCase()}
          </span>
        </div>
        <div>
          <h1 className="text-3xl font-bold text-foreground">{authorName}</h1>
          <div className="flex items-center gap-4 mt-1">
            <span className="text-sm text-muted-foreground">
              {t("authors.bookCount", { count: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
            </span>
            {authorSeries.length > 0 && (
              <span className="text-sm text-muted-foreground">
                {t("authors.seriesCount", { count: String(authorSeries.length), plural: authorSeries.length !== 1 ? "s" : "" })}
              </span>
            )}
          </div>
        </div>
      </div>
      {/* Series Section */}
      {authorSeries.length > 0 && (
        <section className="mb-8">
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.seriesBy", { name: authorName })}
          </h2>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
            {authorSeries.map((s) => (
              <Link
                key={`${s.library_id}-${s.name}`}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200">
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={s.name}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name}
                    </h3>
                    <p className="text-xs text-muted-foreground mt-1">
                      {t("authors.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                    </p>
                  </div>
                </div>
              </Link>
            ))}
          </div>
        </section>
      )}
      {/* Books Section */}
      {booksPage.items.length > 0 && (
        <section>
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.booksBy", { name: authorName })}
          </h2>
          <BooksGrid books={booksPage.items} />
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={booksPage.total}
          />
        </section>
      )}
      {/* Empty State */}
      {booksPage.items.length === 0 && authorSeries.length === 0 && (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <p className="text-muted-foreground text-lg">
            {t("authors.noResults")}
          </p>
        </div>
      )}
    </>
  );
}

View File

@@ -0,0 +1,122 @@
import { fetchAuthors, AuthorsPageDto } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Link from "next/link";
export const dynamic = "force-dynamic";
/**
 * Authors index page: searchable, sortable, paginated list of authors.
 *
 * Search query, sort key, page and limit come from the URL search params;
 * the API call degrades to an empty page on errors.
 */
export default async function AuthorsPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  // Coerce a search param into a positive integer, falling back when the
  // value is missing, non-numeric (?page=abc), or non-positive (?limit=0,
  // which would otherwise make Math.ceil(total / limit) Infinity).
  const toPositiveInt = (value: string | string[] | undefined, fallback: number): number => {
    const parsed = typeof value === "string" ? parseInt(value, 10) : NaN;
    return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
  };
  const page = toPositiveInt(searchParamsAwaited.page, 1);
  const limit = toPositiveInt(searchParamsAwaited.limit, 20);
  const authorsPage = await fetchAuthors(
    searchQuery || undefined,
    page,
    limit,
    sort,
  ).catch(() => ({ items: [], total: 0, page: 1, limit }) as AuthorsPageDto);
  const totalPages = Math.ceil(authorsPage.total / limit);
  const hasFilters = searchQuery || sort;
  const sortOptions = [
    { value: "", label: t("authors.sortName") },
    { value: "books", label: t("authors.sortBooks") },
  ];
  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-violet-500" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
          </svg>
          {t("authors.title")}
        </h1>
      </div>
      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/authors"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("authors.searchPlaceholder") },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>
      {/* Results count */}
      <p className="text-sm text-muted-foreground mb-4">
        {authorsPage.total} {t("authors.title").toLowerCase()}
        {searchQuery && <> {t("authors.matchingQuery")} &quot;{searchQuery}&quot;</>}
      </p>
      {/* Authors List */}
      {authorsPage.items.length > 0 ? (
        <>
          <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 gap-4">
            {authorsPage.items.map((author) => (
              <Link
                key={author.name}
                href={`/authors/${encodeURIComponent(author.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 p-4">
                  <div className="flex items-center gap-3">
                    <div className="w-10 h-10 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
                      <span className="text-lg font-semibold text-violet-500">
                        {author.name.charAt(0).toUpperCase()}
                      </span>
                    </div>
                    <div className="min-w-0">
                      <h3 className="font-medium text-foreground truncate text-sm group-hover:text-violet-500 transition-colors" title={author.name}>
                        {author.name}
                      </h3>
                      <div className="flex items-center gap-3 mt-0.5">
                        <span className="text-xs text-muted-foreground">
                          {t("authors.bookCount", { count: String(author.book_count), plural: author.book_count !== 1 ? "s" : "" })}
                        </span>
                        <span className="text-xs text-muted-foreground">
                          {t("authors.seriesCount", { count: String(author.series_count), plural: author.series_count !== 1 ? "s" : "" })}
                        </span>
                      </div>
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={authorsPage.total}
          />
        </>
      ) : (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <div className="w-16 h-16 mb-4 text-muted-foreground/30">
            <svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
            </svg>
          </div>
          <p className="text-muted-foreground text-lg">
            {hasFilters ? t("authors.noResults") : t("authors.noAuthors")}
          </p>
        </div>
      )}
    </>
  );
}

View File

@@ -1,12 +1,16 @@
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "../../../lib/api";
import { BookPreview } from "../../components/BookPreview";
import { ConvertButton } from "../../components/ConvertButton";
import { MarkBookReadButton } from "../../components/MarkBookReadButton";
import { EditBookForm } from "../../components/EditBookForm";
import { SafeHtml } from "../../components/SafeHtml";
import { getServerTranslations } from "../../../lib/i18n/server";
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "@/lib/api";
import { BookPreview } from "@/app/components/BookPreview";
import { ConvertButton } from "@/app/components/ConvertButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import nextDynamic from "next/dynamic";
import { SafeHtml } from "@/app/components/SafeHtml";
import { getServerTranslations } from "@/lib/i18n/server";
import Image from "next/image";
import Link from "next/link";
const EditBookForm = nextDynamic(
() => import("@/app/components/EditBookForm").then(m => m.EditBookForm)
);
import { notFound } from "next/navigation";
export const dynamic = "force-dynamic";
@@ -95,7 +99,7 @@ export default async function BookDetailPage({
alt={t("bookDetail.coverOf", { title: book.title })}
fill
className="object-cover"
unoptimized
sizes="192px"
loading="lazy"
/>
</div>

View File

@@ -1,10 +1,10 @@
import { fetchBooks, searchBooks, fetchLibraries, BookDto, LibraryDto, SeriesHitDto, getBookCoverUrl } from "../../lib/api";
import { BooksGrid, EmptyState } from "../components/BookCard";
import { LiveSearchForm } from "../components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "../components/ui";
import { fetchBooks, searchBooks, fetchLibraries, BookDto, LibraryDto, SeriesHitDto, getBookCoverUrl } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Link from "next/link";
import Image from "next/image";
import { getServerTranslations } from "../../lib/i18n/server";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";
@@ -18,6 +18,8 @@ export default async function BooksPage({
const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
const format = typeof searchParamsAwaited.format === "string" ? searchParamsAwaited.format : undefined;
const metadataProvider = typeof searchParamsAwaited.metadata === "string" ? searchParamsAwaited.metadata : undefined;
const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
@@ -62,7 +64,7 @@ export default async function BooksPage({
totalHits = searchResponse.estimated_total_hits;
}
} else {
const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort).catch(() => ({
const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort, undefined, format, metadataProvider).catch(() => ({
items: [] as BookDto[],
total: 0,
page: 1,
@@ -91,12 +93,26 @@ export default async function BooksPage({
{ value: "read", label: t("status.read") },
];
const formatOptions = [
{ value: "", label: t("books.allFormats") },
{ value: "cbz", label: "CBZ" },
{ value: "cbr", label: "CBR" },
{ value: "pdf", label: "PDF" },
{ value: "epub", label: "EPUB" },
];
const metadataOptions = [
{ value: "", label: t("series.metadataAll") },
{ value: "linked", label: t("series.metadataLinked") },
{ value: "unlinked", label: t("series.metadataUnlinked") },
];
const sortOptions = [
{ value: "", label: t("books.sortTitle") },
{ value: "latest", label: t("books.sortLatest") },
];
const hasFilters = searchQuery || libraryId || readingStatus || sort;
const hasFilters = searchQuery || libraryId || readingStatus || format || metadataProvider || sort;
return (
<>
@@ -114,10 +130,12 @@ export default async function BooksPage({
<LiveSearchForm
basePath="/books"
fields={[
{ name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder"), className: "flex-1 w-full" },
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions, className: "w-full sm:w-48" },
{ name: "status", type: "select", label: t("books.status"), options: statusOptions, className: "w-full sm:w-40" },
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions, className: "w-full sm:w-40" },
{ name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder") },
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions },
{ name: "status", type: "select", label: t("books.status"), options: statusOptions },
{ name: "format", type: "select", label: t("books.format"), options: formatOptions },
{ name: "metadata", type: "select", label: t("series.metadata"), options: metadataOptions },
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
]}
/>
</CardContent>
@@ -152,7 +170,7 @@ export default async function BooksPage({
alt={t("books.coverOf", { name: s.name })}
fill
className="object-cover"
unoptimized
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
/>
</div>
<div className="p-2">

View File

@@ -0,0 +1,97 @@
import Link from "next/link";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, StatBox } from "@/app/components/ui";
import { QbittorrentProvider, QbittorrentDownloadButton } from "@/app/components/QbittorrentDownloadButton";
import type { DownloadDetectionReportDto, DownloadDetectionResultDto } from "@/lib/api";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
export function DownloadDetectionReportCard({ report, t }: { report: DownloadDetectionReportDto; t: TranslateFunction }) {
return (
<Card>
<CardHeader>
<CardTitle>{t("jobDetail.downloadDetectionReport")}</CardTitle>
<CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(report.total_series) })}</CardDescription>
</CardHeader>
<CardContent>
<div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
<StatBox value={report.found} label={t("jobDetail.downloadFound")} variant="success" />
<StatBox value={report.not_found} label={t("jobDetail.downloadNotFound")} />
<StatBox value={report.no_missing} label={t("jobDetail.downloadNoMissing")} variant="primary" />
<StatBox value={report.no_metadata} label={t("jobDetail.downloadNoMetadata")} />
<StatBox value={report.errors} label={t("jobDetail.errors")} variant={report.errors > 0 ? "error" : "default"} />
</div>
</CardContent>
</Card>
);
}
/**
 * Per-series breakdown of a download-detection job. For each series with
 * missing volumes, lists the candidate releases returned by the indexers
 * (title, indexer, seeders, size, matched volume tags) and, when a download
 * URL exists, a qBittorrent download button. Renders nothing when there are
 * no results. Wrapped in QbittorrentProvider so the buttons share context.
 *
 * `libraryId` is nullable: with a library we can link each series to its
 * detail page; without one the name is shown as plain text.
 */
export function DownloadDetectionResultsCard({ results, libraryId, t }: {
  results: DownloadDetectionResultDto[];
  libraryId: string | null;
  t: TranslateFunction;
}) {
  if (results.length === 0) return null;
  return (
    <QbittorrentProvider>
      <Card className="lg:col-span-2">
        <CardHeader>
          <CardTitle>{t("jobDetail.downloadAvailableReleases")}</CardTitle>
          <CardDescription>{t("jobDetail.downloadAvailableReleasesDesc", { count: String(results.length) })}</CardDescription>
        </CardHeader>
        <CardContent className="space-y-4 max-h-[700px] overflow-y-auto">
          {results.map((r) => (
            <div key={r.id} className="rounded-lg border border-success/20 bg-success/5 p-3">
              {/* Series header: link to the series page when a library is known. */}
              <div className="flex items-center justify-between gap-2 mb-2">
                {libraryId ? (
                  <Link
                    href={`/libraries/${libraryId}/series/${encodeURIComponent(r.series_name)}`}
                    className="font-semibold text-sm text-primary hover:underline truncate"
                  >
                    {r.series_name}
                  </Link>
                ) : (
                  <span className="font-semibold text-sm text-foreground truncate">{r.series_name}</span>
                )}
                <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap bg-warning/20 text-warning shrink-0">
                  {t("jobDetail.downloadMissingCount", { count: String(r.missing_count) })}
                </span>
              </div>
              {/* One row per candidate release for this series. */}
              {r.available_releases && r.available_releases.length > 0 && (
                <div className="space-y-1.5">
                  {r.available_releases.map((release, idx) => (
                    <div key={idx} className="flex items-center gap-2 p-2 rounded bg-background/60 border border-border/40">
                      <div className="flex-1 min-w-0">
                        <p className="text-xs font-mono text-foreground truncate" title={release.title}>{release.title}</p>
                        <div className="flex items-center gap-3 mt-1 flex-wrap">
                          {release.indexer && (
                            <span className="text-[10px] text-muted-foreground">{release.indexer}</span>
                          )}
                          {release.seeders != null && (
                            <span className="text-[10px] text-success font-medium">{release.seeders} {t("prowlarr.columnSeeders").toLowerCase()}</span>
                          )}
                          <span className="text-[10px] text-muted-foreground">
                            {/* assumes release.size is in bytes — TODO confirm against the DTO */}
                            {(release.size / 1024 / 1024).toFixed(0)} MB
                          </span>
                          <div className="flex items-center gap-1">
                            {release.matched_missing_volumes.map((vol) => (
                              <span key={vol} className="text-[10px] px-1.5 py-0.5 rounded-full bg-success/20 text-success font-medium">
                                T.{vol}
                              </span>
                            ))}
                          </div>
                        </div>
                      </div>
                      {release.download_url && (
                        <QbittorrentDownloadButton downloadUrl={release.download_url} releaseId={`${r.id}-${idx}`} />
                      )}
                    </div>
                  ))}
                </div>
              )}
            </div>
          ))}
        </CardContent>
      </Card>
    </QbittorrentProvider>
  );
}

View File

@@ -0,0 +1,31 @@
import { Card, CardHeader, CardTitle, CardDescription, CardContent } from "@/app/components/ui";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
// Shape of a single per-file error row recorded by a job.
interface JobError {
  id: string;
  file_path: string;
  error_message: string;
  created_at: string;
}

/**
 * Card listing every per-file error a job recorded: the offending path, the
 * error message, and the timestamp formatted with the caller's locale.
 * Renders nothing for clean runs.
 */
export function JobErrorsCard({ errors, t, locale }: { errors: JobError[]; t: TranslateFunction; locale: string }) {
  // No card at all when the job produced no errors.
  if (!errors.length) return null;

  const heading = t("jobDetail.fileErrors", { count: String(errors.length) });
  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{heading}</CardTitle>
        <CardDescription>{t("jobDetail.fileErrorsDesc")}</CardDescription>
      </CardHeader>
      <CardContent className="space-y-2 max-h-80 overflow-y-auto">
        {errors.map((entry) => (
          <div key={entry.id} className="p-3 bg-destructive/10 rounded-lg border border-destructive/20">
            <code className="block text-sm font-mono text-destructive mb-1">{entry.file_path}</code>
            <p className="text-sm text-destructive/80">{entry.error_message}</p>
            <span className="text-xs text-muted-foreground">{new Date(entry.created_at).toLocaleString(locale)}</span>
          </div>
        ))}
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,71 @@
import Link from "next/link";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, StatusBadge, JobTypeBadge } from "@/app/components/ui";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
interface JobOverviewCardProps {
  // Minimal slice of the job DTO this card reads.
  job: {
    id: string;
    type: string;
    status: string;
    library_id: string | null;
    book_id: string | null;
    started_at: string | null;
    finished_at: string | null;
  };
  // Human-readable label/description for job.type, resolved by the caller.
  typeInfo: { label: string; description: string | null };
  t: TranslateFunction;
  formatDuration: (start: string, end: string | null) => string;
}

/**
 * "Overview" card of the job-detail page: ID, type badge, status badge,
 * library, optional book link, and — once the job has started — its duration.
 */
export function JobOverviewCard({ job, typeInfo, t, formatDuration }: JobOverviewCardProps) {
  // Each row draws a bottom divider except the last visible one, so the
  // divider on the library/book rows depends on which rows follow them.
  const divider = "border-b border-border/60";
  const libraryRowDivider = job.book_id || job.started_at ? divider : "";
  const bookRowDivider = job.started_at ? divider : "";

  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.overview")}</CardTitle>
        {typeInfo.description && (
          <CardDescription>{typeInfo.description}</CardDescription>
        )}
      </CardHeader>
      <CardContent className="space-y-3">
        <div className="flex items-center justify-between py-2 border-b border-border/60">
          <span className="text-sm text-muted-foreground">ID</span>
          <code className="px-2 py-1 bg-muted rounded font-mono text-sm text-foreground">{job.id}</code>
        </div>
        <div className="flex items-center justify-between py-2 border-b border-border/60">
          <span className="text-sm text-muted-foreground">{t("jobsList.type")}</span>
          <div className="flex items-center gap-2">
            <JobTypeBadge type={job.type} />
            <span className="text-sm text-muted-foreground">{typeInfo.label}</span>
          </div>
        </div>
        <div className="flex items-center justify-between py-2 border-b border-border/60">
          <span className="text-sm text-muted-foreground">{t("jobsList.status")}</span>
          <StatusBadge status={job.status} />
        </div>
        <div className={`flex items-center justify-between py-2 ${libraryRowDivider}`}>
          <span className="text-sm text-muted-foreground">{t("jobDetail.library")}</span>
          {/* Jobs without a library are "all libraries" jobs. */}
          <span className="text-sm text-foreground">{job.library_id || t("jobDetail.allLibraries")}</span>
        </div>
        {job.book_id && (
          <div className={`flex items-center justify-between py-2 ${bookRowDivider}`}>
            <span className="text-sm text-muted-foreground">{t("jobDetail.book")}</span>
            <Link
              href={`/books/${job.book_id}`}
              className="text-sm text-primary hover:text-primary/80 font-mono hover:underline"
            >
              {job.book_id.slice(0, 8)}
            </Link>
          </div>
        )}
        {job.started_at && (
          <div className="flex items-center justify-between py-2">
            <span className="text-sm text-muted-foreground">{t("jobsList.duration")}</span>
            <span className="text-sm font-semibold text-foreground">
              {formatDuration(job.started_at, job.finished_at)}
            </span>
          </div>
        )}
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,141 @@
import { Card, CardHeader, CardTitle, CardDescription, CardContent, StatBox, ProgressBar } from "@/app/components/ui";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
interface JobProgressCardProps {
job: {
type: string;
status: string;
started_at: string | null;
finished_at: string | null;
total_files: number | null;
processed_files: number | null;
progress_percent: number | null;
current_file: string | null;
stats_json: {
scanned_files: number;
indexed_files: number;
removed_files: number;
errors: number;
warnings: number;
} | null;
};
isThumbnailOnly: boolean;
progressTitle: string;
progressDescription: string | undefined;
t: TranslateFunction;
formatDuration: (start: string, end: string | null) => string;
formatSpeed: (count: number, durationMs: number) => string;
}
export function JobProgressCard({ job, isThumbnailOnly, progressTitle, progressDescription, t }: JobProgressCardProps) {
const isCompleted = job.status === "success";
const isPhase2 = job.status === "extracting_pages" || job.status === "generating_thumbnails";
const showProgressCard =
(isCompleted || job.status === "failed" || job.status === "running" || isPhase2) &&
(job.total_files != null || !!job.current_file);
if (!showProgressCard) return null;
return (
<Card>
<CardHeader>
<CardTitle>{progressTitle}</CardTitle>
{progressDescription && <CardDescription>{progressDescription}</CardDescription>}
</CardHeader>
<CardContent>
{job.total_files != null && job.total_files > 0 && (
<>
<ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
<div className="grid grid-cols-3 gap-4">
<StatBox
value={job.processed_files ?? 0}
label={isThumbnailOnly || isPhase2 ? t("jobDetail.generated") : t("jobDetail.processed")}
variant="primary"
/>
<StatBox value={job.total_files} label={t("jobDetail.total")} />
<StatBox
value={Math.max(0, job.total_files - (job.processed_files ?? 0))}
label={t("jobDetail.remaining")}
variant={isCompleted ? "default" : "warning"}
/>
</div>
</>
)}
{job.current_file && (
<div className="mt-4 p-3 bg-muted/50 rounded-lg">
<span className="text-xs text-muted-foreground uppercase tracking-wide">{t("jobDetail.currentFile")}</span>
<code className="block mt-1 text-xs font-mono text-foreground break-all">{job.current_file}</code>
</div>
)}
</CardContent>
</Card>
);
}
/**
 * Post-run statistics for an indexing job (scanned / indexed / removed /
 * warnings / errors), with duration and scan rate in the description.
 * Renders nothing until stats_json is available.
 */
export function IndexStatsCard({ job, t, formatDuration, formatSpeed, durationMs }: {
  job: JobProgressCardProps["job"];
  t: TranslateFunction;
  formatDuration: (start: string, end: string | null) => string;
  formatSpeed: (count: number, durationMs: number) => string;
  durationMs: number;
}) {
  // Narrow once; `stats` is non-null below.
  const stats = job.stats_json;
  if (!stats) return null;

  const scanned = stats.scanned_files;
  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.indexStats")}</CardTitle>
        {job.started_at && (
          <CardDescription>
            {formatDuration(job.started_at, job.finished_at)}
            {scanned > 0 && ` · ${formatSpeed(scanned, durationMs)} scan rate`}
          </CardDescription>
        )}
      </CardHeader>
      <CardContent>
        <div className="grid grid-cols-2 sm:grid-cols-5 gap-4">
          <StatBox value={stats.scanned_files} label={t("jobDetail.scanned")} variant="success" />
          <StatBox value={stats.indexed_files} label={t("jobDetail.indexed")} variant="primary" />
          <StatBox value={stats.removed_files} label={t("jobDetail.removed")} variant="warning" />
          <StatBox value={stats.warnings ?? 0} label={t("jobDetail.warnings")} variant={(stats.warnings ?? 0) > 0 ? "warning" : "default"} />
          <StatBox value={stats.errors} label={t("jobDetail.errors")} variant={stats.errors > 0 ? "error" : "default"} />
        </div>
      </CardContent>
    </Card>
  );
}
/**
 * Post-run statistics for a thumbnail job: generated vs. total counts, plus
 * duration and throughput in the description. Renders nothing until the job
 * knows its total file count.
 */
export function ThumbnailStatsCard({ job, t, formatDuration, formatSpeed, durationMs }: {
  job: JobProgressCardProps["job"];
  t: TranslateFunction;
  formatDuration: (start: string, end: string | null) => string;
  formatSpeed: (count: number, durationMs: number) => string;
  durationMs: number;
}) {
  const total = job.total_files;
  if (total == null) return null;

  const generated = job.processed_files ?? 0;
  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.thumbnailStats")}</CardTitle>
        {job.started_at && (
          <CardDescription>
            {formatDuration(job.started_at, job.finished_at)}
            {generated > 0 && ` · ${formatSpeed(generated, durationMs)} thumbnails/s`}
          </CardDescription>
        )}
      </CardHeader>
      <CardContent>
        <div className="grid grid-cols-2 gap-4">
          {/* Fall back to the total when processed_files was never reported. */}
          <StatBox value={job.processed_files ?? total} label={t("jobDetail.generated")} variant="success" />
          <StatBox value={total} label={t("jobDetail.total")} />
        </div>
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,131 @@
import type { MetadataBatchReportDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusPushReportDto, DownloadDetectionReportDto } from "@/lib/api";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
interface JobSummaryBannerProps {
  // Slice of the job DTO read by the banner.
  job: {
    type: string;
    status: string;
    started_at: string | null;
    finished_at: string | null;
    error_opt: string | null;
    stats_json: {
      scanned_files: number;
      indexed_files: number;
      removed_files: number;
      errors: number;
      warnings: number;
    } | null;
    total_files: number | null;
    processed_files: number | null;
  };
  // Per-job-type reports; at most one is non-null for a given job.
  batchReport: MetadataBatchReportDto | null;
  refreshReport: MetadataRefreshReportDto | null;
  readingStatusReport: ReadingStatusMatchReportDto | null;
  readingStatusPushReport: ReadingStatusPushReportDto | null;
  downloadDetectionReport: DownloadDetectionReportDto | null;
  t: TranslateFunction;
  formatDuration: (start: string, end: string | null) => string;
}

/**
 * Colored banner summarizing a terminal job state:
 * - success  → green banner with a one-line, job-type-specific count summary,
 * - failed   → red banner with the error message,
 * - cancelled→ muted banner,
 * - anything else (pending/running) → no banner (returns null).
 * The success summary picks its counts from whichever report matches job.type.
 */
export function JobSummaryBanner({
  job, batchReport, refreshReport, readingStatusReport, readingStatusPushReport, downloadDetectionReport, t, formatDuration,
}: JobSummaryBannerProps) {
  const isCompleted = job.status === "success";
  const isFailed = job.status === "failed";
  const isCancelled = job.status === "cancelled";
  // Job-type flags select which report's numbers feed the success summary.
  const isMetadataBatch = job.type === "metadata_batch";
  const isMetadataRefresh = job.type === "metadata_refresh";
  const isReadingStatusMatch = job.type === "reading_status_match";
  const isReadingStatusPush = job.type === "reading_status_push";
  const isDownloadDetection = job.type === "download_detection";
  const isThumbnailOnly = job.type === "thumbnail_rebuild" || job.type === "thumbnail_regenerate";
  // Success banner — requires started_at so the duration can be computed.
  if (isCompleted && job.started_at) {
    return (
      <div className="mb-6 p-4 rounded-xl bg-success/10 border border-success/30 flex items-start gap-3">
        <svg className="w-5 h-5 text-success mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
        </svg>
        <div className="text-sm text-success">
          <span className="font-semibold">{t("jobDetail.completedIn", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
          {isMetadataBatch && batchReport && (
            <span className="ml-2 text-success/80">
              {batchReport.auto_matched} {t("jobDetail.autoMatched").toLowerCase()}, {batchReport.already_linked} {t("jobDetail.alreadyLinked").toLowerCase()}, {batchReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {batchReport.errors} {t("jobDetail.errors").toLowerCase()}
            </span>
          )}
          {isMetadataRefresh && refreshReport && (
            <span className="ml-2 text-success/80">
              {refreshReport.refreshed} {t("jobDetail.refreshed").toLowerCase()}, {refreshReport.unchanged} {t("jobDetail.unchanged").toLowerCase()}, {refreshReport.errors} {t("jobDetail.errors").toLowerCase()}
            </span>
          )}
          {isReadingStatusMatch && readingStatusReport && (
            <span className="ml-2 text-success/80">
              {readingStatusReport.linked} {t("jobDetail.linked").toLowerCase()}, {readingStatusReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {readingStatusReport.ambiguous} {t("jobDetail.ambiguous").toLowerCase()}, {readingStatusReport.errors} {t("jobDetail.errors").toLowerCase()}
            </span>
          )}
          {isReadingStatusPush && readingStatusPushReport && (
            <span className="ml-2 text-success/80">
              {readingStatusPushReport.pushed} {t("jobDetail.pushed").toLowerCase()}, {readingStatusPushReport.no_books} {t("jobDetail.noBooks").toLowerCase()}, {readingStatusPushReport.errors} {t("jobDetail.errors").toLowerCase()}
            </span>
          )}
          {isDownloadDetection && downloadDetectionReport && (
            <span className="ml-2 text-success/80">
              {downloadDetectionReport.found} {t("jobDetail.downloadFound").toLowerCase()}, {downloadDetectionReport.not_found} {t("jobDetail.downloadNotFound").toLowerCase()}, {downloadDetectionReport.errors} {t("jobDetail.errors").toLowerCase()}
            </span>
          )}
          {/* Fallback for index/rebuild jobs: summarize from stats_json. */}
          {!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !isDownloadDetection && job.stats_json && (
            <span className="ml-2 text-success/80">
              {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
              {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
              {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
              {job.stats_json.errors > 0 && `, ${job.stats_json.errors} ${t("jobDetail.errors").toLowerCase()}`}
              {job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
            </span>
          )}
          {/* Thumbnail-only jobs have no stats_json; use the processed counter. */}
          {!isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !job.stats_json && isThumbnailOnly && job.total_files != null && (
            <span className="ml-2 text-success/80">
              {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
            </span>
          )}
        </div>
      </div>
    );
  }
  // Failure banner: elapsed time (when available) plus the raw error text.
  if (isFailed) {
    return (
      <div className="mb-6 p-4 rounded-xl bg-destructive/10 border border-destructive/30 flex items-start gap-3">
        <svg className="w-5 h-5 text-destructive mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
        </svg>
        <div className="text-sm text-destructive">
          <span className="font-semibold">{t("jobDetail.jobFailed")}</span>
          {job.started_at && (
            <span className="ml-2 text-destructive/80">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
          )}
          {job.error_opt && (
            <p className="mt-1 text-destructive/70 font-mono text-xs break-all">{job.error_opt}</p>
          )}
        </div>
      </div>
    );
  }
  // Cancellation banner (reuses the "failedAfter" duration string).
  if (isCancelled) {
    return (
      <div className="mb-6 p-4 rounded-xl bg-muted border border-border flex items-start gap-3">
        <svg className="w-5 h-5 text-muted-foreground mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M18.364 18.364A9 9 0 005.636 5.636m12.728 12.728A9 9 0 015.636 5.636m12.728 12.728L5.636 5.636" />
        </svg>
        <span className="text-sm text-muted-foreground">
          <span className="font-semibold">{t("jobDetail.cancelled")}</span>
          {job.started_at && (
            <span className="ml-2">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
          )}
        </span>
      </div>
    );
  }
  // Pending/running jobs get no banner.
  return null;
}

View File

@@ -0,0 +1,167 @@
import { Card, CardHeader, CardTitle, CardContent } from "@/app/components/ui";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
interface JobTimelineCardProps {
  // Slice of the job DTO read by the timeline.
  job: {
    type: string;
    status: string;
    created_at: string;
    started_at: string | null;
    finished_at: string | null;
    // Phase boundaries are recorded as timestamps; null means "not reached".
    phase2_started_at: string | null;
    generating_thumbnails_started_at: string | null;
    stats_json: {
      scanned_files: number;
      indexed_files: number;
      removed_files: number;
      warnings: number;
    } | null;
    total_files: number | null;
    processed_files: number | null;
  };
  isThumbnailOnly: boolean;
  t: TranslateFunction;
  locale: string;
  formatDuration: (start: string, end: string | null) => string;
}

/**
 * Vertical timeline of a job's lifecycle: created → (phase 1 → phase 2a →
 * phase 2b | started) → finished. Which milestones render depends entirely
 * on which timestamps are set; a pulsing dot marks the phase still running.
 * Thumbnail-only jobs skip phase 2a and label phase 2b with the job type.
 */
export function JobTimelineCard({ job, isThumbnailOnly, t, locale, formatDuration }: JobTimelineCardProps) {
  const isCompleted = job.status === "success";
  const isFailed = job.status === "failed";
  const isExtractingPages = job.status === "extracting_pages";
  const isThumbnailPhase = job.status === "generating_thumbnails";
  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.timeline")}</CardTitle>
      </CardHeader>
      <CardContent>
        <div className="relative">
          {/* Vertical connector line behind the milestone dots. */}
          <div className="absolute left-[7px] top-2 bottom-2 w-px bg-border" />
          <div className="space-y-5">
            {/* Created */}
            <div className="flex items-start gap-4">
              <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-muted border-2 border-border shrink-0 z-10" />
              <div className="flex-1 min-w-0">
                <span className="text-sm font-medium text-foreground">{t("jobDetail.created")}</span>
                <p className="text-xs text-muted-foreground">{new Date(job.created_at).toLocaleString(locale)}</p>
              </div>
            </div>
            {/* Phase 1 start — only shown once phase 2 exists, so its duration is bounded. */}
            {job.started_at && job.phase2_started_at && (
              <div className="flex items-start gap-4">
                <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-primary shrink-0 z-10" />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">{t("jobDetail.phase1")}</span>
                  <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                  <p className="text-xs text-primary/80 font-medium mt-0.5">
                    {t("jobDetail.duration", { duration: formatDuration(job.started_at, job.phase2_started_at) })}
                    {job.stats_json && (
                      <span className="text-muted-foreground font-normal ml-1">
                        · {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
                        {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
                        {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
                      </span>
                    )}
                  </p>
                </div>
              </div>
            )}
            {/* Phase 2a — Extracting pages (skipped for thumbnail-only jobs) */}
            {job.phase2_started_at && !isThumbnailOnly && (
              <div className="flex items-start gap-4">
                {/* Pulsing dot while 2a is still the latest milestone. */}
                <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                  job.generating_thumbnails_started_at || job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                }`} />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">{t("jobDetail.phase2a")}</span>
                  <p className="text-xs text-muted-foreground">{new Date(job.phase2_started_at).toLocaleString(locale)}</p>
                  <p className="text-xs text-primary/80 font-medium mt-0.5">
                    {/* 2a ends when 2b starts, or at job end, or is still open (null). */}
                    {t("jobDetail.duration", { duration: formatDuration(job.phase2_started_at, job.generating_thumbnails_started_at ?? job.finished_at ?? null) })}
                    {!job.generating_thumbnails_started_at && !job.finished_at && isExtractingPages && (
                      <span className="text-muted-foreground font-normal ml-1">· {t("jobDetail.inProgress")}</span>
                    )}
                  </p>
                </div>
              </div>
            )}
            {/* Phase 2b — Generating thumbnails (thumbnail-only jobs enter via phase2_started_at) */}
            {(job.generating_thumbnails_started_at || (job.phase2_started_at && isThumbnailOnly)) && (
              <div className="flex items-start gap-4">
                <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                  job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                }`} />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">
                    {isThumbnailOnly ? t("jobType.thumbnail_rebuild") : t("jobDetail.phase2b")}
                  </span>
                  <p className="text-xs text-muted-foreground">
                    {/* Timestamp falls back to phase2_started_at for thumbnail-only jobs. */}
                    {(job.generating_thumbnails_started_at ? new Date(job.generating_thumbnails_started_at) : job.phase2_started_at ? new Date(job.phase2_started_at) : null)?.toLocaleString(locale)}
                  </p>
                  {(job.generating_thumbnails_started_at || job.finished_at) && (
                    <p className="text-xs text-primary/80 font-medium mt-0.5">
                      {t("jobDetail.duration", { duration: formatDuration(
                        job.generating_thumbnails_started_at ?? job.phase2_started_at!,
                        job.finished_at ?? null
                      ) })}
                      {job.total_files != null && job.total_files > 0 && (
                        <span className="text-muted-foreground font-normal ml-1">
                          · {job.processed_files ?? job.total_files} {t("jobType.thumbnail_rebuild").toLowerCase()}
                        </span>
                      )}
                    </p>
                  )}
                  {!job.finished_at && isThumbnailPhase && (
                    <span className="text-xs text-muted-foreground">{t("jobDetail.inProgress")}</span>
                  )}
                </div>
              </div>
            )}
            {/* Started — for jobs without phase2 */}
            {job.started_at && !job.phase2_started_at && (
              <div className="flex items-start gap-4">
                <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                  job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                }`} />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">{t("jobDetail.started")}</span>
                  <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                </div>
              </div>
            )}
            {/* Pending */}
            {!job.started_at && (
              <div className="flex items-start gap-4">
                <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-warning shrink-0 z-10" />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">{t("jobDetail.pendingStart")}</span>
                </div>
              </div>
            )}
            {/* Finished — dot color and label reflect the terminal status. */}
            {job.finished_at && (
              <div className="flex items-start gap-4">
                <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                  isCompleted ? "bg-success" : isFailed ? "bg-destructive" : "bg-muted"
                }`} />
                <div className="flex-1 min-w-0">
                  <span className="text-sm font-medium text-foreground">
                    {isCompleted ? t("jobDetail.finished") : isFailed ? t("jobDetail.failed") : t("jobDetail.cancelled")}
                  </span>
                  <p className="text-xs text-muted-foreground">{new Date(job.finished_at).toLocaleString(locale)}</p>
                </div>
              </div>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,245 @@
import Link from "next/link";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, StatBox } from "@/app/components/ui";
import type { MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto } from "@/lib/api";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
export function MetadataBatchReportCard({ report, t }: { report: MetadataBatchReportDto; t: TranslateFunction }) {
return (
<Card>
<CardHeader>
<CardTitle>{t("jobDetail.batchReport")}</CardTitle>
<CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(report.total_series) })}</CardDescription>
</CardHeader>
<CardContent>
<div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
<StatBox value={report.auto_matched} label={t("jobDetail.autoMatched")} variant="success" />
<StatBox value={report.already_linked} label={t("jobDetail.alreadyLinked")} variant="primary" />
<StatBox value={report.no_results} label={t("jobDetail.noResults")} />
<StatBox value={report.too_many_results} label={t("jobDetail.tooManyResults")} variant="warning" />
<StatBox value={report.low_confidence} label={t("jobDetail.lowConfidence")} variant="warning" />
<StatBox value={report.errors} label={t("jobDetail.errors")} variant={report.errors > 0 ? "error" : "default"} />
</div>
</CardContent>
</Card>
);
}
/**
 * Per-series result list for a metadata batch job. Each row is color-coded by
 * its match status and shows the provider used, candidate count, confidence,
 * the best candidate's title and any error message. Renders nothing when
 * there are no results.
 *
 * Fix vs. previous version: the best-candidate fallback used
 * `best_candidate_json.toString()`, which renders "[object Object]" for plain
 * objects; it now falls back to `JSON.stringify` so something useful appears.
 */
export function MetadataBatchResultsCard({ results, libraryId, t }: {
  results: MetadataBatchResultDto[];
  libraryId: string | null;
  t: TranslateFunction;
}) {
  if (results.length === 0) return null;

  // Lookup tables replace the original nested ternary chains; unknown
  // statuses fall through to the muted defaults via `??`.
  const containerClass: Record<string, string> = {
    auto_matched: "bg-success/10 border-success/20",
    already_linked: "bg-primary/10 border-primary/20",
    error: "bg-destructive/10 border-destructive/20",
  };
  const badgeClass: Record<string, string> = {
    auto_matched: "bg-success/20 text-success",
    already_linked: "bg-primary/20 text-primary",
    no_results: "bg-muted text-muted-foreground",
    too_many_results: "bg-amber-500/15 text-amber-600",
    low_confidence: "bg-amber-500/15 text-amber-600",
    error: "bg-destructive/20 text-destructive",
  };
  const badgeLabel: Record<string, string> = {
    auto_matched: t("jobDetail.autoMatched"),
    already_linked: t("jobDetail.alreadyLinked"),
    no_results: t("jobDetail.noResults"),
    too_many_results: t("jobDetail.tooManyResults"),
    low_confidence: t("jobDetail.lowConfidence"),
    error: t("common.error"),
  };

  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
        <CardDescription>{t("jobDetail.seriesProcessed", { count: String(results.length) })}</CardDescription>
      </CardHeader>
      <CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
        {results.map((r) => (
          <div
            key={r.id}
            className={`p-3 rounded-lg border ${containerClass[r.status] ?? "bg-muted/50 border-border/60"}`}
          >
            <div className="flex items-center justify-between gap-2">
              {libraryId ? (
                <Link
                  href={`/libraries/${libraryId}/series/${encodeURIComponent(r.series_name)}`}
                  className="font-medium text-sm text-primary hover:underline truncate"
                >
                  {r.series_name}
                </Link>
              ) : (
                <span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
              )}
              <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${badgeClass[r.status] ?? "bg-muted text-muted-foreground"}`}>
                {/* Unknown statuses are shown verbatim rather than hidden. */}
                {badgeLabel[r.status] ?? r.status}
              </span>
            </div>
            <div className="flex items-center gap-3 mt-1 text-xs text-muted-foreground">
              {r.provider_used && (
                <span>{r.provider_used}{r.fallback_used ? ` ${t("metadata.fallbackUsed")}` : ""}</span>
              )}
              {r.candidates_count > 0 && (
                <span>{r.candidates_count} {t("jobDetail.candidates", { plural: r.candidates_count > 1 ? "s" : "" })}</span>
              )}
              {r.best_confidence != null && (
                <span>{Math.round(r.best_confidence * 100)}% {t("jobDetail.confidence")}</span>
              )}
            </div>
            {r.best_candidate_json && (
              <p className="text-xs text-muted-foreground mt-1">
                {t("jobDetail.match", {
                  // JSON.stringify fallback instead of toString(), which would
                  // render "[object Object]" for object payloads.
                  title: (r.best_candidate_json as { title?: string }).title || JSON.stringify(r.best_candidate_json),
                })}
              </p>
            )}
            {r.error_message && (
              <p className="text-xs text-destructive/80 mt-1">{r.error_message}</p>
            )}
          </div>
        ))}
      </CardContent>
    </Card>
  );
}
export function MetadataRefreshReportCard({ report, t }: { report: MetadataRefreshReportDto; t: TranslateFunction }) {
return (
<Card>
<CardHeader>
<CardTitle>{t("jobDetail.refreshReport")}</CardTitle>
<CardDescription>{t("jobDetail.refreshReportDesc", { count: String(report.total_links) })}</CardDescription>
</CardHeader>
<CardContent>
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4">
<StatBox
value={report.refreshed}
label={t("jobDetail.refreshed")}
variant="success"
icon={
<svg className="w-6 h-6 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
</svg>
}
/>
<StatBox value={report.unchanged} label={t("jobDetail.unchanged")} />
<StatBox value={report.errors} label={t("jobDetail.errors")} variant={report.errors > 0 ? "error" : "default"} />
<StatBox value={report.total_links} label={t("jobDetail.total")} />
</div>
</CardContent>
</Card>
);
}
/**
 * Field-level change log for a metadata refresh job: one card per series,
 * showing series-level diffs and per-book diffs as "old → new" pairs.
 * Series names link to the library page when a libraryId is available.
 * Renders nothing when the job produced no changes at all.
 */
export function MetadataRefreshChangesCard({ report, libraryId, t }: {
  report: MetadataRefreshReportDto;
  libraryId: string | null;
  t: TranslateFunction;
}) {
  if (report.changes.length === 0) return null;
  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{t("jobDetail.refreshChanges")}</CardTitle>
        <CardDescription>{t("jobDetail.refreshChangesDesc", { count: String(report.changes.length) })}</CardDescription>
      </CardHeader>
      <CardContent className="space-y-3 max-h-[600px] overflow-y-auto">
        {report.changes.map((r, idx) => (
          <div
            key={idx}
            className={`p-3 rounded-lg border ${
              r.status === "updated" ? "bg-success/10 border-success/20" :
              r.status === "error" ? "bg-destructive/10 border-destructive/20" :
              "bg-muted/50 border-border/60"
            }`}
          >
            <div className="flex items-center justify-between gap-2">
              {libraryId ? (
                <Link
                  href={`/libraries/${libraryId}/series/${encodeURIComponent(r.series_name)}`}
                  className="font-medium text-sm text-primary hover:underline truncate"
                >
                  {r.series_name}
                </Link>
              ) : (
                <span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
              )}
              <div className="flex items-center gap-2">
                <span className="text-[10px] text-muted-foreground">{r.provider}</span>
                <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
                  r.status === "updated" ? "bg-success/20 text-success" :
                  r.status === "error" ? "bg-destructive/20 text-destructive" :
                  "bg-muted text-muted-foreground"
                }`}>
                  {r.status === "updated" ? t("jobDetail.refreshed") :
                   r.status === "error" ? t("common.error") :
                   t("jobDetail.unchanged")}
                </span>
              </div>
            </div>
            {r.error && (
              <p className="text-xs text-destructive/80 mt-1">{r.error}</p>
            )}
            {r.series_changes.length > 0 && (
              <div className="mt-2">
                <span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">{t("metadata.seriesLabel")}</span>
                <div className="mt-1 space-y-1">
                  {r.series_changes.map((c, ci) => (
                    <div key={ci} className="flex items-start gap-2 text-xs">
                      {/* NOTE(review): `as never` silences the translation-key union check; falls back to the raw field name */}
                      <span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
                      <span className="text-muted-foreground line-through truncate max-w-[200px]" title={String(c.old ?? "—")}>
                        {c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old)) : "—"}
                      </span>
                      {/* Fix: separator span was empty — restore the old → new arrow glyph */}
                      <span className="text-success shrink-0">→</span>
                      <span className="text-success truncate max-w-[200px]" title={String(c.new ?? "—")}>
                        {c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new)) : "—"}
                      </span>
                    </div>
                  ))}
                </div>
              </div>
            )}
            {r.book_changes.length > 0 && (
              <div className="mt-2">
                <span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">
                  {t("metadata.booksLabel")} ({r.book_changes.length})
                </span>
                <div className="mt-1 space-y-2">
                  {r.book_changes.map((b, bi) => (
                    <div key={bi} className="pl-2 border-l-2 border-border/60">
                      <Link
                        href={`/books/${b.book_id}`}
                        className="text-xs text-primary hover:underline font-medium"
                      >
                        {b.volume != null && <span className="text-muted-foreground mr-1">T.{b.volume}</span>}
                        {b.title}
                      </Link>
                      <div className="mt-0.5 space-y-0.5">
                        {b.changes.map((c, ci) => (
                          <div key={ci} className="flex items-start gap-2 text-xs">
                            <span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
                            {/* Book values are clipped to 60 chars; the full value stays available via the title tooltip */}
                            <span className="text-muted-foreground line-through truncate max-w-[150px]" title={String(c.old ?? "—")}>
                              {c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old).substring(0, 60)) : "—"}
                            </span>
                            {/* Fix: separator span was empty — restore the old → new arrow glyph */}
                            <span className="text-success shrink-0">→</span>
                            <span className="text-success truncate max-w-[150px]" title={String(c.new ?? "—")}>
                              {c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new).substring(0, 60)) : "—"}
                            </span>
                          </div>
                        ))}
                      </div>
                    </div>
                  ))}
                </div>
              </div>
            )}
          </div>
        ))}
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,195 @@
import Link from "next/link";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, StatBox } from "@/app/components/ui";
import type { ReadingStatusMatchReportDto, ReadingStatusMatchResultDto, ReadingStatusPushReportDto, ReadingStatusPushResultDto } from "@/lib/api";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
/**
 * Summary counters for a reading-status match job: how many series were
 * linked, already linked, ambiguous, without results, or in error.
 */
export function ReadingStatusMatchReportCard({ report, t }: { report: ReadingStatusMatchReportDto; t: TranslateFunction }) {
  const { linked, already_linked, no_results, ambiguous, errors, total_series } = report;
  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.readingStatusMatchReport")}</CardTitle>
        <CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(total_series) })}</CardDescription>
      </CardHeader>
      <CardContent>
        <div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
          <StatBox value={linked} label={t("jobDetail.linked")} variant="success" />
          <StatBox value={already_linked} label={t("jobDetail.alreadyLinked")} variant="primary" />
          <StatBox value={no_results} label={t("jobDetail.noResults")} />
          <StatBox value={ambiguous} label={t("jobDetail.ambiguous")} variant="warning" />
          {/* Errors box only turns red when at least one error occurred. */}
          <StatBox value={errors} label={t("jobDetail.errors")} variant={errors > 0 ? "error" : "default"} />
        </div>
      </CardContent>
    </Card>
  );
}
/**
 * Per-series outcome list for a reading-status match job. Each row shows the
 * series (linked to its library page when libraryId is known), a status badge,
 * the matched AniList entry when one was linked, and any error message.
 * Renders nothing when there are no results.
 */
export function ReadingStatusMatchResultsCard({ results, libraryId, t }: {
  results: ReadingStatusMatchResultDto[];
  libraryId: string | null;
  t: TranslateFunction;
}) {
  if (results.length === 0) return null;
  // Row container tint per status; unknown statuses fall back to the muted look.
  const rowClasses: Record<string, string> = {
    linked: "bg-success/10 border-success/20",
    already_linked: "bg-primary/10 border-primary/20",
    error: "bg-destructive/10 border-destructive/20",
    ambiguous: "bg-amber-500/10 border-amber-500/20",
  };
  // Badge colors per status.
  const badgeClasses: Record<string, string> = {
    linked: "bg-success/20 text-success",
    already_linked: "bg-primary/20 text-primary",
    no_results: "bg-muted text-muted-foreground",
    ambiguous: "bg-amber-500/15 text-amber-600",
    error: "bg-destructive/20 text-destructive",
  };
  // Translated badge label; unknown statuses render verbatim.
  const badgeLabel = (status: string): string => {
    switch (status) {
      case "linked": return t("jobDetail.linked");
      case "already_linked": return t("jobDetail.alreadyLinked");
      case "no_results": return t("jobDetail.noResults");
      case "ambiguous": return t("jobDetail.ambiguous");
      case "error": return t("common.error");
      default: return status;
    }
  };
  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
        <CardDescription>{t("jobDetail.seriesProcessed", { count: String(results.length) })}</CardDescription>
      </CardHeader>
      <CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
        {results.map((result) => (
          <div
            key={result.id}
            className={`p-3 rounded-lg border ${rowClasses[result.status] ?? "bg-muted/50 border-border/60"}`}
          >
            <div className="flex items-center justify-between gap-2">
              {libraryId ? (
                <Link
                  href={`/libraries/${libraryId}/series/${encodeURIComponent(result.series_name)}`}
                  className="font-medium text-sm text-primary hover:underline truncate"
                >
                  {result.series_name}
                </Link>
              ) : (
                <span className="font-medium text-sm text-foreground truncate">{result.series_name}</span>
              )}
              <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${badgeClasses[result.status] ?? "bg-muted text-muted-foreground"}`}>
                {badgeLabel(result.status)}
              </span>
            </div>
            {/* Successfully linked rows show the AniList entry (external link when a URL exists). */}
            {result.status === "linked" && result.anilist_title && (
              <div className="mt-1 flex items-center gap-1.5 text-xs text-muted-foreground">
                <svg className="w-3 h-3 text-success shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13.828 10.172a4 4 0 00-5.656 0l-4 4a4 4 0 105.656 5.656l1.102-1.101m-.758-4.899a4 4 0 005.656 0l4-4a4 4 0 00-5.656-5.656l-1.1 1.1" />
                </svg>
                {result.anilist_url ? (
                  <a href={result.anilist_url} target="_blank" rel="noopener noreferrer" className="text-success hover:underline">
                    {result.anilist_title}
                  </a>
                ) : (
                  <span className="text-success">{result.anilist_title}</span>
                )}
                {result.anilist_id && <span className="text-muted-foreground/60">#{result.anilist_id}</span>}
              </div>
            )}
            {result.error_message && (
              <p className="text-xs text-destructive/80 mt-1">{result.error_message}</p>
            )}
          </div>
        ))}
      </CardContent>
    </Card>
  );
}
/**
 * Summary counters for a reading-status push job: pushed, skipped,
 * series without books, and errors.
 */
export function ReadingStatusPushReportCard({ report, t }: { report: ReadingStatusPushReportDto; t: TranslateFunction }) {
  const { pushed, skipped, no_books, errors, total_series } = report;
  return (
    <Card>
      <CardHeader>
        <CardTitle>{t("jobDetail.readingStatusPushReport")}</CardTitle>
        <CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(total_series) })}</CardDescription>
      </CardHeader>
      <CardContent>
        <div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
          <StatBox value={pushed} label={t("jobDetail.pushed")} variant="success" />
          <StatBox value={skipped} label={t("jobDetail.skipped")} variant="primary" />
          <StatBox value={no_books} label={t("jobDetail.noBooks")} />
          {/* Errors box only turns red when at least one error occurred. */}
          <StatBox value={errors} label={t("jobDetail.errors")} variant={errors > 0 ? "error" : "default"} />
        </div>
      </CardContent>
    </Card>
  );
}
/**
 * Per-series outcome list for a reading-status push job. Each row shows the
 * series (linked to its library page when libraryId is known), a status badge,
 * the AniList entry that was updated (with status and volume progress), and
 * any error message. Renders nothing when there are no results.
 */
export function ReadingStatusPushResultsCard({ results, libraryId, t }: {
  results: ReadingStatusPushResultDto[];
  libraryId: string | null;
  t: TranslateFunction;
}) {
  if (results.length === 0) return null;
  // Row container tint per status; unknown statuses fall back to the muted look.
  const rowClasses: Record<string, string> = {
    pushed: "bg-success/10 border-success/20",
    error: "bg-destructive/10 border-destructive/20",
    skipped: "bg-primary/10 border-primary/20",
  };
  // Badge colors per status.
  const badgeClasses: Record<string, string> = {
    pushed: "bg-success/20 text-success",
    skipped: "bg-primary/20 text-primary",
    no_books: "bg-muted text-muted-foreground",
    error: "bg-destructive/20 text-destructive",
  };
  // Translated badge label; unknown statuses render verbatim.
  const badgeLabel = (status: string): string => {
    switch (status) {
      case "pushed": return t("jobDetail.pushed");
      case "skipped": return t("jobDetail.skipped");
      case "no_books": return t("jobDetail.noBooks");
      case "error": return t("common.error");
      default: return status;
    }
  };
  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
        <CardDescription>{t("jobDetail.seriesProcessed", { count: String(results.length) })}</CardDescription>
      </CardHeader>
      <CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
        {results.map((result) => (
          <div
            key={result.id}
            className={`p-3 rounded-lg border ${rowClasses[result.status] ?? "bg-muted/50 border-border/60"}`}
          >
            <div className="flex items-center justify-between gap-2">
              {libraryId ? (
                <Link
                  href={`/libraries/${libraryId}/series/${encodeURIComponent(result.series_name)}`}
                  className="font-medium text-sm text-primary hover:underline truncate"
                >
                  {result.series_name}
                </Link>
              ) : (
                <span className="font-medium text-sm text-foreground truncate">{result.series_name}</span>
              )}
              <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${badgeClasses[result.status] ?? "bg-muted text-muted-foreground"}`}>
                {badgeLabel(result.status)}
              </span>
            </div>
            {/* Pushed rows show which AniList entry was updated, with status and volume progress. */}
            {result.status === "pushed" && result.anilist_title && (
              <div className="mt-1 flex items-center gap-1.5 text-xs text-muted-foreground">
                <svg className="w-3 h-3 text-success shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12" />
                </svg>
                {result.anilist_url ? (
                  <a href={result.anilist_url} target="_blank" rel="noopener noreferrer" className="text-success hover:underline">
                    {result.anilist_title}
                  </a>
                ) : (
                  <span className="text-success">{result.anilist_title}</span>
                )}
                {result.anilist_status && <span className="text-muted-foreground/70 font-medium">{result.anilist_status}</span>}
                {result.progress_volumes != null && <span className="text-muted-foreground/60">vol. {result.progress_volumes}</span>}
              </div>
            )}
            {result.error_message && (
              <p className="text-xs text-destructive/80 mt-1">{result.error_message}</p>
            )}
          </div>
        ))}
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,283 @@
// Opt this route out of Next.js static caching — job state must always be fetched fresh.
export const dynamic = "force-dynamic";
import { notFound } from "next/navigation";
import Link from "next/link";
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, getReadingStatusMatchReport, getReadingStatusMatchResults, getReadingStatusPushReport, getReadingStatusPushResults, getDownloadDetectionReport, getDownloadDetectionResults, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto, ReadingStatusMatchReportDto, ReadingStatusMatchResultDto, ReadingStatusPushReportDto, ReadingStatusPushResultDto, DownloadDetectionReportDto, DownloadDetectionResultDto } from "@/lib/api";
import { JobDetailLive } from "@/app/components/JobDetailLive";
import { getServerTranslations } from "@/lib/i18n/server";
import { JobSummaryBanner } from "./components/JobSummaryBanner";
import { JobOverviewCard } from "./components/JobOverviewCard";
import { JobTimelineCard } from "./components/JobTimelineCard";
import { JobProgressCard, IndexStatsCard, ThumbnailStatsCard } from "./components/JobProgressCard";
import { MetadataBatchReportCard, MetadataBatchResultsCard, MetadataRefreshReportCard, MetadataRefreshChangesCard } from "./components/MetadataReportCards";
import { ReadingStatusMatchReportCard, ReadingStatusMatchResultsCard, ReadingStatusPushReportCard, ReadingStatusPushResultsCard } from "./components/ReadingStatusReportCards";
import { DownloadDetectionReportCard, DownloadDetectionResultsCard } from "./components/DownloadDetectionCards";
import { JobErrorsCard } from "./components/JobErrorsCard";
// Route props for the job-detail page (Next.js App Router delivers params as a Promise).
interface JobDetailPageProps {
  params: Promise<{ id: string }>;
}
// Job record as returned by GET /index/jobs/:id.
interface JobDetails {
  id: string;
  library_id: string | null; // presumably null when the job is not tied to a single library — TODO confirm against API
  book_id: string | null;
  type: string; // e.g. "rebuild", "metadata_batch" — see JOB_TYPE_INFO in the page component
  status: string; // values consumed below include "success", "failed", "cancelled", "extracting_pages", "generating_thumbnails"
  created_at: string;
  started_at: string | null; // null until the job has actually started
  finished_at: string | null; // null while the job is still running
  phase2_started_at: string | null;
  generating_thumbnails_started_at: string | null;
  current_file: string | null;
  progress_percent: number | null;
  processed_files: number | null;
  total_files: number | null;
  // Aggregate scan statistics; only rendered for indexing-type jobs.
  stats_json: {
    scanned_files: number;
    indexed_files: number;
    removed_files: number;
    errors: number;
    warnings: number;
  } | null;
  error_opt: string | null; // top-level job error, distinct from the per-file JobError rows
}
// One per-file error row from GET /index/jobs/:id/errors.
interface JobError {
  id: string;
  file_path: string;
  error_message: string;
  created_at: string;
}
/** Fetch one job by id; resolves to null when the API call fails (e.g. unknown id). */
async function getJobDetails(jobId: string): Promise<JobDetails | null> {
  return apiFetch<JobDetails>(`/index/jobs/${jobId}`).catch(() => null);
}
/** Fetch the per-file error rows for a job; a failed call degrades to an empty list. */
async function getJobErrors(jobId: string): Promise<JobError[]> {
  return apiFetch<JobError[]>(`/index/jobs/${jobId}/errors`).catch(() => []);
}
/**
 * Human-readable elapsed time between two ISO timestamps.
 * When `end` is falsy the duration is measured up to "now" (running job).
 * Formats: "Ns" under a minute, "Nm Ns" under an hour, "Nh Nm" beyond.
 */
function formatDuration(start: string, end: string | null): string {
  const MINUTE_MS = 60_000;
  const HOUR_MS = 3_600_000;
  const endMs = (end ? new Date(end) : new Date()).getTime();
  const elapsed = endMs - new Date(start).getTime();
  if (elapsed >= HOUR_MS) {
    const hours = Math.floor(elapsed / HOUR_MS);
    const minutes = Math.floor((elapsed % HOUR_MS) / MINUTE_MS);
    return `${hours}h ${minutes}m`;
  }
  if (elapsed >= MINUTE_MS) {
    const minutes = Math.floor(elapsed / MINUTE_MS);
    const seconds = Math.floor((elapsed % MINUTE_MS) / 1000);
    return `${minutes}m ${seconds}s`;
  }
  return `${Math.floor(elapsed / 1000)}s`;
}
/** Items-per-second rate, e.g. "25.0/s"; "-" when nothing was processed or the duration is zero. */
function formatSpeed(count: number, durationMs: number): string {
  if (count === 0 || durationMs === 0) {
    return "-";
  }
  const perSecond = count / (durationMs / 1000);
  return `${perSecond.toFixed(1)}/s`;
}
/**
 * Server component for /jobs/[id]: loads the job, its per-file errors, and any
 * type-specific report/results, then renders a summary banner plus a grid of
 * detail cards. Card selection is driven entirely by the job's `type`.
 */
export default async function JobDetailPage({ params }: JobDetailPageProps) {
  const { id } = await params;
  // Job details and file errors are independent — fetch them in parallel.
  const [job, errors] = await Promise.all([
    getJobDetails(id),
    getJobErrors(id),
  ]);
  if (!job) {
    notFound();
  }
  const { t, locale } = await getServerTranslations();
  // Per-type display metadata; built inside the component because labels need `t`.
  const JOB_TYPE_INFO: Record<string, { label: string; description: string; isThumbnailOnly: boolean }> = {
    rebuild: { label: t("jobType.rebuildLabel"), description: t("jobType.rebuildDesc"), isThumbnailOnly: false },
    full_rebuild: { label: t("jobType.full_rebuildLabel"), description: t("jobType.full_rebuildDesc"), isThumbnailOnly: false },
    rescan: { label: t("jobType.rescanLabel"), description: t("jobType.rescanDesc"), isThumbnailOnly: false },
    thumbnail_rebuild: { label: t("jobType.thumbnail_rebuildLabel"), description: t("jobType.thumbnail_rebuildDesc"), isThumbnailOnly: true },
    thumbnail_regenerate: { label: t("jobType.thumbnail_regenerateLabel"), description: t("jobType.thumbnail_regenerateDesc"), isThumbnailOnly: true },
    cbr_to_cbz: { label: t("jobType.cbr_to_cbzLabel"), description: t("jobType.cbr_to_cbzDesc"), isThumbnailOnly: false },
    metadata_batch: { label: t("jobType.metadata_batchLabel"), description: t("jobType.metadata_batchDesc"), isThumbnailOnly: false },
    metadata_refresh: { label: t("jobType.metadata_refreshLabel"), description: t("jobType.metadata_refreshDesc"), isThumbnailOnly: false },
    reading_status_match: { label: t("jobType.reading_status_matchLabel"), description: t("jobType.reading_status_matchDesc"), isThumbnailOnly: false },
    reading_status_push: { label: t("jobType.reading_status_pushLabel"), description: t("jobType.reading_status_pushDesc"), isThumbnailOnly: false },
    download_detection: { label: t("jobType.download_detectionLabel"), description: t("jobType.download_detectionDesc"), isThumbnailOnly: false },
  };
  // Type flags decide which report endpoints to hit and which cards to render.
  const isMetadataBatch = job.type === "metadata_batch";
  const isMetadataRefresh = job.type === "metadata_refresh";
  const isReadingStatusMatch = job.type === "reading_status_match";
  const isReadingStatusPush = job.type === "reading_status_push";
  const isDownloadDetection = job.type === "download_detection";
  // Each report/results pair is fetched only for the matching job type;
  // every call degrades gracefully (null / empty list) if the API errors.
  let batchReport: MetadataBatchReportDto | null = null;
  let batchResults: MetadataBatchResultDto[] = [];
  if (isMetadataBatch) {
    [batchReport, batchResults] = await Promise.all([
      getMetadataBatchReport(id).catch(() => null),
      getMetadataBatchResults(id).catch(() => []),
    ]);
  }
  let refreshReport: MetadataRefreshReportDto | null = null;
  if (isMetadataRefresh) {
    refreshReport = await getMetadataRefreshReport(id).catch(() => null);
  }
  let readingStatusReport: ReadingStatusMatchReportDto | null = null;
  let readingStatusResults: ReadingStatusMatchResultDto[] = [];
  if (isReadingStatusMatch) {
    [readingStatusReport, readingStatusResults] = await Promise.all([
      getReadingStatusMatchReport(id).catch(() => null),
      getReadingStatusMatchResults(id).catch(() => []),
    ]);
  }
  let readingStatusPushReport: ReadingStatusPushReportDto | null = null;
  let readingStatusPushResults: ReadingStatusPushResultDto[] = [];
  if (isReadingStatusPush) {
    [readingStatusPushReport, readingStatusPushResults] = await Promise.all([
      getReadingStatusPushReport(id).catch(() => null),
      getReadingStatusPushResults(id).catch(() => []),
    ]);
  }
  let downloadDetectionReport: DownloadDetectionReportDto | null = null;
  let downloadDetectionResults: DownloadDetectionResultDto[] = [];
  if (isDownloadDetection) {
    [downloadDetectionReport, downloadDetectionResults] = await Promise.all([
      getDownloadDetectionReport(id).catch(() => null),
      // Only "found" results are shown on this page.
      getDownloadDetectionResults(id, "found").catch(() => []),
    ]);
  }
  // NOTE(review): fallback uses `description: null` while the record's value type
  // declares `description: string` — confirm JobOverviewCard tolerates null here.
  const typeInfo = JOB_TYPE_INFO[job.type] ?? { label: job.type, description: null, isThumbnailOnly: false };
  const { isThumbnailOnly } = typeInfo;
  // Elapsed time; for a still-running job this measures up to "now".
  const durationMs = job.started_at
    ? new Date(job.finished_at || new Date()).getTime() - new Date(job.started_at).getTime()
    : 0;
  // Status flags: terminal states stop live polling (JobDetailLive).
  const isCompleted = job.status === "success";
  const isFailed = job.status === "failed";
  const isCancelled = job.status === "cancelled";
  const isTerminal = isCompleted || isFailed || isCancelled;
  const isExtractingPages = job.status === "extracting_pages";
  const isThumbnailPhase = job.status === "generating_thumbnails";
  const isPhase2 = isExtractingPages || isThumbnailPhase;
  // Title/description of the progress card, chosen by job type first, then phase.
  const progressTitle = isMetadataBatch
    ? t("jobDetail.metadataSearch")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefresh")
    : isReadingStatusMatch
    ? t("jobDetail.readingStatusMatch")
    : isReadingStatusPush
    ? t("jobDetail.readingStatusPush")
    : isDownloadDetection
    ? t("jobDetail.downloadDetection")
    : isThumbnailOnly
    ? t("jobType.thumbnail_rebuild")
    : isExtractingPages
    ? t("jobDetail.phase2a")
    : isThumbnailPhase
    ? t("jobDetail.phase2b")
    : t("jobDetail.phase1")
  const progressDescription = isMetadataBatch
    ? t("jobDetail.metadataSearchDesc")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefreshDesc")
    : isReadingStatusMatch
    ? t("jobDetail.readingStatusMatchDesc")
    : isReadingStatusPush
    ? t("jobDetail.readingStatusPushDesc")
    : isDownloadDetection
    ? t("jobDetail.downloadDetectionDesc")
    : isThumbnailOnly
    ? undefined
    : isExtractingPages
    ? t("jobDetail.phase2aDesc")
    : isThumbnailPhase
    ? t("jobDetail.phase2bDesc")
    : t("jobDetail.phase1Desc");
  return (
    <>
      {/* Client component that live-refreshes the page until the job reaches a terminal state. */}
      <JobDetailLive jobId={id} isTerminal={isTerminal} />
      <div className="mb-6">
        <Link
          href="/jobs"
          className="inline-flex items-center text-sm text-muted-foreground hover:text-primary transition-colors duration-200"
        >
          <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 19l-7-7 7-7" />
          </svg>
          {t("jobDetail.backToJobs")}
        </Link>
        <h1 className="text-3xl font-bold text-foreground mt-2">{t("jobDetail.title")}</h1>
      </div>
      <JobSummaryBanner
        job={job}
        batchReport={batchReport}
        refreshReport={refreshReport}
        readingStatusReport={readingStatusReport}
        readingStatusPushReport={readingStatusPushReport}
        downloadDetectionReport={downloadDetectionReport}
        t={t}
        formatDuration={formatDuration}
      />
      <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
        <JobOverviewCard job={job} typeInfo={typeInfo} t={t} formatDuration={formatDuration} />
        <JobTimelineCard job={job} isThumbnailOnly={isThumbnailOnly} t={t} locale={locale} formatDuration={formatDuration} />
        <JobProgressCard
          job={job}
          isThumbnailOnly={isThumbnailOnly}
          progressTitle={progressTitle}
          progressDescription={progressDescription}
          t={t}
          formatDuration={formatDuration}
          formatSpeed={formatSpeed}
        />
        {/* Index statistics — only for indexing-type jobs that report stats_json */}
        {job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && !isReadingStatusMatch && !isReadingStatusPush && !isDownloadDetection && (
          <IndexStatsCard job={job} t={t} formatDuration={formatDuration} formatSpeed={formatSpeed} durationMs={durationMs} />
        )}
        {/* Thumbnail statistics — only once a thumbnail-only job completed with a known file count */}
        {isThumbnailOnly && isCompleted && job.total_files != null && (
          <ThumbnailStatsCard job={job} t={t} formatDuration={formatDuration} formatSpeed={formatSpeed} durationMs={durationMs} />
        )}
        {/* Metadata batch / refresh report cards */}
        {isMetadataBatch && batchReport && <MetadataBatchReportCard report={batchReport} t={t} />}
        {isMetadataRefresh && refreshReport && <MetadataRefreshReportCard report={refreshReport} t={t} />}
        {isMetadataRefresh && refreshReport && <MetadataRefreshChangesCard report={refreshReport} libraryId={job.library_id} t={t} />}
        {/* Reading status match / push cards */}
        {isReadingStatusMatch && readingStatusReport && <ReadingStatusMatchReportCard report={readingStatusReport} t={t} />}
        {isReadingStatusMatch && <ReadingStatusMatchResultsCard results={readingStatusResults} libraryId={job.library_id} t={t} />}
        {isReadingStatusPush && readingStatusPushReport && <ReadingStatusPushReportCard report={readingStatusPushReport} t={t} />}
        {isReadingStatusPush && <ReadingStatusPushResultsCard results={readingStatusPushResults} libraryId={job.library_id} t={t} />}
        {/* Download detection cards */}
        {isDownloadDetection && downloadDetectionReport && <DownloadDetectionReportCard report={downloadDetectionReport} t={t} />}
        {isDownloadDetection && <DownloadDetectionResultsCard results={downloadDetectionResults} libraryId={job.library_id} t={t} />}
        {/* Metadata batch per-series results */}
        {isMetadataBatch && <MetadataBatchResultsCard results={batchResults} libraryId={job.library_id} t={t} />}
        {/* Per-file errors (any job type) */}
        <JobErrorsCard errors={errors} t={t} locale={locale} />
      </div>
    </>
  );
}

View File

@@ -0,0 +1,307 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush, startDownloadDetection, apiFetch, IndexJobDto, LibraryDto } from "@/lib/api";
import { JobsList } from "@/app/components/JobsList";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";
export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
const { highlight } = await searchParams;
const { t } = await getServerTranslations();
const [jobs, libraries, prowlarrSettings] = await Promise.all([
listJobs().catch(() => [] as IndexJobDto[]),
fetchLibraries().catch(() => [] as LibraryDto[]),
apiFetch<{ url?: string }>("/settings/prowlarr").catch(() => null),
]);
const prowlarrConfigured = !!prowlarrSettings?.url;
const libraryMap = new Map(libraries.map(l => [l.id, l.name]));
const readingStatusLibraries = libraries.filter(l => l.reading_status_provider);
async function triggerRebuild(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildIndex(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerFullRebuild(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildIndex(libraryId || undefined, true);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerRescan(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildIndex(libraryId || undefined, false, true);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerThumbnailsRebuild(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await rebuildThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerThumbnailsRegenerate(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await regenerateThumbnails(libraryId || undefined);
revalidatePath("/jobs");
redirect(`/jobs?highlight=${result.id}`);
}
async function triggerMetadataBatch(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await startMetadataBatch(libraryId || undefined);
revalidatePath("/jobs");
redirect(result.id ? `/jobs?highlight=${result.id}` : "/jobs");
}
async function triggerMetadataRefresh(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await startMetadataRefresh(libraryId || undefined);
revalidatePath("/jobs");
redirect(result.id ? `/jobs?highlight=${result.id}` : "/jobs");
}
async function triggerReadingStatusMatch(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await startReadingStatusMatch(libraryId || undefined);
revalidatePath("/jobs");
redirect(result.id ? `/jobs?highlight=${result.id}` : "/jobs");
}
async function triggerReadingStatusPush(formData: FormData) {
"use server";
const libraryId = formData.get("library_id") as string;
const result = await startReadingStatusPush(libraryId || undefined);
revalidatePath("/jobs");
redirect(result.id ? `/jobs?highlight=${result.id}` : "/jobs");
}
// Server Action: run Prowlarr download detection for the selected scope.
// Shown only when Prowlarr is configured (see the form markup below).
async function triggerDownloadDetection(formData: FormData) {
  "use server";
  const scope = (formData.get("library_id") as string) || undefined;
  const job = await startDownloadDetection(scope);
  revalidatePath("/jobs");
  const destination = job.id ? `/jobs?highlight=${job.id}` : "/jobs";
  redirect(destination);
}
return (
<>
<div className="mb-6">
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
<svg className="w-8 h-8 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" />
</svg>
{t("jobs.title")}
</h1>
</div>
<Card className="mb-6">
<CardHeader>
<CardTitle>{t("jobs.startJob")}</CardTitle>
<CardDescription>{t("jobs.startJobDescription")}</CardDescription>
</CardHeader>
<CardContent>
<form>
<div className="mb-6">
<FormField className="max-w-xs">
<FormSelect name="library_id" defaultValue="">
<option value="">{t("jobs.allLibraries")}</option>
{libraries.map((lib) => (
<option key={lib.id} value={lib.id}>{lib.name}</option>
))}
</FormSelect>
</FormField>
</div>
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
{/* Indexation group */}
<div className="space-y-3">
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
</svg>
{t("jobs.groupIndexation")}
</div>
<div className="space-y-2">
<button type="submit" formAction={triggerRebuild}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.rebuild")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rebuildShort")}</p>
</button>
<button type="submit" formAction={triggerRescan}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.rescan")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rescanShort")}</p>
</button>
<button type="submit" formAction={triggerFullRebuild}
className="w-full text-left rounded-lg border border-destructive/30 bg-destructive/5 p-3 hover:bg-destructive/10 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-destructive shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
</svg>
<span className="font-medium text-sm text-destructive">{t("jobs.fullRebuild")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.fullRebuildShort")}</p>
</button>
</div>
</div>
{/* Thumbnails group */}
<div className="space-y-3">
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
</svg>
{t("jobs.groupThumbnails")}
</div>
<div className="space-y-2">
<button type="submit" formAction={triggerThumbnailsRebuild}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6v6m0 0v6m0-6h6m-6 0H6" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.generateThumbnails")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.generateThumbnailsShort")}</p>
</button>
<button type="submit" formAction={triggerThumbnailsRegenerate}
className="w-full text-left rounded-lg border border-warning/30 bg-warning/5 p-3 hover:bg-warning/10 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-warning shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
</svg>
<span className="font-medium text-sm text-warning">{t("jobs.regenerateThumbnails")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.regenerateThumbnailsShort")}</p>
</button>
</div>
</div>
{/* Metadata group */}
<div className="space-y-3">
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z" />
</svg>
{t("jobs.groupMetadata")}
</div>
<div className="space-y-2">
<button type="submit" formAction={triggerMetadataBatch}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.batchMetadata")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.batchMetadataShort")}</p>
</button>
<button type="submit" formAction={triggerMetadataRefresh}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.refreshMetadata")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.refreshMetadataShort")}</p>
</button>
</div>
</div>
{/* Reading status group — only shown if at least one library has a provider configured */}
{readingStatusLibraries.length > 0 && (
<div className="space-y-3">
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
</svg>
{t("jobs.groupReadingStatus")}
</div>
<div className="space-y-2">
<button type="submit" formAction={triggerReadingStatusMatch}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13.828 10.172a4 4 0 00-5.656 0l-4 4a4 4 0 105.656 5.656l1.102-1.101m-.758-4.899a4 4 0 005.656 0l4-4a4 4 0 00-5.656-5.656l-1.1 1.1" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.matchReadingStatus")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.matchReadingStatusShort")}</p>
</button>
<button type="submit" formAction={triggerReadingStatusPush}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-success shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 16a4 4 0 01-.88-7.903A5 5 0 1115.9 6L16 6a5 5 0 011 9.9M15 13l-3-3m0 0l-3 3m3-3v12" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.pushReadingStatus")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.pushReadingStatusShort")}</p>
</button>
</div>
</div>
)}
{/* Download group — only shown if Prowlarr is configured */}
{prowlarrConfigured && <div className="space-y-3">
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4" />
</svg>
{t("jobs.groupProwlarr")}
</div>
<div className="space-y-2">
<button type="submit" formAction={triggerDownloadDetection}
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
<div className="flex items-center gap-2">
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0zM10 7v3m0 0v3m0-3h3m-3 0H7" />
</svg>
<span className="font-medium text-sm text-foreground">{t("jobs.downloadDetection")}</span>
</div>
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.downloadDetectionShort")}</p>
</button>
</div>
</div>}
</div>
</form>
</CardContent>
</Card>
<JobsList
initialJobs={jobs}
libraries={libraryMap}
highlightJobId={highlight}
/>
</>
);
}

View File

@@ -0,0 +1,136 @@
import Image from "next/image";
import Link from "next/link";
import type { ReactNode } from "react";
import { cookies } from "next/headers";
import { revalidatePath } from "next/cache";
import { ThemeToggle } from "@/app/theme-toggle";
import { JobsIndicator } from "@/app/components/JobsIndicator";
import { NavIcon, Icon } from "@/app/components/ui";
import { LogoutButton } from "@/app/components/LogoutButton";
import { MobileNav } from "@/app/components/MobileNav";
import { UserSwitcher } from "@/app/components/UserSwitcher";
import { fetchUsers } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslationKey } from "@/lib/i18n/fr";
// One entry per primary navigation link. `href` and `icon` are narrowed to
// the known routes / NavIcon sprite names so a typo fails type-checking.
type NavItem = {
href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings";
labelKey: TranslationKey;
icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings";
};
// The desktop list omits "/" (the logo already links home) and "/settings"
// (rendered as its own icon button); MobileNav prepends a dashboard entry.
const navItems: NavItem[] = [
{ href: "/books", labelKey: "nav.books", icon: "books" },
{ href: "/series", labelKey: "nav.series", icon: "series" },
{ href: "/authors", labelKey: "nav.authors", icon: "authors" },
{ href: "/libraries", labelKey: "nav.libraries", icon: "libraries" },
{ href: "/jobs", labelKey: "nav.jobs", icon: "jobs" },
{ href: "/tokens", labelKey: "nav.tokens", icon: "tokens" },
];
// Root authenticated layout: sticky top navigation (desktop links + mobile
// drawer), user switcher, jobs indicator, and the page content shell.
export default async function AppLayout({ children }: { children: ReactNode }) {
const { t } = await getServerTranslations();
const cookieStore = await cookies();
// Impersonation cookie: which user's reading state the backoffice displays;
// null means "no specific user selected".
const activeUserId = cookieStore.get("as_user_id")?.value || null;
// The layout must still render if the user list cannot be fetched.
const users = await fetchUsers().catch(() => []);
// Server Action: persist (or clear, when userId is empty) the impersonated
// user, then revalidate the whole layout tree so every page picks it up.
async function setActiveUserAction(formData: FormData) {
"use server";
const userId = formData.get("user_id") as string;
const store = await cookies();
if (userId) {
// httpOnly: false — presumably so client components can read the cookie
// directly; confirm against UserSwitcher before tightening.
store.set("as_user_id", userId, { path: "/", httpOnly: false, sameSite: "lax" });
} else {
store.delete("as_user_id");
}
revalidatePath("/", "layout");
}
return (
<>
<header className="sticky top-0 z-50 w-full border-b border-border/40 bg-background/70 backdrop-blur-xl backdrop-saturate-150 supports-[backdrop-filter]:bg-background/60">
<nav className="container mx-auto flex h-16 items-center justify-between px-4">
{/* Logo doubles as the home link; navItems deliberately has no "/" entry. */}
<Link
href="/"
className="flex items-center gap-3 hover:opacity-80 transition-opacity duration-200"
>
<Image src="/logo.png" alt="StripStream" width={36} height={36} className="rounded-lg" />
<div className="flex items-baseline gap-2">
<span className="text-xl font-bold tracking-tight text-foreground">StripStream</span>
<span className="text-sm text-muted-foreground font-medium hidden xl:inline">
{t("common.backoffice")}
</span>
</div>
</Link>
<div className="flex items-center gap-2">
{/* Desktop nav links — hidden on mobile, where MobileNav takes over. */}
<div className="hidden md:flex items-center gap-1">
{navItems.map((item) => (
<NavLink key={item.href} href={item.href} title={t(item.labelKey)}>
<NavIcon name={item.icon} />
<span className="ml-2 hidden xl:inline">{t(item.labelKey)}</span>
</NavLink>
))}
</div>
<div className="hidden md:block">
<UserSwitcher
users={users}
activeUserId={activeUserId}
setActiveUserAction={setActiveUserAction}
/>
</div>
<div className="flex items-center gap-1 pl-4 ml-2 border-l border-border/60">
<JobsIndicator />
{/* Settings gets its own icon button instead of a navItems entry. */}
<Link
href="/settings"
className="hidden xl:flex p-2 rounded-lg text-muted-foreground hover:text-foreground hover:bg-accent transition-colors"
title={t("nav.settings")}
>
<Icon name="settings" size="md" />
</Link>
<ThemeToggle />
<LogoutButton />
{/* Mobile drawer: re-adds the dashboard link and pre-resolves labels. */}
<MobileNav
navItems={[
{ href: "/", label: t("nav.dashboard"), icon: "dashboard" },
...navItems.map(item => ({ ...item, label: t(item.labelKey) })),
]}
users={users}
activeUserId={activeUserId}
setActiveUserAction={setActiveUserAction}
/>
</div>
</div>
</nav>
</header>
<main className="container mx-auto px-4 sm:px-6 lg:px-8 py-8 pb-16">
{children}
</main>
</>
);
}
// Styled wrapper around next/link for the desktop primary navigation.
// `title` provides the tooltip — useful below the xl breakpoint, where the
// text label next to the icon is hidden (see AppLayout's nav markup).
function NavLink({ href, title, children }: { href: NavItem["href"]; title?: string; children: React.ReactNode }) {
return (
<Link
href={href}
title={title}
className="
flex items-center
px-2 lg:px-3 py-2
rounded-lg
text-sm font-medium
text-muted-foreground
hover:text-foreground
hover:bg-accent
transition-colors duration-200
active:scale-[0.98]
"
>
{children}
</Link>
);
}

View File

@@ -1,9 +1,9 @@
import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "../../../../lib/api";
import { BooksGrid, EmptyState } from "../../../components/BookCard";
import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader";
import { OffsetPagination } from "../../../components/ui";
import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
import { OffsetPagination } from "@/app/components/ui";
import { notFound } from "next/navigation";
import { getServerTranslations } from "../../../../lib/i18n/server";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";

View File

@@ -1,15 +1,28 @@
import { fetchLibraries, fetchBooks, fetchSeriesMetadata, getBookCoverUrl, getMetadataLink, getMissingBooks, BookDto, SeriesMetadataDto, ExternalMetadataLinkDto, MissingBooksDto } from "../../../../../lib/api";
import { BooksGrid, EmptyState } from "../../../../components/BookCard";
import { MarkSeriesReadButton } from "../../../../components/MarkSeriesReadButton";
import { MarkBookReadButton } from "../../../../components/MarkBookReadButton";
import { EditSeriesForm } from "../../../../components/EditSeriesForm";
import { MetadataSearchModal } from "../../../../components/MetadataSearchModal";
import { OffsetPagination } from "../../../../components/ui";
import { SafeHtml } from "../../../../components/SafeHtml";
import { fetchLibraries, fetchBooks, fetchSeriesMetadata, getBookCoverUrl, getMetadataLink, getMissingBooks, getReadingStatusLink, BookDto, SeriesMetadataDto, ExternalMetadataLinkDto, MissingBooksDto, AnilistSeriesLinkDto } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import { ProviderIcon, providerLabel } from "@/app/components/ProviderIcon";
import nextDynamic from "next/dynamic";
import { OffsetPagination } from "@/app/components/ui";
import { SafeHtml } from "@/app/components/SafeHtml";
import Image from "next/image";
import Link from "next/link";
const EditSeriesForm = nextDynamic(
() => import("@/app/components/EditSeriesForm").then(m => m.EditSeriesForm)
);
const MetadataSearchModal = nextDynamic(
() => import("@/app/components/MetadataSearchModal").then(m => m.MetadataSearchModal)
);
const ReadingStatusModal = nextDynamic(
() => import("@/app/components/ReadingStatusModal").then(m => m.ReadingStatusModal)
);
const ProwlarrSearchModal = nextDynamic(
() => import("@/app/components/ProwlarrSearchModal").then(m => m.ProwlarrSearchModal)
);
import { notFound } from "next/navigation";
import { getServerTranslations } from "../../../../../lib/i18n/server";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";
@@ -28,7 +41,7 @@ export default async function SeriesDetailPage({
const seriesName = decodeURIComponent(name);
const [library, booksPage, seriesMeta, metadataLinks] = await Promise.all([
const [library, booksPage, seriesMeta, metadataLinks, readingStatusLink] = await Promise.all([
fetchLibraries().then((libs) => libs.find((l) => l.id === id)),
fetchBooks(id, seriesName, page, limit).catch(() => ({
items: [] as BookDto[],
@@ -38,6 +51,7 @@ export default async function SeriesDetailPage({
})),
fetchSeriesMetadata(id, seriesName).catch(() => null as SeriesMetadataDto | null),
getMetadataLink(id, seriesName).catch(() => [] as ExternalMetadataLinkDto[]),
getReadingStatusLink(id, seriesName).catch(() => null as AnilistSeriesLinkDto | null),
]);
const existingLink = metadataLinks.find((l) => l.status === "approved") ?? metadataLinks[0] ?? null;
@@ -93,7 +107,7 @@ export default async function SeriesDetailPage({
alt={t("books.coverOf", { name: displayName })}
fill
className="object-cover"
unoptimized
sizes="160px"
/>
</div>
</div>
@@ -117,6 +131,37 @@ export default async function SeriesDetailPage({
{t(`seriesStatus.${seriesMeta.status}` as any) || seriesMeta.status}
</span>
)}
{existingLink?.status === "approved" && (
existingLink.external_url ? (
<a
href={existingLink.external_url}
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full bg-primary/10 text-primary text-xs border border-primary/30 hover:bg-primary/20 transition-colors"
>
<ProviderIcon provider={existingLink.provider} size={12} />
{providerLabel(existingLink.provider)}
</a>
) : (
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full bg-primary/10 text-primary text-xs border border-primary/30">
<ProviderIcon provider={existingLink.provider} size={12} />
{providerLabel(existingLink.provider)}
</span>
)
)}
{readingStatusLink && (
<a
href={readingStatusLink.anilist_url ?? `https://anilist.co/manga/${readingStatusLink.anilist_id}`}
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full bg-cyan-500/10 text-cyan-600 text-xs border border-cyan-500/30 hover:bg-cyan-500/20 transition-colors"
>
<svg className="w-3 h-3" viewBox="0 0 24 24" fill="currentColor">
<path d="M6.361 2.943 0 21.056h4.942l1.077-3.133H11.4l1.077 3.133H17.5L11.128 2.943H6.361zm1.58 11.152 1.84-5.354 1.84 5.354H7.941zM17.358 2.943v18.113h4.284V2.943h-4.284z"/>
</svg>
AniList
</a>
)}
</div>
{seriesMeta?.description && (
@@ -138,10 +183,10 @@ export default async function SeriesDetailPage({
</span>
<span className="w-px h-4 bg-border" />
<span className="text-muted-foreground">
{t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total) })}
{t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
</span>
{/* Progress bar */}
{/* Reading progress bar */}
<div className="flex items-center gap-2 flex-1 min-w-[120px] max-w-[200px]">
<div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
<div
@@ -150,6 +195,22 @@ export default async function SeriesDetailPage({
/>
</div>
</div>
{/* Collection progress bar (owned / expected) */}
{missingData && missingData.total_external > 0 && (
<>
<span className="w-px h-4 bg-border" />
<span className="text-muted-foreground">
{booksPage.total}/{missingData.total_external} {t("series.missingCount", { count: missingData.missing_count, plural: missingData.missing_count !== 1 ? "s" : "" })}
</span>
<div className="w-[150px] h-2 bg-muted rounded-full overflow-hidden">
<div
className="h-full bg-amber-500 rounded-full transition-all"
style={{ width: `${Math.round((booksPage.total / missingData.total_external) * 100)}%` }}
/>
</div>
</>
)}
</div>
<div className="flex flex-wrap items-center gap-3">
@@ -171,12 +232,22 @@ export default async function SeriesDetailPage({
currentStatus={seriesMeta?.status ?? null}
currentLockedFields={seriesMeta?.locked_fields ?? {}}
/>
<ProwlarrSearchModal
seriesName={seriesName}
missingBooks={missingData?.missing_books ?? null}
/>
<MetadataSearchModal
libraryId={id}
seriesName={seriesName}
existingLink={existingLink}
initialMissing={missingData}
/>
<ReadingStatusModal
libraryId={id}
seriesName={seriesName}
readingStatusProvider={library.reading_status_provider ?? null}
existingLink={readingStatusLink}
/>
</div>
</div>
</div>

View File

@@ -1,12 +1,12 @@
import { fetchLibraries, fetchSeries, fetchSeriesStatuses, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "../../../../lib/api";
import { OffsetPagination } from "../../../components/ui";
import { MarkSeriesReadButton } from "../../../components/MarkSeriesReadButton";
import { SeriesFilters } from "../../../components/SeriesFilters";
import { fetchLibraries, fetchSeries, fetchSeriesStatuses, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "@/lib/api";
import { OffsetPagination } from "@/app/components/ui";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { SeriesFilters } from "@/app/components/SeriesFilters";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";
import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader";
import { getServerTranslations } from "../../../../lib/i18n/server";
import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";
@@ -86,7 +86,7 @@ export default async function LibrarySeriesPage({
alt={t("books.coverOf", { name: s.name })}
fill
className="object-cover"
unoptimized
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 20vw"
/>
</div>
<div className="p-3">
@@ -95,7 +95,7 @@ export default async function LibrarySeriesPage({
</h3>
<div className="flex items-center justify-between mt-1">
<p className="text-xs text-muted-foreground">
{t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count) })}
{t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
</p>
<MarkSeriesReadButton
seriesName={s.name}

View File

@@ -0,0 +1,230 @@
import { revalidatePath } from "next/cache";
import Image from "next/image";
import Link from "next/link";
import { listFolders, createLibrary, deleteLibrary, fetchLibraries, getBookCoverUrl, LibraryDto, FolderItem } from "@/lib/api";
import type { TranslationKey } from "@/lib/i18n/fr";
import { getServerTranslations } from "@/lib/i18n/server";
import { LibraryActions } from "@/app/components/LibraryActions";
import { LibraryForm } from "@/app/components/LibraryForm";
import { ProviderIcon } from "@/app/components/ProviderIcon";
import {
Card, CardHeader, CardTitle, CardDescription, CardContent,
Button, Badge
} from "@/app/components/ui";
export const dynamic = "force-dynamic";
/**
 * Human-readable countdown until the next scheduled scan.
 *
 * @param nextScanAt - ISO timestamp of the next scan, or null when no scan
 *   is scheduled.
 * @param imminentLabel - localized label shown when the scheduled time is
 *   already in the past.
 * @returns "-" when unset or unparseable, the imminent label for past times,
 *   otherwise a coarse duration: "< 1 min", "Nm", "Nh" or "Nd".
 */
function formatNextScan(nextScanAt: string | null, imminentLabel: string): string {
  if (!nextScanAt) return "-";
  const date = new Date(nextScanAt);
  // Guard against unparseable timestamps: new Date("garbage").getTime() is
  // NaN, every threshold comparison below is false, and the function used to
  // leak the literal string "NaNd" into the UI.
  if (Number.isNaN(date.getTime())) return "-";
  const diff = date.getTime() - Date.now();
  if (diff < 0) return imminentLabel;
  if (diff < 60000) return "< 1 min";
  if (diff < 3600000) return `${Math.floor(diff / 60000)}m`;
  if (diff < 86400000) return `${Math.floor(diff / 3600000)}h`;
  return `${Math.floor(diff / 86400000)}d`;
}
// Admin page: lists every library as a card (cover fan, book/series stats,
// configuration badges) and hosts the create / delete Server Actions.
export default async function LibrariesPage() {
const { t } = await getServerTranslations();
// Both fetches degrade to empty lists so the page renders even when the
// backend is unreachable.
const [libraries, folders] = await Promise.all([
fetchLibraries().catch(() => [] as LibraryDto[]),
listFolders().catch(() => [] as FolderItem[])
]);
// Pre-resolve cover URLs per library once, outside the render loop below.
const thumbnailMap = new Map(
libraries.map(lib => [
lib.id,
(lib.thumbnail_book_ids || []).map(bookId => getBookCoverUrl(bookId)),
])
);
// Server Action: create a library; incomplete submissions are ignored
// silently rather than surfacing an error.
async function addLibrary(formData: FormData) {
"use server";
const name = formData.get("name") as string;
const rootPath = formData.get("root_path") as string;
if (name && rootPath) {
await createLibrary(name, rootPath);
revalidatePath("/libraries");
}
}
// Server Action: delete a library by id and refresh the listing.
async function removeLibrary(formData: FormData) {
"use server";
const id = formData.get("id") as string;
await deleteLibrary(id);
revalidatePath("/libraries");
}
return (
<>
<div className="mb-6">
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
<svg className="w-8 h-8 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
</svg>
{t("libraries.title")}
</h1>
</div>
{/* Add Library Form */}
<Card className="mb-6">
<CardHeader>
<CardTitle>{t("libraries.addLibrary")}</CardTitle>
<CardDescription>{t("libraries.addLibraryDescription")}</CardDescription>
</CardHeader>
<CardContent>
<LibraryForm initialFolders={folders} action={addLibrary} />
</CardContent>
</Card>
{/* Libraries Grid */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
{libraries.map((lib) => {
const thumbnails = thumbnailMap.get(lib.id) || [];
return (
<Card key={lib.id} className="flex flex-col overflow-hidden">
{/* Thumbnail fan */}
{thumbnails.length > 0 ? (
<Link href={`/libraries/${lib.id}/series`} className="block relative h-48 overflow-hidden bg-muted/10">
{/* First cover, blurred and faded, serves as the banner backdrop. */}
<Image
src={thumbnails[0]}
alt=""
fill
className="object-cover blur-xl scale-110 opacity-40"
sizes="(max-width: 768px) 100vw, 33vw"
loading="lazy"
/>
<div className="absolute inset-0 flex items-end justify-center">
{thumbnails.map((url, i) => {
const count = thumbnails.length;
// Fan geometry: spread covers in 12° steps centred on the
// middle cover, each placed on a 220px-radius circle whose
// centre sits below the card (bottom: -185px offset).
const mid = (count - 1) / 2;
const angle = (i - mid) * 12;
const radius = 220;
// Polar → cartesian, with −90° pointing straight up.
const rad = ((angle - 90) * Math.PI) / 180;
const cx = Math.cos(rad) * radius;
const cy = Math.sin(rad) * radius;
return (
<Image
key={i}
src={url}
alt=""
width={96}
height={144}
className="absolute object-cover shadow-lg"
style={{
transform: `translate(${cx}px, ${cy}px) rotate(${angle}deg)`,
transformOrigin: 'bottom center',
// Centre cover stacks on top; z-index falls off toward the edges.
zIndex: count - Math.abs(Math.round(i - mid)),
bottom: '-185px',
}}
sizes="96px"
loading="lazy"
/>
);
})}
</div>
</Link>
) : (
<div className="h-8 bg-muted/10" />
)}
<CardHeader className="pb-2">
<div className="flex items-start justify-between">
<div>
<CardTitle className="text-lg">{lib.name}</CardTitle>
{!lib.enabled && <Badge variant="muted" className="mt-1">{t("libraries.disabled")}</Badge>}
</div>
<div className="flex items-center gap-1">
<LibraryActions
libraryId={lib.id}
monitorEnabled={lib.monitor_enabled}
scanMode={lib.scan_mode}
watcherEnabled={lib.watcher_enabled}
metadataProvider={lib.metadata_provider}
fallbackMetadataProvider={lib.fallback_metadata_provider}
metadataRefreshMode={lib.metadata_refresh_mode}
readingStatusProvider={lib.reading_status_provider}
readingStatusPushMode={lib.reading_status_push_mode}
downloadDetectionMode={lib.download_detection_mode ?? "manual"}
/>
{/* Delete button posts to the removeLibrary Server Action. */}
<form>
<input type="hidden" name="id" value={lib.id} />
<Button type="submit" variant="ghost" size="sm" formAction={removeLibrary} className="text-muted-foreground hover:text-destructive">
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
</svg>
</Button>
</form>
</div>
</div>
<code className="text-xs font-mono text-muted-foreground break-all">{lib.root_path}</code>
</CardHeader>
<CardContent className="flex-1 pt-0">
{/* Stats */}
<div className="grid grid-cols-2 gap-3 mb-3">
<Link
href={`/libraries/${lib.id}/books`}
className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
>
<span className="block text-2xl font-bold text-primary">{lib.book_count}</span>
<span className="text-xs text-muted-foreground">{t("libraries.books")}</span>
</Link>
<Link
href={`/libraries/${lib.id}/series`}
className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
>
<span className="block text-2xl font-bold text-foreground">{lib.series_count}</span>
<span className="text-xs text-muted-foreground">{t("libraries.series")}</span>
</Link>
</div>
{/* Configuration tags */}
<div className="flex flex-wrap gap-1.5">
<span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
lib.monitor_enabled
? 'bg-success/10 text-success'
: 'bg-muted/50 text-muted-foreground'
}`}>
<span className="text-[9px]">{lib.monitor_enabled ? '●' : '○'}</span>
{t("libraries.scanLabel", { mode: t(`monitoring.${lib.scan_mode}` as TranslationKey) })}
</span>
<span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
lib.watcher_enabled
? 'bg-warning/10 text-warning'
: 'bg-muted/50 text-muted-foreground'
}`}>
<span>{lib.watcher_enabled ? '⚡' : '○'}</span>
<span>{t("libraries.watcherLabel")}</span>
</span>
{lib.metadata_provider && lib.metadata_provider !== "none" && (
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-primary/10 text-primary">
<ProviderIcon provider={lib.metadata_provider} size={11} />
{lib.metadata_provider.replace('_', ' ')}
</span>
)}
{lib.metadata_refresh_mode !== "manual" && (
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
{t("libraries.metaRefreshLabel", { mode: t(`monitoring.${lib.metadata_refresh_mode}` as TranslationKey) })}
</span>
)}
{lib.monitor_enabled && lib.next_scan_at && (
<span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
{t("libraries.nextScan", { time: formatNextScan(lib.next_scan_at, t("libraries.imminent")) })}
</span>
)}
</div>
</CardContent>
</Card>
);
})}
</div>
</>
);
}

View File

@@ -1,9 +1,13 @@
import React from "react";
import { fetchStats, StatsResponse } from "../lib/api";
import { Card, CardContent, CardHeader, CardTitle } from "./components/ui";
import { fetchStats, fetchUsers, StatsResponse, UserDto } from "@/lib/api";
import { Card, CardContent, CardHeader, CardTitle } from "@/app/components/ui";
import { RcDonutChart, RcBarChart, RcAreaChart, RcStackedBar, RcHorizontalBar, RcMultiLineChart } from "@/app/components/DashboardCharts";
import { PeriodToggle } from "@/app/components/PeriodToggle";
import { MetricToggle } from "@/app/components/MetricToggle";
import { CurrentlyReadingList, RecentlyReadList } from "@/app/components/ReadingUserFilter";
import Link from "next/link";
import { getServerTranslations } from "../lib/i18n/server";
import type { TranslateFunction } from "../lib/i18n/dictionaries";
import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
export const dynamic = "force-dynamic";
@@ -19,84 +23,25 @@ function formatNumber(n: number, locale: string): string {
return n.toLocaleString(locale === "fr" ? "fr-FR" : "en-US");
}
// Donut chart via SVG
function DonutChart({ data, colors, noDataLabel, locale = "fr" }: { data: { label: string; value: number; color: string }[]; colors?: string[]; noDataLabel?: string; locale?: string }) {
const total = data.reduce((sum, d) => sum + d.value, 0);
if (total === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
const radius = 40;
const circumference = 2 * Math.PI * radius;
let offset = 0;
return (
<div className="flex items-center gap-6">
<svg viewBox="0 0 100 100" className="w-32 h-32 shrink-0">
{data.map((d, i) => {
const pct = d.value / total;
const dashLength = pct * circumference;
const currentOffset = offset;
offset += dashLength;
return (
<circle
key={i}
cx="50"
cy="50"
r={radius}
fill="none"
stroke={d.color}
strokeWidth="16"
strokeDasharray={`${dashLength} ${circumference - dashLength}`}
strokeDashoffset={-currentOffset}
transform="rotate(-90 50 50)"
className="transition-all duration-500"
/>
);
})}
<text x="50" y="50" textAnchor="middle" dominantBaseline="central" className="fill-foreground text-[10px] font-bold">
{formatNumber(total, locale)}
</text>
</svg>
<div className="flex flex-col gap-1.5 min-w-0">
{data.map((d, i) => (
<div key={i} className="flex items-center gap-2 text-sm">
<span className="w-3 h-3 rounded-full shrink-0" style={{ backgroundColor: d.color }} />
<span className="text-muted-foreground truncate">{d.label}</span>
<span className="font-medium text-foreground ml-auto">{d.value}</span>
</div>
))}
</div>
</div>
);
// Turn a raw bucket key from the stats API into a short, localized axis label.
// period "month" expects raw = "YYYY-MM"; "week" and "day" expect "YYYY-MM-DD".
function formatChartLabel(raw: string, period: "day" | "week" | "month", locale: string): string {
  const bcp47 = locale === "fr" ? "fr-FR" : "en-US";
  switch (period) {
    case "month": {
      // raw = "YYYY-MM" — materialize the first day of that month.
      const [year, month] = raw.split("-");
      return new Date(Number(year), Number(month) - 1, 1).toLocaleDateString(bcp47, { month: "short" });
    }
    case "week":
      // raw = "YYYY-MM-DD" (Monday of the week); the midnight suffix pins local time.
      return new Date(raw + "T00:00:00").toLocaleDateString(bcp47, { day: "numeric", month: "short" });
    default:
      // day: raw = "YYYY-MM-DD"
      return new Date(raw + "T00:00:00").toLocaleDateString(bcp47, { weekday: "short", day: "numeric" });
  }
}
// Bar chart via pure CSS
// Bar chart via pure CSS — one flex column per data point, heights scaled
// against the tallest value. Zero-value bars keep a 2px stub at 20% opacity.
function BarChart({ data, color = "var(--color-primary)", noDataLabel }: { data: { label: string; value: number }[]; color?: string; noDataLabel?: string }) {
  // Empty dataset: show the placeholder text instead of an empty chart area.
  if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
  // Floor of 1 guards the division below against an all-zero dataset.
  const tallest = Math.max(...data.map((entry) => entry.value), 1);
  return (
    <div className="flex items-end gap-1.5 h-40">
      {data.map((entry, idx) => {
        const heightPct = (entry.value / tallest) * 100;
        return (
          <div key={idx} className="flex-1 flex flex-col items-center gap-1 min-w-0">
            <span className="text-[10px] text-muted-foreground font-medium">{entry.value || ""}</span>
            <div
              className="w-full rounded-t-sm transition-all duration-500 min-h-[2px]"
              style={{
                height: `${heightPct}%`,
                backgroundColor: color,
                opacity: entry.value === 0 ? 0.2 : 1,
              }}
            />
            <span className="text-[10px] text-muted-foreground truncate w-full text-center">
              {entry.label}
            </span>
          </div>
        );
      })}
    </div>
  );
}
// Horizontal progress bar for library breakdown
// Horizontal progress bar for metadata quality (stays server-rendered, no recharts needed)
function HorizontalBar({ label, value, max, subLabel, color = "var(--color-primary)" }: { label: string; value: number; max: number; subLabel?: string; color?: string }) {
const pct = max > 0 ? (value / max) * 100 : 0;
return (
@@ -115,12 +60,24 @@ function HorizontalBar({ label, value, max, subLabel, color = "var(--color-prima
);
}
export default async function DashboardPage() {
export default async function DashboardPage({
searchParams,
}: {
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
const searchParamsAwaited = await searchParams;
const rawPeriod = searchParamsAwaited.period;
const period = rawPeriod === "day" ? "day" as const : rawPeriod === "week" ? "week" as const : "month" as const;
const metric = searchParamsAwaited.metric === "pages" ? "pages" as const : "books" as const;
const { t, locale } = await getServerTranslations();
let stats: StatsResponse | null = null;
let users: UserDto[] = [];
try {
stats = await fetchStats();
[stats, users] = await Promise.all([
fetchStats(period),
fetchUsers().catch(() => []),
]);
} catch (e) {
console.error("Failed to fetch stats:", e);
}
@@ -137,7 +94,20 @@ export default async function DashboardPage() {
);
}
const { overview, reading_status, by_format, by_language, by_library, top_series, additions_over_time, metadata } = stats;
const {
overview,
reading_status,
currently_reading = [],
recently_read = [],
reading_over_time = [],
users_reading_over_time = [],
by_format,
by_library,
top_series,
additions_over_time,
jobs_over_time = [],
metadata = { total_series: 0, series_linked: 0, series_unlinked: 0, books_with_summary: 0, books_with_isbn: 0, by_provider: [] },
} = stats;
const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
const formatColors = [
@@ -146,7 +116,6 @@ export default async function DashboardPage() {
"hsl(170 60% 45%)", "hsl(220 60% 50%)",
];
const maxLibBooks = Math.max(...by_library.map((l) => l.book_count), 1);
const noDataLabel = t("common.noData");
return (
@@ -174,23 +143,129 @@ export default async function DashboardPage() {
<StatCard icon="size" label={t("dashboard.totalSize")} value={formatBytes(overview.total_size_bytes)} color="warning" />
</div>
{/* Currently reading + Recently read */}
{(currently_reading.length > 0 || recently_read.length > 0) && (
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
{/* Currently reading */}
<Card hover={false}>
<CardHeader>
<CardTitle className="text-base">{t("dashboard.currentlyReading")}</CardTitle>
</CardHeader>
<CardContent>
<CurrentlyReadingList
items={currently_reading}
allLabel={t("dashboard.allUsers")}
emptyLabel={t("dashboard.noCurrentlyReading")}
pageProgressTemplate={t("dashboard.pageProgress")}
/>
</CardContent>
</Card>
{/* Recently read */}
<Card hover={false}>
<CardHeader>
<CardTitle className="text-base">{t("dashboard.recentlyRead")}</CardTitle>
</CardHeader>
<CardContent>
<RecentlyReadList
items={recently_read}
allLabel={t("dashboard.allUsers")}
emptyLabel={t("dashboard.noRecentlyRead")}
/>
</CardContent>
</Card>
</div>
)}
{/* Reading activity line chart */}
<Card hover={false}>
<CardHeader className="flex flex-row items-center justify-between space-y-0">
<CardTitle className="text-base">{t("dashboard.readingActivity")}</CardTitle>
<div className="flex gap-2">
<MetricToggle labels={{ books: t("dashboard.metricBooks"), pages: t("dashboard.metricPages") }} />
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
</div>
</CardHeader>
<CardContent>
{(() => {
const userColors = [
"hsl(142 60% 45%)", "hsl(198 78% 37%)", "hsl(45 93% 47%)",
"hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)",
];
const dataKey = metric === "pages" ? "pages_read" : "books_read";
const usernames = [...new Set(users_reading_over_time.map(r => r.username))];
if (usernames.length === 0) {
return (
<RcAreaChart
noDataLabel={noDataLabel}
data={reading_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m[dataKey] }))}
color="hsl(142 60% 45%)"
/>
);
}
// Pivot: { label, username1: n, username2: n, ... }
const byMonth = new Map<string, Record<string, unknown>>();
for (const row of users_reading_over_time) {
const label = formatChartLabel(row.month, period, locale);
if (!byMonth.has(row.month)) byMonth.set(row.month, { label });
byMonth.get(row.month)![row.username] = row[dataKey];
}
const chartData = [...byMonth.values()];
const lines = usernames.map((u, i) => ({
key: u,
label: u,
color: userColors[i % userColors.length],
}));
return <RcMultiLineChart data={chartData} lines={lines} noDataLabel={noDataLabel} />;
})()}
</CardContent>
</Card>
{/* Charts row */}
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
{/* Reading status donut */}
{/* Reading status par lecteur */}
<Card hover={false}>
<CardHeader>
<CardTitle className="text-base">{t("dashboard.readingStatus")}</CardTitle>
</CardHeader>
<CardContent>
<DonutChart
locale={locale}
{users.length === 0 ? (
<RcDonutChart
noDataLabel={noDataLabel}
data={[
{ label: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
{ label: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
{ label: t("status.read"), value: reading_status.read, color: readingColors[2] },
{ name: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
{ name: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
{ name: t("status.read"), value: reading_status.read, color: readingColors[2] },
]}
/>
) : (
<div className="space-y-3">
{users.map((user) => {
const total = overview.total_books;
const read = user.books_read;
const reading = user.books_reading;
const unread = Math.max(0, total - read - reading);
const readPct = total > 0 ? (read / total) * 100 : 0;
const readingPct = total > 0 ? (reading / total) * 100 : 0;
return (
<div key={user.id} className="space-y-1">
<div className="flex items-center justify-between text-sm">
<span className="font-medium text-foreground truncate">{user.username}</span>
<span className="text-xs text-muted-foreground shrink-0 ml-2">
<span className="text-success font-medium">{read}</span>
{reading > 0 && <span className="text-amber-500 font-medium"> · {reading}</span>}
<span className="text-muted-foreground/60"> / {total}</span>
</span>
</div>
<div className="h-2 bg-muted rounded-full overflow-hidden flex">
<div className="h-full bg-success transition-all duration-500" style={{ width: `${readPct}%` }} />
<div className="h-full bg-amber-500 transition-all duration-500" style={{ width: `${readingPct}%` }} />
</div>
</div>
);
})}
</div>
)}
</CardContent>
</Card>
@@ -200,11 +275,10 @@ export default async function DashboardPage() {
<CardTitle className="text-base">{t("dashboard.byFormat")}</CardTitle>
</CardHeader>
<CardContent>
<DonutChart
locale={locale}
<RcDonutChart
noDataLabel={noDataLabel}
data={by_format.slice(0, 6).map((f, i) => ({
label: (f.format || t("dashboard.unknown")).toUpperCase(),
name: (f.format || t("dashboard.unknown")).toUpperCase(),
value: f.count,
color: formatColors[i % formatColors.length],
}))}
@@ -218,11 +292,10 @@ export default async function DashboardPage() {
<CardTitle className="text-base">{t("dashboard.byLibrary")}</CardTitle>
</CardHeader>
<CardContent>
<DonutChart
locale={locale}
<RcDonutChart
noDataLabel={noDataLabel}
data={by_library.slice(0, 6).map((l, i) => ({
label: l.library_name,
name: l.library_name,
value: l.book_count,
color: formatColors[i % formatColors.length],
}))}
@@ -239,12 +312,11 @@ export default async function DashboardPage() {
<CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
</CardHeader>
<CardContent>
<DonutChart
locale={locale}
<RcDonutChart
noDataLabel={noDataLabel}
data={[
{ label: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
{ label: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
{ name: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
{ name: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
]}
/>
</CardContent>
@@ -256,11 +328,10 @@ export default async function DashboardPage() {
<CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
</CardHeader>
<CardContent>
<DonutChart
locale={locale}
<RcDonutChart
noDataLabel={noDataLabel}
data={metadata.by_provider.map((p, i) => ({
label: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
name: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
value: p.count,
color: formatColors[i % formatColors.length],
}))}
@@ -294,24 +365,32 @@ export default async function DashboardPage() {
</Card>
</div>
{/* Second row */}
{/* Libraries breakdown + Top series */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
{/* Monthly additions bar chart */}
{by_library.length > 0 && (
<Card hover={false}>
<CardHeader>
<CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
<CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
</CardHeader>
<CardContent>
<BarChart
noDataLabel={noDataLabel}
data={additions_over_time.map((m) => ({
label: m.month.slice(5), // "MM" from "YYYY-MM"
value: m.books_added,
<RcStackedBar
data={by_library.map((lib) => ({
name: lib.library_name,
read: lib.read_count,
reading: lib.reading_count,
unread: lib.unread_count,
sizeLabel: formatBytes(lib.size_bytes),
}))}
color="hsl(198 78% 37%)"
labels={{
read: t("status.read"),
reading: t("status.reading"),
unread: t("status.unread"),
books: t("dashboard.books"),
}}
/>
</CardContent>
</Card>
)}
{/* Top series */}
<Card hover={false}>
@@ -319,67 +398,59 @@ export default async function DashboardPage() {
<CardTitle className="text-base">{t("dashboard.popularSeries")}</CardTitle>
</CardHeader>
<CardContent>
<div className="space-y-3">
{top_series.slice(0, 8).map((s, i) => (
<HorizontalBar
key={i}
label={s.series}
value={s.book_count}
max={top_series[0]?.book_count || 1}
subLabel={t("dashboard.readCount", { read: s.read_count, total: s.book_count })}
<RcHorizontalBar
noDataLabel={t("dashboard.noSeries")}
data={top_series.slice(0, 8).map((s) => ({
name: s.series,
value: s.book_count,
subLabel: t("dashboard.readCount", { read: s.read_count, total: s.book_count }),
}))}
color="hsl(142 60% 45%)"
/>
))}
{top_series.length === 0 && (
<p className="text-muted-foreground text-sm text-center py-4">{t("dashboard.noSeries")}</p>
)}
</div>
</CardContent>
</Card>
</div>
{/* Libraries breakdown */}
{by_library.length > 0 && (
{/* Additions line chart full width */}
<Card hover={false}>
<CardHeader>
<CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
<CardHeader className="flex flex-row items-center justify-between space-y-0">
<CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
</CardHeader>
<CardContent>
<div className="grid grid-cols-1 md:grid-cols-2 gap-x-8 gap-y-4">
{by_library.map((lib, i) => (
<div key={i} className="space-y-2">
<div className="flex justify-between items-baseline">
<span className="font-medium text-foreground text-sm">{lib.library_name}</span>
<span className="text-xs text-muted-foreground">{formatBytes(lib.size_bytes)}</span>
</div>
<div className="h-3 bg-muted rounded-full overflow-hidden flex">
<div
className="h-full transition-all duration-500"
style={{ width: `${(lib.read_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(142 60% 45%)" }}
title={`${t("status.read")} : ${lib.read_count}`}
<RcAreaChart
noDataLabel={noDataLabel}
data={additions_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_added }))}
color="hsl(198 78% 37%)"
/>
</CardContent>
</Card>
{/* Jobs over time multi-line chart */}
<Card hover={false}>
<CardHeader className="flex flex-row items-center justify-between space-y-0">
<CardTitle className="text-base">{t("dashboard.jobsOverTime")}</CardTitle>
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
</CardHeader>
<CardContent>
<RcMultiLineChart
noDataLabel={noDataLabel}
data={jobs_over_time.map((j) => ({
label: formatChartLabel(j.label, period, locale),
scan: j.scan,
rebuild: j.rebuild,
thumbnail: j.thumbnail,
other: j.other,
}))}
lines={[
{ key: "scan", label: t("dashboard.jobScan"), color: "hsl(198 78% 37%)" },
{ key: "rebuild", label: t("dashboard.jobRebuild"), color: "hsl(142 60% 45%)" },
{ key: "thumbnail", label: t("dashboard.jobThumbnail"), color: "hsl(45 93% 47%)" },
{ key: "other", label: t("dashboard.jobOther"), color: "hsl(280 60% 50%)" },
]}
/>
<div
className="h-full transition-all duration-500"
style={{ width: `${(lib.reading_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(45 93% 47%)" }}
title={`${t("status.reading")} : ${lib.reading_count}`}
/>
<div
className="h-full transition-all duration-500"
style={{ width: `${(lib.unread_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(220 13% 70%)" }}
title={`${t("status.unread")} : ${lib.unread_count}`}
/>
</div>
<div className="flex gap-3 text-[11px] text-muted-foreground">
<span>{lib.book_count} {t("dashboard.books").toLowerCase()}</span>
<span className="text-success">{lib.read_count} {t("status.read").toLowerCase()}</span>
<span className="text-warning">{lib.reading_count} {t("status.reading").toLowerCase()}</span>
</div>
</div>
))}
</div>
</CardContent>
</Card>
)}
{/* Quick links */}
<QuickLinks t={t} />

View File

@@ -1,11 +1,12 @@
import { fetchAllSeries, fetchLibraries, fetchSeriesStatuses, LibraryDto, SeriesDto, SeriesPageDto, getBookCoverUrl } from "../../lib/api";
import { getServerTranslations } from "../../lib/i18n/server";
import { MarkSeriesReadButton } from "../components/MarkSeriesReadButton";
import { LiveSearchForm } from "../components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "../components/ui";
import { fetchAllSeries, fetchLibraries, fetchSeriesStatuses, LibraryDto, SeriesDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";
import { ProviderIcon } from "../components/ProviderIcon";
import { ProviderIcon } from "@/app/components/ProviderIcon";
import { ExternalLinkBadge } from "@/app/components/ExternalLinkBadge";
export const dynamic = "force-dynamic";
@@ -99,13 +100,13 @@ export default async function SeriesPage({
<LiveSearchForm
basePath="/series"
fields={[
{ name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder"), className: "flex-1 w-full" },
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions, className: "w-full sm:w-44" },
{ name: "status", type: "select", label: t("series.reading"), options: statusOptions, className: "w-full sm:w-32" },
{ name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions, className: "w-full sm:w-36" },
{ name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions, className: "w-full sm:w-36" },
{ name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions, className: "w-full sm:w-36" },
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions, className: "w-full sm:w-32" },
{ name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder") },
{ name: "library", type: "select", label: t("books.library"), options: libraryOptions },
{ name: "status", type: "select", label: t("series.reading"), options: statusOptions },
{ name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions },
{ name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions },
{ name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions },
{ name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
]}
/>
</CardContent>
@@ -122,13 +123,9 @@ export default async function SeriesPage({
<>
<div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
{series.map((s) => (
<Link
key={s.name}
href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
className="group"
>
<div key={s.name} className="group relative">
<div
className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 ${
className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden group-hover:shadow-md group-hover:-translate-y-1 transition-all duration-200 ${
s.books_read_count >= s.book_count ? "opacity-50" : ""
}`}
>
@@ -138,7 +135,7 @@ export default async function SeriesPage({
alt={t("books.coverOf", { name: s.name })}
fill
className="object-cover"
unoptimized
sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
/>
</div>
<div className="p-3">
@@ -149,13 +146,15 @@ export default async function SeriesPage({
<p className="text-xs text-muted-foreground">
{t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
</p>
<div className="relative z-20">
<MarkSeriesReadButton
seriesName={s.name}
bookCount={s.book_count}
booksReadCount={s.books_read_count}
/>
</div>
<div className="flex items-center gap-1 mt-1.5 flex-wrap">
</div>
<div className="relative z-20 flex items-center gap-1 mt-1.5 flex-wrap">
{s.series_status && (
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
s.series_status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
@@ -177,10 +176,24 @@ export default async function SeriesPage({
<ProviderIcon provider={s.metadata_provider} size={10} />
</span>
)}
{s.anilist_id && (
<ExternalLinkBadge
href={s.anilist_url ?? `https://anilist.co/manga/${s.anilist_id}`}
className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-cyan-500/15 text-cyan-600 hover:bg-cyan-500/25"
>
AL
</ExternalLinkBadge>
)}
</div>
</div>
</div>
</Link>
{/* Link overlay covering the full card — below interactive elements */}
<Link
href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
className="absolute inset-0 z-10 rounded-xl"
aria-label={s.name === "unclassified" ? t("books.unclassified") : s.name}
/>
</div>
))}
</div>

View File

@@ -0,0 +1,551 @@
"use client";
import { useState } from "react";
import { useRouter, useSearchParams } from "next/navigation";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, FormRow, Icon, toast, Toaster } from "@/app/components/ui";
import { Settings, CacheStats, ClearCacheResponse, ThumbnailStats, UserDto } from "@/lib/api";
import { useTranslation } from "@/lib/i18n/context";
import type { Locale } from "@/lib/i18n/types";
import { MetadataProvidersCard } from "./components/MetadataProvidersCard";
import { StatusMappingsCard } from "./components/StatusMappingsCard";
import { ProwlarrCard } from "./components/ProwlarrCard";
import { QBittorrentCard } from "./components/QBittorrentCard";
import { TelegramCard } from "./components/TelegramCard";
import { KomgaSyncCard } from "./components/KomgaSyncCard";
import { AnilistTab } from "./components/AnilistTab";
interface SettingsPageProps {
initialSettings: Settings;
initialCacheStats: CacheStats;
initialThumbnailStats: ThumbnailStats;
users: UserDto[];
initialTab?: string;
}
export default function SettingsPage({ initialSettings, initialCacheStats, initialThumbnailStats, users, initialTab }: SettingsPageProps) {
const { t, locale, setLocale } = useTranslation();
const router = useRouter();
const searchParams = useSearchParams();
const [settings, setSettings] = useState<Settings>({
...initialSettings,
thumbnail: initialSettings.thumbnail || { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
});
const [cacheStats, setCacheStats] = useState<CacheStats>(initialCacheStats);
const [thumbnailStats, setThumbnailStats] = useState<ThumbnailStats>(initialThumbnailStats);
const [isClearing, setIsClearing] = useState(false);
const [clearResult, setClearResult] = useState<ClearCacheResponse | null>(null);
const [isSaving, setIsSaving] = useState(false);
const VALID_TABS = ["general", "downloadTools", "metadata", "readingStatus", "notifications"] as const;
type TabId = typeof VALID_TABS[number];
function resolveTab(tab: string | null | undefined): TabId {
if (tab && (VALID_TABS as readonly string[]).includes(tab)) return tab as TabId;
return "general";
}
const [activeTab, setActiveTab] = useState<TabId>(
resolveTab(searchParams.get("tab") ?? initialTab)
);
  // Switch the active tab and mirror the selection into the URL (?tab=…)
  // via replace — no history entry, no scroll — so the tab survives reloads.
  function handleTabChange(tab: TabId) {
    setActiveTab(tab);
    router.replace(`?tab=${tab}`, { scroll: false });
  }
function hasEmptyValue(v: unknown): boolean {
if (v === null || v === "") return true;
if (typeof v === "object" && v !== null) {
return Object.values(v).some((val) => val !== undefined && hasEmptyValue(val));
}
return false;
}
async function handleUpdateSetting(key: string, value: unknown) {
if (hasEmptyValue(value)) return;
setIsSaving(true);
try {
const response = await fetch(`/api/settings/${key}`, {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ value })
});
if (response.ok) {
toast(t("settings.savedSuccess"), "success");
} else {
toast(t("settings.savedError"), "error");
}
} catch {
toast(t("settings.saveError"), "error");
} finally {
setIsSaving(false);
}
}
async function handleClearCache() {
setIsClearing(true);
setClearResult(null);
try {
const response = await fetch("/api/settings/cache/clear", { method: "POST" });
const result = await response.json();
setClearResult(result);
// Refresh cache stats
const statsResponse = await fetch("/api/settings/cache/stats");
if (statsResponse.ok) {
const stats = await statsResponse.json();
setCacheStats(stats);
}
} catch {
setClearResult({ success: false, message: t("settings.cacheClearError") });
} finally {
setIsClearing(false);
}
}
const tabs = [
{ id: "general" as const, label: t("settings.general"), icon: "settings" as const },
{ id: "downloadTools" as const, label: t("settings.downloadTools"), icon: "play" as const },
{ id: "metadata" as const, label: t("settings.metadata"), icon: "tag" as const },
{ id: "readingStatus" as const, label: t("settings.readingStatus"), icon: "eye" as const },
{ id: "notifications" as const, label: t("settings.notifications"), icon: "bell" as const },
];
return (
<>
<div className="mb-6">
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
<Icon name="settings" size="xl" />
{t("settings.title")}
</h1>
</div>
{/* Tab Navigation */}
<div className="flex gap-1 mb-6 border-b border-border">
{tabs.map((tab) => (
<button
key={tab.id}
onClick={() => handleTabChange(tab.id)}
className={`flex items-center gap-2 px-4 py-2.5 text-sm font-medium border-b-2 transition-colors -mb-px ${
activeTab === tab.id
? "border-primary text-primary"
: "border-transparent text-muted-foreground hover:text-foreground hover:border-border"
}`}
>
<Icon name={tab.icon} size="sm" />
{tab.label}
</button>
))}
</div>
{activeTab === "general" && (<>
{/* Language Selector */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="settings" size="md" />
{t("settings.language")}
</CardTitle>
<CardDescription>{t("settings.languageDesc")}</CardDescription>
</CardHeader>
<CardContent>
<FormSelect
value={locale}
onChange={(e) => setLocale(e.target.value as Locale)}
>
<option value="fr">Français</option>
<option value="en">English</option>
</FormSelect>
</CardContent>
</Card>
{/* Image Processing Settings */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="image" size="md" />
{t("settings.imageProcessing")}
</CardTitle>
<CardDescription><span dangerouslySetInnerHTML={{ __html: t("settings.imageProcessingDesc") }} /></CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.defaultFormat")}</label>
<FormSelect
value={settings.image_processing.format}
onChange={(e) => {
const newSettings = { ...settings, image_processing: { ...settings.image_processing, format: e.target.value } };
setSettings(newSettings);
handleUpdateSetting("image_processing", newSettings.image_processing);
}}
>
<option value="webp">WebP</option>
<option value="jpeg">JPEG</option>
<option value="png">PNG</option>
</FormSelect>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.defaultQuality")}</label>
<FormInput
type="number"
min={1}
max={100}
value={settings.image_processing.quality}
onChange={(e) => {
const quality = parseInt(e.target.value) || 85;
const newSettings = { ...settings, image_processing: { ...settings.image_processing, quality } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)}
/>
</FormField>
</FormRow>
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.defaultFilter")}</label>
<FormSelect
value={settings.image_processing.filter}
onChange={(e) => {
const newSettings = { ...settings, image_processing: { ...settings.image_processing, filter: e.target.value } };
setSettings(newSettings);
handleUpdateSetting("image_processing", newSettings.image_processing);
}}
>
<option value="lanczos3">{t("settings.filterLanczos")}</option>
<option value="triangle">{t("settings.filterTriangle")}</option>
<option value="nearest">{t("settings.filterNearest")}</option>
</FormSelect>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.maxWidth")}</label>
<FormInput
type="number"
min={100}
max={2160}
value={settings.image_processing.max_width}
onChange={(e) => {
const max_width = parseInt(e.target.value) || 2160;
const newSettings = { ...settings, image_processing: { ...settings.image_processing, max_width } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("image_processing", settings.image_processing)}
/>
</FormField>
</FormRow>
</div>
</CardContent>
</Card>
{/* Cache Settings */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="cache" size="md" />
{t("settings.cache")}
</CardTitle>
<CardDescription>{t("settings.cacheDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
<div>
<p className="text-sm text-muted-foreground">{t("settings.cacheSize")}</p>
<p className="text-2xl font-semibold">{cacheStats.total_size_mb.toFixed(2)} MB</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.files")}</p>
<p className="text-2xl font-semibold">{cacheStats.file_count}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.directory")}</p>
<p className="text-sm font-mono truncate" title={cacheStats.directory}>{cacheStats.directory}</p>
</div>
</div>
{clearResult && (
<div className={`p-3 rounded-lg ${clearResult.success ? 'bg-success/10 text-success' : 'bg-destructive/10 text-destructive'}`}>
{clearResult.message}
</div>
)}
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.cacheDirectory")}</label>
<FormInput
value={settings.cache.directory}
onChange={(e) => {
const newSettings = { ...settings, cache: { ...settings.cache, directory: e.target.value } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("cache", settings.cache)}
/>
</FormField>
<FormField className="w-32">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.maxSizeMb")}</label>
<FormInput
type="number"
value={settings.cache.max_size_mb}
onChange={(e) => {
const max_size_mb = parseInt(e.target.value) || 10000;
const newSettings = { ...settings, cache: { ...settings.cache, max_size_mb } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("cache", settings.cache)}
/>
</FormField>
</FormRow>
<Button
onClick={handleClearCache}
disabled={isClearing}
variant="destructive"
>
{isClearing ? (
<>
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
{t("settings.clearing")}
</>
) : (
<>
<Icon name="trash" size="sm" className="mr-2" />
{t("settings.clearCache")}
</>
)}
</Button>
</div>
</CardContent>
</Card>
{/* Limits Settings */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="performance" size="md" />
{t("settings.performanceLimits")}
</CardTitle>
<CardDescription>{t("settings.performanceDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.concurrentRenders")}</label>
<FormInput
type="number"
min={1}
max={20}
value={settings.limits.concurrent_renders}
onChange={(e) => {
const concurrent_renders = parseInt(e.target.value) || 4;
const newSettings = { ...settings, limits: { ...settings.limits, concurrent_renders } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("limits", settings.limits)}
/>
<p className="text-xs text-muted-foreground mt-1">
{t("settings.concurrentRendersHelp")}
</p>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.timeoutSeconds")}</label>
<FormInput
type="number"
min={5}
max={60}
value={settings.limits.timeout_seconds}
onChange={(e) => {
const timeout_seconds = parseInt(e.target.value) || 12;
const newSettings = { ...settings, limits: { ...settings.limits, timeout_seconds } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("limits", settings.limits)}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.rateLimit")}</label>
<FormInput
type="number"
min={10}
max={1000}
value={settings.limits.rate_limit_per_second}
onChange={(e) => {
const rate_limit_per_second = parseInt(e.target.value) || 120;
const newSettings = { ...settings, limits: { ...settings.limits, rate_limit_per_second } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("limits", settings.limits)}
/>
</FormField>
</FormRow>
<p className="text-sm text-muted-foreground">
{t("settings.limitsNote")}
</p>
</div>
</CardContent>
</Card>
{/* Thumbnail Settings */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="image" size="md" />
{t("settings.thumbnails")}
</CardTitle>
<CardDescription>{t("settings.thumbnailsDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.enableThumbnails")}</label>
<FormSelect
value={settings.thumbnail.enabled ? "true" : "false"}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, enabled: e.target.value === "true" } };
setSettings(newSettings);
handleUpdateSetting("thumbnail", newSettings.thumbnail);
}}
>
<option value="true">{t("common.enabled")}</option>
<option value="false">{t("common.disabled")}</option>
</FormSelect>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.outputFormat")}</label>
<FormSelect
value={settings.thumbnail.format}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, format: e.target.value } };
setSettings(newSettings);
handleUpdateSetting("thumbnail", newSettings.thumbnail);
}}
>
<option value="original">{t("settings.formatOriginal")}</option>
<option value="webp">WebP</option>
<option value="jpeg">JPEG</option>
<option value="png">PNG</option>
</FormSelect>
<p className="text-xs text-muted-foreground mt-1">
{settings.thumbnail.format === "original"
? t("settings.formatOriginalDesc")
: t("settings.formatReencodeDesc")}
</p>
</FormField>
</FormRow>
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.width")}</label>
<FormInput
type="number"
min={50}
max={600}
value={settings.thumbnail.width}
onChange={(e) => {
const width = parseInt(e.target.value) || 300;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, width } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.height")}</label>
<FormInput
type="number"
min={50}
max={800}
value={settings.thumbnail.height}
onChange={(e) => {
const height = parseInt(e.target.value) || 400;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, height } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.quality")}</label>
<FormInput
type="number"
min={1}
max={100}
value={settings.thumbnail.quality}
onChange={(e) => {
const quality = parseInt(e.target.value) || 80;
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, quality } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
</FormRow>
<FormRow>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.thumbnailDirectory")}</label>
<FormInput
value={settings.thumbnail.directory}
onChange={(e) => {
const newSettings = { ...settings, thumbnail: { ...settings.thumbnail, directory: e.target.value } };
setSettings(newSettings);
}}
onBlur={() => handleUpdateSetting("thumbnail", settings.thumbnail)}
/>
</FormField>
</FormRow>
<div className="grid grid-cols-3 gap-4 p-4 bg-muted/30 rounded-lg">
<div>
<p className="text-sm text-muted-foreground">{t("settings.totalSize")}</p>
<p className="text-2xl font-semibold">{thumbnailStats.total_size_mb.toFixed(2)} MB</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.files")}</p>
<p className="text-2xl font-semibold">{thumbnailStats.file_count}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.directory")}</p>
<p className="text-sm font-mono truncate" title={thumbnailStats.directory}>{thumbnailStats.directory}</p>
</div>
</div>
<p className="text-sm text-muted-foreground">
{t("settings.thumbnailsNote")}
</p>
</div>
</CardContent>
</Card>
</>)}
{activeTab === "metadata" && (<>
{/* Metadata Providers */}
<MetadataProvidersCard handleUpdateSetting={handleUpdateSetting} />
{/* Status Mappings */}
<StatusMappingsCard />
</>)}
{activeTab === "downloadTools" && (<>
{/* Prowlarr */}
<ProwlarrCard handleUpdateSetting={handleUpdateSetting} />
{/* qBittorrent */}
<QBittorrentCard handleUpdateSetting={handleUpdateSetting} />
</>)}
{activeTab === "notifications" && (<>
{/* Telegram Notifications */}
<TelegramCard handleUpdateSetting={handleUpdateSetting} />
</>)}
{activeTab === "readingStatus" && (<>
<AnilistTab handleUpdateSetting={handleUpdateSetting} users={users} />
<KomgaSyncCard users={users} />
</>)}
<Toaster />
</>
);
}

View File

@@ -0,0 +1,412 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, Icon } from "@/app/components/ui";
import { UserDto, AnilistStatusDto, AnilistSyncReportDto, AnilistPullReportDto, AnilistSyncPreviewItemDto, AnilistSyncItemDto, AnilistPullItemDto } from "@/lib/api";
import { useTranslation } from "@/lib/i18n/context";
/**
 * AniList integration settings tab.
 *
 * Handles: OAuth connection (implicit grant) via a user-supplied client id,
 * manual access-token entry, mapping the AniList account to a local user,
 * and the three sync actions (preview, push, pull) with their result reports.
 *
 * @param handleUpdateSetting Persists one settings key/value to the backend.
 * @param users Local accounts offered in the "linked local user" selector.
 */
export function AnilistTab({
handleUpdateSetting,
users,
}: {
handleUpdateSetting: (key: string, value: unknown) => Promise<void>;
users: UserDto[];
}) {
const { t } = useTranslation();
// window is unavailable during SSR; capture the origin after mount so the
// redirect-URL hint renders correctly on the client.
const [origin, setOrigin] = useState("");
useEffect(() => { setOrigin(window.location.origin); }, []);
// Credential / identity form fields.
const [clientId, setClientId] = useState("");
const [token, setToken] = useState("");
const [userId, setUserId] = useState("");
const [localUserId, setLocalUserId] = useState("");
// Connection-test state.
const [isTesting, setIsTesting] = useState(false);
const [viewer, setViewer] = useState<AnilistStatusDto | null>(null);
const [testError, setTestError] = useState<string | null>(null);
// Push (sync) / pull / preview action state.
const [isSyncing, setIsSyncing] = useState(false);
const [syncReport, setSyncReport] = useState<AnilistSyncReportDto | null>(null);
const [isPulling, setIsPulling] = useState(false);
const [pullReport, setPullReport] = useState<AnilistPullReportDto | null>(null);
const [actionError, setActionError] = useState<string | null>(null);
const [isPreviewing, setIsPreviewing] = useState(false);
const [previewItems, setPreviewItems] = useState<AnilistSyncPreviewItemDto[] | null>(null);
// Load persisted AniList settings once on mount; failures are ignored so
// the tab still renders with empty fields.
useEffect(() => {
fetch("/api/settings/anilist")
.then((r) => r.ok ? r.json() : null)
.then((data) => {
if (data) {
if (data.client_id) setClientId(String(data.client_id));
if (data.access_token) setToken(data.access_token);
if (data.user_id) setUserId(String(data.user_id));
if (data.local_user_id) setLocalUserId(String(data.local_user_id));
}
})
.catch(() => {});
}, []);
// Serializes the current form state into the settings payload; empty fields
// become undefined so they do not overwrite stored values with "".
function buildAnilistSettings() {
return {
client_id: clientId || undefined,
access_token: token || undefined,
user_id: userId ? Number(userId) : undefined,
local_user_id: localUserId || undefined,
};
}
function handleConnect() {
if (!clientId) return;
// Save client_id first, then open OAuth URL (implicit grant: AniList
// redirects back with the access token in the URL fragment).
handleUpdateSetting("anilist", buildAnilistSettings()).then(() => {
window.location.href = `https://anilist.co/api/v2/oauth/authorize?client_id=${encodeURIComponent(clientId)}&response_type=token`;
});
}
async function handleSaveToken() {
await handleUpdateSetting("anilist", buildAnilistSettings());
}
async function handleTestConnection() {
setIsTesting(true);
setViewer(null);
setTestError(null);
try {
// Save token first so the API reads the current value
await handleUpdateSetting("anilist", buildAnilistSettings());
const resp = await fetch("/api/anilist/status");
const data = await resp.json();
if (!resp.ok) throw new Error(data.error || "Connection failed");
setViewer(data);
// Backfill the numeric AniList user id from the viewer response when the
// field was left empty.
if (!userId && data.user_id) setUserId(String(data.user_id));
} catch (e) {
setTestError(e instanceof Error ? e.message : "Connection failed");
} finally {
setIsTesting(false);
}
}
// Dry-run: fetches what a push would send without writing to AniList.
async function handlePreview() {
setIsPreviewing(true);
setPreviewItems(null);
setActionError(null);
try {
const resp = await fetch("/api/anilist/sync/preview");
const data = await resp.json();
if (!resp.ok) throw new Error(data.error || "Preview failed");
setPreviewItems(data);
} catch (e) {
setActionError(e instanceof Error ? e.message : "Preview failed");
} finally {
setIsPreviewing(false);
}
}
// Push local reading statuses to AniList.
async function handleSync() {
setIsSyncing(true);
setSyncReport(null);
setActionError(null);
try {
const resp = await fetch("/api/anilist/sync", { method: "POST" });
const data = await resp.json();
if (!resp.ok) throw new Error(data.error || "Sync failed");
setSyncReport(data);
} catch (e) {
setActionError(e instanceof Error ? e.message : "Sync failed");
} finally {
setIsSyncing(false);
}
}
// Pull AniList statuses into the local library.
async function handlePull() {
setIsPulling(true);
setPullReport(null);
setActionError(null);
try {
const resp = await fetch("/api/anilist/pull", { method: "POST" });
const data = await resp.json();
if (!resp.ok) throw new Error(data.error || "Pull failed");
setPullReport(data);
} catch (e) {
setActionError(e instanceof Error ? e.message : "Pull failed");
} finally {
setIsPulling(false);
}
}
return (
<>
{/* Connection card: client id, OAuth connect, test, manual token, user mapping */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="link" size="md" />
{t("settings.anilistTitle")}
</CardTitle>
<CardDescription>{t("settings.anilistDesc")}</CardDescription>
</CardHeader>
<CardContent className="space-y-4">
<p className="text-sm text-muted-foreground">{t("settings.anilistConnectDesc")}</p>
{/* Redirect URL info */}
<div className="rounded-md bg-muted/50 border px-3 py-2 text-xs text-muted-foreground space-y-1">
<p className="font-medium text-foreground">{t("settings.anilistRedirectUrlLabel")}</p>
<code className="select-all font-mono">{origin ? `${origin}/anilist/callback` : "/anilist/callback"}</code>
<p>{t("settings.anilistRedirectUrlHint")}</p>
</div>
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.anilistClientId")}</label>
<FormInput
type="text"
autoComplete="off"
value={clientId}
onChange={(e) => setClientId(e.target.value)}
placeholder={t("settings.anilistClientIdPlaceholder")}
/>
</FormField>
</div>
<div className="flex items-center gap-3 flex-wrap">
<Button onClick={handleConnect} disabled={!clientId}>
<Icon name="link" size="sm" className="mr-2" />
{t("settings.anilistConnectButton")}
</Button>
<Button onClick={handleTestConnection} disabled={isTesting || !token} variant="secondary">
{isTesting ? (
<><Icon name="spinner" size="sm" className="animate-spin mr-2" />{t("settings.testing")}</>
) : (
<><Icon name="refresh" size="sm" className="mr-2" />{t("settings.anilistTestConnection")}</>
)}
</Button>
{viewer && (
<span className="text-sm text-success font-medium">
{t("settings.anilistConnected")} <strong>{viewer.username}</strong>
{" · "}
<a href={viewer.site_url} target="_blank" rel="noopener noreferrer" className="underline">AniList</a>
</span>
)}
{token && !viewer && (
<span className="text-sm text-muted-foreground">{t("settings.anilistTokenPresent")}</span>
)}
{testError && <span className="text-sm text-destructive">{testError}</span>}
</div>
<details className="group">
<summary className="text-sm text-muted-foreground cursor-pointer hover:text-foreground select-none">
{t("settings.anilistManualToken")}
</summary>
<div className="mt-3 space-y-3">
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.anilistToken")}</label>
<FormInput
type="password"
autoComplete="off"
value={token}
onChange={(e) => setToken(e.target.value)}
placeholder={t("settings.anilistTokenPlaceholder")}
/>
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.anilistUserId")}</label>
<FormInput
type="text"
autoComplete="off"
value={userId}
onChange={(e) => setUserId(e.target.value)}
placeholder={t("settings.anilistUserIdPlaceholder")}
/>
</FormField>
</div>
<Button onClick={handleSaveToken} disabled={!token}>
{t("common.save")}
</Button>
</div>
</details>
<div className="border-t border-border/50 pt-4 mt-2">
<p className="text-sm font-medium text-foreground mb-1">{t("settings.anilistLocalUserTitle")}</p>
<p className="text-xs text-muted-foreground mb-3">{t("settings.anilistLocalUserDesc")}</p>
<div className="flex items-center gap-3">
<select
value={localUserId}
onChange={(e) => {
const newLocalUserId = e.target.value;
setLocalUserId(newLocalUserId);
// Fire-and-forget save; `void` signals the intentionally unawaited promise.
void handleUpdateSetting("anilist", {
...buildAnilistSettings(),
local_user_id: newLocalUserId || undefined,
});
}}
autoComplete="off"
className="flex-1 text-sm border border-border rounded-lg px-3 py-2.5 bg-background focus:outline-none focus:ring-2 focus:ring-ring h-10"
>
<option value="">{t("settings.anilistLocalUserNone")}</option>
{users.map((u) => (
<option key={u.id} value={u.id}>{u.username}</option>
))}
</select>
</div>
</div>
</CardContent>
</Card>
{/* Sync card: push / pull actions and their reports */}
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="refresh" size="md" />
{t("settings.anilistSyncTitle")}
</CardTitle>
</CardHeader>
<CardContent className="space-y-4">
<div className="grid grid-cols-1 sm:grid-cols-2 gap-6">
<div className="space-y-2">
<p className="text-sm text-muted-foreground">{t("settings.anilistSyncDesc")}</p>
<div className="flex items-center gap-2 flex-wrap">
<Button onClick={handlePreview} disabled={isPreviewing} variant="secondary">
{isPreviewing ? (
<><Icon name="spinner" size="sm" className="animate-spin mr-2" />{t("settings.anilistPreviewing")}</>
) : (
<><Icon name="eye" size="sm" className="mr-2" />{t("settings.anilistPreviewButton")}</>
)}
</Button>
<Button onClick={handleSync} disabled={isSyncing}>
{isSyncing ? (
<><Icon name="spinner" size="sm" className="animate-spin mr-2" />{t("settings.anilistSyncing")}</>
) : (
<><Icon name="refresh" size="sm" className="mr-2" />{t("settings.anilistSyncButton")}</>
)}
</Button>
</div>
{syncReport && (
<div className="mt-2 border rounded-lg overflow-hidden">
<div className="px-4 py-2 bg-muted/50 flex items-center gap-3">
<span className="text-sm text-success font-medium">{t("settings.anilistSynced", { count: String(syncReport.synced) })}</span>
{syncReport.skipped > 0 && <span className="text-sm text-muted-foreground">{t("settings.anilistSkipped", { count: String(syncReport.skipped) })}</span>}
{syncReport.errors.length > 0 && <span className="text-sm text-destructive">{t("settings.anilistErrors", { count: String(syncReport.errors.length) })}</span>}
</div>
{syncReport.items.length > 0 && (
<div className="divide-y max-h-60 overflow-y-auto">
{syncReport.items.map((item: AnilistSyncItemDto) => (
<div key={item.series_name} className="flex items-center justify-between px-4 py-2 text-sm">
{/* NOTE(review): the fallback href has no id and points to https://anilist.co/manga/
    (a dead link); confirm whether AnilistSyncItemDto exposes an anilist_id to append. */}
<a
href={item.anilist_url ?? `https://anilist.co/manga/`}
target="_blank"
rel="noopener noreferrer"
className="truncate font-medium hover:underline min-w-0 mr-3"
>
{item.anilist_title ?? item.series_name}
</a>
<div className="flex items-center gap-2 shrink-0">
<span className={`text-xs px-1.5 py-0.5 rounded-full font-medium ${
item.status === "COMPLETED" ? "bg-green-500/15 text-green-600" :
item.status === "CURRENT" ? "bg-blue-500/15 text-blue-600" :
"bg-muted text-muted-foreground"
}`}>{item.status}</span>
{item.progress_volumes > 0 && (
<span className="text-xs text-muted-foreground">{item.progress_volumes} vol.</span>
)}
</div>
</div>
))}
</div>
)}
{syncReport.errors.map((err: string, i: number) => (
<p key={i} className="text-xs text-destructive px-4 py-1 border-t">{err}</p>
))}
</div>
)}
</div>
<div className="space-y-2">
<p className="text-sm text-muted-foreground">{t("settings.anilistPullDesc")}</p>
<Button onClick={handlePull} disabled={isPulling}>
{isPulling ? (
<><Icon name="spinner" size="sm" className="animate-spin mr-2" />{t("settings.anilistPulling")}</>
) : (
<><Icon name="refresh" size="sm" className="mr-2" />{t("settings.anilistPullButton")}</>
)}
</Button>
{pullReport && (
<div className="mt-2 border rounded-lg overflow-hidden">
<div className="px-4 py-2 bg-muted/50 flex items-center gap-3">
<span className="text-sm text-success font-medium">{t("settings.anilistUpdated", { count: String(pullReport.updated) })}</span>
{pullReport.skipped > 0 && <span className="text-sm text-muted-foreground">{t("settings.anilistSkipped", { count: String(pullReport.skipped) })}</span>}
{pullReport.errors.length > 0 && <span className="text-sm text-destructive">{t("settings.anilistErrors", { count: String(pullReport.errors.length) })}</span>}
</div>
{pullReport.items.length > 0 && (
<div className="divide-y max-h-60 overflow-y-auto">
{pullReport.items.map((item: AnilistPullItemDto) => (
<div key={item.series_name} className="flex items-center justify-between px-4 py-2 text-sm">
{/* NOTE(review): same dead fallback link as the sync list — confirm DTO fields. */}
<a
href={item.anilist_url ?? `https://anilist.co/manga/`}
target="_blank"
rel="noopener noreferrer"
className="truncate font-medium hover:underline min-w-0 mr-3"
>
{item.anilist_title ?? item.series_name}
</a>
<div className="flex items-center gap-2 shrink-0">
<span className={`text-xs px-1.5 py-0.5 rounded-full font-medium ${
item.anilist_status === "COMPLETED" ? "bg-green-500/15 text-green-600" :
item.anilist_status === "CURRENT" ? "bg-blue-500/15 text-blue-600" :
item.anilist_status === "PLANNING" ? "bg-amber-500/15 text-amber-600" :
"bg-muted text-muted-foreground"
}`}>{item.anilist_status}</span>
<span className="text-xs text-muted-foreground">{item.books_updated} {t("dashboard.books").toLowerCase()}</span>
</div>
</div>
))}
</div>
)}
{pullReport.errors.map((err: string, i: number) => (
<p key={i} className="text-xs text-destructive px-4 py-1 border-t">{err}</p>
))}
</div>
)}
</div>
</div>
{actionError && <p className="text-sm text-destructive">{actionError}</p>}
{previewItems !== null && (
<div className="mt-2 border rounded-lg overflow-hidden">
<div className="px-4 py-2 bg-muted/50 flex items-center justify-between">
<span className="text-sm font-medium">{t("settings.anilistPreviewTitle", { count: String(previewItems.length) })}</span>
{/* Dismiss control: was an empty <button> (invisible and unlabeled);
    give it a visible glyph and an accessible label. */}
<button onClick={() => setPreviewItems(null)} aria-label="Close preview" className="text-xs text-muted-foreground hover:text-foreground">✕</button>
</div>
{previewItems.length === 0 ? (
<p className="text-sm text-muted-foreground px-4 py-3">{t("settings.anilistPreviewEmpty")}</p>
) : (
<div className="divide-y">
{previewItems.map((item) => (
<div key={`${item.anilist_id}-${item.series_name}`} className="flex items-center justify-between px-4 py-2 text-sm">
<div className="flex items-center gap-2 min-w-0">
<a
href={item.anilist_url ?? `https://anilist.co/manga/${item.anilist_id}`}
target="_blank"
rel="noopener noreferrer"
className="truncate font-medium hover:underline"
>
{item.anilist_title ?? item.series_name}
</a>
{item.anilist_title && item.anilist_title !== item.series_name && (
<span className="text-muted-foreground truncate hidden sm:inline"> {item.series_name}</span>
)}
</div>
<div className="flex items-center gap-3 shrink-0 ml-3">
<span className="text-xs text-muted-foreground">{item.books_read}/{item.book_count}</span>
<span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
item.status === "COMPLETED" ? "bg-success/15 text-success" :
item.status === "CURRENT" ? "bg-blue-500/15 text-blue-600" :
"bg-muted text-muted-foreground"
}`}>
{item.status}
</span>
</div>
</div>
))}
</div>
)}
</div>
)}
</CardContent>
</Card>
</>
);
}

View File

@@ -0,0 +1,281 @@
"use client";
import { useState, useEffect, useCallback, useMemo } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, Icon } from "@/app/components/ui";
import { KomgaSyncResponse, KomgaSyncReportSummary, UserDto } from "@/lib/api";
import { useTranslation } from "@/lib/i18n/context";
/**
 * Komga read-status sync card.
 *
 * Collects Komga server credentials, runs a one-shot sync that marks matching
 * local books as read, shows the result breakdown (matched / already read /
 * newly marked / unmatched), and lists past sync reports fetched from the API.
 *
 * The password is only sent with the sync request; on success the URL,
 * username and target user id are persisted (never the password).
 *
 * @param users Local accounts selectable as the sync target.
 */
export function KomgaSyncCard({ users }: { users: UserDto[] }) {
const { t, locale } = useTranslation();
// Connection form state; the user id defaults to the first local account.
const [komgaUrl, setKomgaUrl] = useState("");
const [komgaUsername, setKomgaUsername] = useState("");
const [komgaPassword, setKomgaPassword] = useState("");
const [komgaUserId, setKomgaUserId] = useState(users[0]?.id ?? "");
// Live-sync state and its expandable detail sections.
const [isSyncing, setIsSyncing] = useState(false);
const [syncResult, setSyncResult] = useState<KomgaSyncResponse | null>(null);
const [syncError, setSyncError] = useState<string | null>(null);
const [showUnmatched, setShowUnmatched] = useState(false);
const [showMatchedBooks, setShowMatchedBooks] = useState(false);
// Sync-history state (summaries plus one fully loaded report).
const [reports, setReports] = useState<KomgaSyncReportSummary[]>([]);
const [selectedReport, setSelectedReport] = useState<KomgaSyncResponse | null>(null);
const [showReportUnmatched, setShowReportUnmatched] = useState(false);
const [showReportMatchedBooks, setShowReportMatchedBooks] = useState(false);
// Sets give O(1) lookup when flagging "newly marked" titles in the lists.
const syncNewlyMarkedSet = useMemo(
() => new Set(syncResult?.newly_marked_books ?? []),
[syncResult?.newly_marked_books],
);
const reportNewlyMarkedSet = useMemo(
() => new Set(selectedReport?.newly_marked_books ?? []),
[selectedReport?.newly_marked_books],
);
// Loads the sync-history summaries; failures are silently ignored.
const fetchReports = useCallback(async () => {
try {
const resp = await fetch("/api/komga/reports");
if (resp.ok) setReports(await resp.json());
} catch { /* ignore */ }
}, []);
// On mount: load history and any previously saved connection settings
// (URL, username, user id — the password is never persisted).
useEffect(() => {
fetchReports();
fetch("/api/settings/komga").then(r => r.ok ? r.json() : null).then(data => {
if (data) {
if (data.url) setKomgaUrl(data.url);
if (data.username) setKomgaUsername(data.username);
if (data.user_id) setKomgaUserId(data.user_id);
}
}).catch(() => {});
}, [fetchReports]);
// Fetches one full report by id and resets its collapsed sections.
async function handleViewReport(id: string) {
setSelectedReport(null);
setShowReportUnmatched(false);
setShowReportMatchedBooks(false);
try {
const resp = await fetch(`/api/komga/reports/${id}`);
if (resp.ok) setSelectedReport(await resp.json());
} catch { /* ignore */ }
}
// Runs the sync with the entered credentials; on success refreshes history
// and best-effort persists the non-secret connection settings.
async function handleKomgaSync() {
setIsSyncing(true);
setSyncResult(null);
setSyncError(null);
setShowUnmatched(false);
setShowMatchedBooks(false);
try {
const response = await fetch("/api/komga/sync", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ url: komgaUrl, username: komgaUsername, password: komgaPassword, user_id: komgaUserId }),
});
const data = await response.json();
if (!response.ok) {
setSyncError(data.error || "Sync failed");
} else {
setSyncResult(data);
fetchReports();
fetch("/api/settings/komga", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ value: { url: komgaUrl, username: komgaUsername, user_id: komgaUserId } }),
}).catch(() => {});
}
} catch {
setSyncError("Failed to connect to sync endpoint");
} finally {
setIsSyncing(false);
}
}
return (
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="refresh" size="md" />
{t("settings.komgaSync")}
</CardTitle>
<CardDescription>{t("settings.komgaDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.komgaUrl")}</label>
<FormInput type="url" placeholder="https://komga.example.com" value={komgaUrl} onChange={(e) => setKomgaUrl(e.target.value)} />
</FormField>
</div>
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.username")}</label>
<FormInput value={komgaUsername} onChange={(e) => setKomgaUsername(e.target.value)} />
</FormField>
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.password")}</label>
<FormInput type="password" autoComplete="off" value={komgaPassword} onChange={(e) => setKomgaPassword(e.target.value)} />
</FormField>
</div>
{users.length > 0 && (
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("users.title")}</label>
<FormSelect value={komgaUserId} onChange={(e) => setKomgaUserId(e.target.value)}>
{users.map((u) => (
<option key={u.id} value={u.id}>{u.username}</option>
))}
</FormSelect>
</FormField>
</div>
)}
<Button onClick={handleKomgaSync} disabled={isSyncing || !komgaUrl || !komgaUsername || !komgaPassword || !komgaUserId}>
{isSyncing ? (
<><Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />{t("settings.syncing")}</>
) : (
<><Icon name="refresh" size="sm" className="mr-2" />{t("settings.syncReadBooks")}</>
)}
</Button>
{syncError && <div className="p-3 rounded-lg bg-destructive/10 text-destructive">{syncError}</div>}
{syncResult && (
<div className="space-y-3">
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4 p-4 bg-muted/30 rounded-lg">
<div>
<p className="text-sm text-muted-foreground">{t("settings.komgaRead")}</p>
<p className="text-2xl font-semibold">{syncResult.total_komga_read}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.matched")}</p>
<p className="text-2xl font-semibold">{syncResult.matched}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.alreadyRead")}</p>
<p className="text-2xl font-semibold">{syncResult.already_read}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.newlyMarked")}</p>
<p className="text-2xl font-semibold text-success">{syncResult.newly_marked}</p>
</div>
</div>
{syncResult.matched_books.length > 0 && (
<div>
<button type="button" onClick={() => setShowMatchedBooks(!showMatchedBooks)} className="text-sm text-muted-foreground hover:text-foreground flex items-center gap-1">
<Icon name={showMatchedBooks ? "chevronDown" : "chevronRight"} size="sm" />
{t("settings.matchedBooks", { count: syncResult.matched_books.length, plural: syncResult.matched_books.length !== 1 ? "s" : "" })}
</button>
{showMatchedBooks && (
<div className="mt-2 max-h-60 overflow-y-auto p-3 bg-success/5 rounded-lg text-sm space-y-1">
{syncResult.matched_books.map((title, i) => (
<p key={i} className="text-foreground truncate flex items-center gap-1.5" title={title}>
{syncNewlyMarkedSet.has(title) && <Icon name="check" size="sm" className="text-success shrink-0" />}
{title}
</p>
))}
</div>
)}
</div>
)}
{syncResult.unmatched.length > 0 && (
<div>
<button type="button" onClick={() => setShowUnmatched(!showUnmatched)} className="text-sm text-muted-foreground hover:text-foreground flex items-center gap-1">
<Icon name={showUnmatched ? "chevronDown" : "chevronRight"} size="sm" />
{t("settings.unmatchedBooks", { count: syncResult.unmatched.length, plural: syncResult.unmatched.length !== 1 ? "s" : "" })}
</button>
{showUnmatched && (
<div className="mt-2 max-h-60 overflow-y-auto p-3 bg-muted/20 rounded-lg text-sm space-y-1">
{syncResult.unmatched.map((title, i) => (
<p key={i} className="text-muted-foreground truncate" title={title}>{title}</p>
))}
</div>
)}
</div>
)}
</div>
)}
{reports.length > 0 && (
<div className="border-t border-border pt-4">
<h3 className="text-sm font-medium text-foreground mb-3">{t("settings.syncHistory")}</h3>
<div className="space-y-2">
{reports.map((r) => (
<button
key={r.id}
type="button"
onClick={() => handleViewReport(r.id)}
className={`w-full text-left p-3 rounded-lg border transition-colors ${
selectedReport?.id === r.id ? "border-primary bg-primary/5" : "border-border/60 bg-muted/20 hover:bg-muted/40"
}`}
>
<div className="flex items-center justify-between">
<span className="text-sm font-medium text-foreground">{new Date(r.created_at).toLocaleString(locale)}</span>
<span className="text-xs text-muted-foreground truncate ml-2" title={r.komga_url}>{r.komga_url}</span>
</div>
<div className="flex gap-4 mt-1 text-xs text-muted-foreground">
<span>{r.total_komga_read} {t("settings.read")}</span>
<span>{r.matched} {t("settings.matched").toLowerCase()}</span>
<span className="text-success">{r.newly_marked} {t("settings.new")}</span>
{r.unmatched_count > 0 && <span className="text-warning">{r.unmatched_count} {t("settings.unmatched")}</span>}
</div>
</button>
))}
</div>
{selectedReport && (
<div className="mt-3 space-y-3">
<div className="grid grid-cols-2 sm:grid-cols-4 gap-4 p-4 bg-muted/30 rounded-lg">
<div>
<p className="text-sm text-muted-foreground">{t("settings.komgaRead")}</p>
<p className="text-2xl font-semibold">{selectedReport.total_komga_read}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.matched")}</p>
<p className="text-2xl font-semibold">{selectedReport.matched}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.alreadyRead")}</p>
<p className="text-2xl font-semibold">{selectedReport.already_read}</p>
</div>
<div>
<p className="text-sm text-muted-foreground">{t("settings.newlyMarked")}</p>
<p className="text-2xl font-semibold text-success">{selectedReport.newly_marked}</p>
</div>
</div>
{selectedReport.matched_books && selectedReport.matched_books.length > 0 && (
<div>
<button type="button" onClick={() => setShowReportMatchedBooks(!showReportMatchedBooks)} className="text-sm text-muted-foreground hover:text-foreground flex items-center gap-1">
<Icon name={showReportMatchedBooks ? "chevronDown" : "chevronRight"} size="sm" />
{t("settings.matchedBooks", { count: selectedReport.matched_books.length, plural: selectedReport.matched_books.length !== 1 ? "s" : "" })}
</button>
{showReportMatchedBooks && (
<div className="mt-2 max-h-60 overflow-y-auto p-3 bg-success/5 rounded-lg text-sm space-y-1">
{selectedReport.matched_books.map((title, i) => (
<p key={i} className="text-foreground truncate flex items-center gap-1.5" title={title}>
{reportNewlyMarkedSet.has(title) && <Icon name="check" size="sm" className="text-success shrink-0" />}
{title}
</p>
))}
</div>
)}
</div>
)}
{selectedReport.unmatched.length > 0 && (
<div>
<button type="button" onClick={() => setShowReportUnmatched(!showReportUnmatched)} className="text-sm text-muted-foreground hover:text-foreground flex items-center gap-1">
<Icon name={showReportUnmatched ? "chevronDown" : "chevronRight"} size="sm" />
{t("settings.unmatchedBooks", { count: selectedReport.unmatched.length, plural: selectedReport.unmatched.length !== 1 ? "s" : "" })}
</button>
{showReportUnmatched && (
<div className="mt-2 max-h-60 overflow-y-auto p-3 bg-muted/20 rounded-lg text-sm space-y-1">
{selectedReport.unmatched.map((title, i) => (
<p key={i} className="text-muted-foreground truncate" title={title}>{title}</p>
))}
</div>
)}
</div>
)}
</div>
)}
</div>
)}
</div>
</CardContent>
</Card>
);
}

View File

@@ -0,0 +1,170 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormInput, FormSelect, Icon } from "@/app/components/ui";
import { ProviderIcon } from "@/app/components/ProviderIcon";
import { useTranslation } from "@/lib/i18n/context";
// Languages selectable for fetched metadata.
// `as const` keeps the literal types so `value` narrows to "en" | "fr" | "es"
// for any consumer of this export.
export const METADATA_LANGUAGES = [
{ value: "en", label: "English" },
{ value: "fr", label: "Français" },
{ value: "es", label: "Español" },
] as const;
/**
 * Settings card for external metadata providers.
 *
 * Lets an admin choose the default lookup provider, the preferred metadata
 * language, and enter API keys for the providers that need one
 * (Google Books, ComicVine). Every change is persisted immediately through
 * the parent-supplied `handleUpdateSetting("metadata_providers", ...)`.
 */
export function MetadataProvidersCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
const { t } = useTranslation();
const [defaultProvider, setDefaultProvider] = useState("google_books");
const [metadataLanguage, setMetadataLanguage] = useState("en");
// Map of provider id -> API key. Only providers with a non-empty key are
// written back in save().
const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
// Load persisted provider settings once on mount; network/parse failures are
// silently ignored so the card falls back to its defaults above.
useEffect(() => {
fetch("/api/settings/metadata_providers")
.then((r) => (r.ok ? r.json() : null))
.then((data) => {
if (data) {
if (data.default_provider) setDefaultProvider(data.default_provider);
if (data.metadata_language) setMetadataLanguage(data.metadata_language);
if (data.comicvine?.api_key) setApiKeys((prev) => ({ ...prev, comicvine: data.comicvine.api_key }));
if (data.google_books?.api_key) setApiKeys((prev) => ({ ...prev, google_books: data.google_books.api_key }));
}
})
.catch(() => {});
}, []);
// Persist the full metadata_providers setting. Values are passed explicitly
// (instead of read from state) so callers can save a value in the same tick
// they set it, before React re-renders with the new state.
function save(provider: string, lang: string, keys: Record<string, string>) {
const value: Record<string, unknown> = {
default_provider: provider,
metadata_language: lang,
};
// Only providers whose key is non-empty are included in the payload.
for (const [k, v] of Object.entries(keys)) {
if (v) value[k] = { api_key: v };
}
// Fire-and-forget: the returned promise is not awaited here; the parent
// owns persistence feedback/error handling.
handleUpdateSetting("metadata_providers", value);
}
return (
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="search" size="md" />
{t("settings.metadataProviders")}
</CardTitle>
<CardDescription>{t("settings.metadataProvidersDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-6">
{/* Default provider */}
<div>
<label className="text-sm font-medium text-muted-foreground mb-2 block">{t("settings.defaultProvider")}</label>
<div className="flex gap-2 flex-wrap">
{([
{ value: "google_books", label: "Google Books" },
{ value: "open_library", label: "Open Library" },
{ value: "comicvine", label: "ComicVine" },
{ value: "anilist", label: "AniList" },
{ value: "bedetheque", label: "Bédéthèque" },
] as const).map((p) => (
<button
key={p.value}
type="button"
onClick={() => {
setDefaultProvider(p.value);
save(p.value, metadataLanguage, apiKeys);
}}
className={`inline-flex items-center gap-2 px-3 py-2 rounded-lg text-sm font-medium border transition-colors ${
defaultProvider === p.value
? "border-primary bg-primary/10 text-primary"
: "border-border bg-card text-muted-foreground hover:text-foreground hover:border-primary/50"
}`}
>
<ProviderIcon provider={p.value} size={18} />
{p.label}
</button>
))}
</div>
<p className="text-xs text-muted-foreground mt-2">{t("settings.defaultProviderHelp")}</p>
</div>
{/* Metadata language */}
<div>
<label className="text-sm font-medium text-muted-foreground mb-2 block">{t("settings.metadataLanguage")}</label>
<div className="flex gap-2">
{METADATA_LANGUAGES.map((l) => (
<button
key={l.value}
type="button"
onClick={() => {
setMetadataLanguage(l.value);
save(defaultProvider, l.value, apiKeys);
}}
className={`px-3 py-2 rounded-lg text-sm font-medium border transition-colors ${
metadataLanguage === l.value
? "border-primary bg-primary/10 text-primary"
: "border-border bg-card text-muted-foreground hover:text-foreground hover:border-primary/50"
}`}
>
{l.label}
</button>
))}
</div>
<p className="text-xs text-muted-foreground mt-2">{t("settings.metadataLanguageHelp")}</p>
</div>
{/* Provider API keys — always visible */}
<div className="border-t border-border/50 pt-4">
<h4 className="text-sm font-medium text-foreground mb-3">{t("settings.apiKeys")}</h4>
<div className="space-y-4">
<FormField>
<label className="text-sm font-medium text-muted-foreground mb-1 flex items-center gap-1.5">
<ProviderIcon provider="google_books" size={16} />
{t("settings.googleBooksKey")}
</label>
<FormInput
type="password" autoComplete="off"
placeholder={t("settings.googleBooksPlaceholder")}
value={apiKeys.google_books || ""}
onChange={(e) => setApiKeys({ ...apiKeys, google_books: e.target.value })}
onBlur={() => save(defaultProvider, metadataLanguage, apiKeys)}
/>
<p className="text-xs text-muted-foreground mt-1">{t("settings.googleBooksHelp")}</p>
</FormField>
<FormField>
<label className="text-sm font-medium text-muted-foreground mb-1 flex items-center gap-1.5">
<ProviderIcon provider="comicvine" size={16} />
{t("settings.comicvineKey")}
</label>
<FormInput
type="password" autoComplete="off"
placeholder={t("settings.comicvinePlaceholder")}
value={apiKeys.comicvine || ""}
onChange={(e) => setApiKeys({ ...apiKeys, comicvine: e.target.value })}
onBlur={() => save(defaultProvider, metadataLanguage, apiKeys)}
/>
<p className="text-xs text-muted-foreground mt-1">{t("settings.comicvineHelp")} <span className="font-mono text-foreground/70">comicvine.gamespot.com/api</span>.</p>
</FormField>
<div className="p-3 rounded-lg bg-muted/30 flex items-center gap-3 flex-wrap">
<div className="flex items-center gap-1.5">
<ProviderIcon provider="open_library" size={16} />
<span className="text-xs font-medium text-foreground">Open Library</span>
</div>
<span className="text-xs text-muted-foreground">,</span>
<div className="flex items-center gap-1.5">
<ProviderIcon provider="anilist" size={16} />
<span className="text-xs font-medium text-foreground">AniList</span>
</div>
<span className="text-xs text-muted-foreground">{t("common.and")}</span>
<div className="flex items-center gap-1.5">
<ProviderIcon provider="bedetheque" size={16} />
<span className="text-xs font-medium text-foreground">Bédéthèque</span>
</div>
<span className="text-xs text-muted-foreground">{t("settings.freeProviders")}</span>
</div>
</div>
</div>
</div>
</CardContent>
</Card>
);
}

View File

@@ -0,0 +1,134 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, Icon } from "@/app/components/ui";
import { useTranslation } from "@/lib/i18n/context";
/**
 * Settings card for the Prowlarr indexer integration: base URL, API key and
 * comma-separated search category ids, with an inline connection test.
 *
 * Values load once from `/api/settings/prowlarr` and are re-saved on every
 * field blur through the parent-supplied `handleUpdateSetting`.
 */
export function ProwlarrCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
  const { t } = useTranslation();
  const [prowlarrUrl, setProwlarrUrl] = useState("");
  const [prowlarrApiKey, setProwlarrApiKey] = useState("");
  const [prowlarrCategories, setProwlarrCategories] = useState("7030, 7020");
  const [isTesting, setIsTesting] = useState(false);
  const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);

  // Hydrate the form from persisted settings exactly once; failures are
  // swallowed so the defaults above remain.
  useEffect(() => {
    fetch("/api/settings/prowlarr")
      .then((r) => (r.ok ? r.json() : null))
      .then((data) => {
        if (data) {
          if (data.url) setProwlarrUrl(data.url);
          if (data.api_key) setProwlarrApiKey(data.api_key);
          if (data.categories) setProwlarrCategories(data.categories.join(", "));
        }
      })
      .catch(() => {});
  }, []);

  /**
   * Persist the current form values. Optional arguments override state values
   * so a caller can save in the same tick it sets state (before re-render).
   * The comma-separated category string is parsed to a number array; entries
   * that are not valid numbers are dropped.
   */
  function saveProwlarr(url?: string, apiKey?: string, cats?: string) {
    const categories = (cats ?? prowlarrCategories)
      .split(",")
      // Explicit radix 10: without it, parseInt honors "0x" prefixes and the
      // result would depend on the entry's formatting.
      .map((s) => Number.parseInt(s.trim(), 10))
      // Number.isNaN avoids the coercing global isNaN.
      .filter((n) => !Number.isNaN(n));
    // Fire-and-forget by design: the parent owns persistence feedback.
    void handleUpdateSetting("prowlarr", {
      url: url ?? prowlarrUrl,
      api_key: apiKey ?? prowlarrApiKey,
      categories,
    });
  }

  /** Ping `/api/prowlarr/test` and surface the success/error message inline. */
  async function handleTestConnection() {
    setIsTesting(true);
    setTestResult(null);
    try {
      const resp = await fetch("/api/prowlarr/test");
      const data = await resp.json();
      if (data.error) {
        setTestResult({ success: false, message: data.error });
      } else {
        setTestResult(data);
      }
    } catch {
      setTestResult({ success: false, message: "Failed to connect" });
    } finally {
      setIsTesting(false);
    }
  }

  return (
    <Card className="mb-6">
      <CardHeader>
        <CardTitle className="flex items-center gap-2">
          <Icon name="search" size="md" />
          {t("settings.prowlarr")}
        </CardTitle>
        <CardDescription>{t("settings.prowlarrDesc")}</CardDescription>
      </CardHeader>
      <CardContent>
        <div className="space-y-4">
          <div className="flex gap-4">
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrUrl")}</label>
              <FormInput
                type="url"
                placeholder={t("settings.prowlarrUrlPlaceholder")}
                value={prowlarrUrl}
                onChange={(e) => setProwlarrUrl(e.target.value)}
                onBlur={() => saveProwlarr()}
              />
            </FormField>
          </div>
          <div className="flex gap-4">
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrApiKey")}</label>
              <FormInput
                type="password" autoComplete="off"
                placeholder={t("settings.prowlarrApiKeyPlaceholder")}
                value={prowlarrApiKey}
                onChange={(e) => setProwlarrApiKey(e.target.value)}
                onBlur={() => saveProwlarr()}
              />
            </FormField>
          </div>
          <div className="flex gap-4">
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.prowlarrCategories")}</label>
              <FormInput
                type="text"
                placeholder="7030, 7020"
                value={prowlarrCategories}
                onChange={(e) => setProwlarrCategories(e.target.value)}
                onBlur={() => saveProwlarr()}
              />
              <p className="text-xs text-muted-foreground mt-1">{t("settings.prowlarrCategoriesHelp")}</p>
            </FormField>
          </div>
          <div className="flex items-center gap-3">
            <Button
              onClick={handleTestConnection}
              disabled={isTesting || !prowlarrUrl || !prowlarrApiKey}
            >
              {isTesting ? (
                <>
                  <Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
                  {t("settings.testing")}
                </>
              ) : (
                <>
                  <Icon name="refresh" size="sm" className="mr-2" />
                  {t("settings.testConnection")}
                </>
              )}
            </Button>
            {testResult && (
              <span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
                {testResult.message}
              </span>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,125 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, Icon } from "@/app/components/ui";
import { useTranslation } from "@/lib/i18n/context";
/**
 * Settings card for the qBittorrent download client: WebUI URL, username and
 * password, plus an inline connection test.
 *
 * The form hydrates once from `/api/settings/qbittorrent` and re-saves the
 * full credential object on every field blur via `handleUpdateSetting`.
 */
export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
  const { t } = useTranslation();
  const [qbUrl, setQbUrl] = useState("");
  const [qbUsername, setQbUsername] = useState("");
  const [qbPassword, setQbPassword] = useState("");
  const [testing, setTesting] = useState(false);
  const [outcome, setOutcome] = useState<{ success: boolean; message: string } | null>(null);

  // Hydrate the form from persisted settings exactly once; any failure is
  // swallowed and the empty defaults remain.
  useEffect(() => {
    (async () => {
      try {
        const res = await fetch("/api/settings/qbittorrent");
        if (!res.ok) return;
        const data = await res.json();
        if (!data) return;
        if (data.url) setQbUrl(data.url);
        if (data.username) setQbUsername(data.username);
        if (data.password) setQbPassword(data.password);
      } catch {
        // ignore — keep defaults
      }
    })();
  }, []);

  // Persist the full credential object; the parent owns error handling.
  const persistCredentials = () => {
    handleUpdateSetting("qbittorrent", {
      url: qbUrl,
      username: qbUsername,
      password: qbPassword,
    });
  };

  // Hit the test endpoint and surface its success/error message inline.
  const runConnectionTest = async () => {
    setTesting(true);
    setOutcome(null);
    try {
      const resp = await fetch("/api/qbittorrent/test");
      const data = await resp.json();
      setOutcome(data.error ? { success: false, message: data.error } : data);
    } catch {
      setOutcome({ success: false, message: "Failed to connect" });
    } finally {
      setTesting(false);
    }
  };

  return (
    <Card className="mb-6">
      <CardHeader>
        <CardTitle className="flex items-center gap-2">
          <Icon name="settings" size="md" />
          {t("settings.qbittorrent")}
        </CardTitle>
        <CardDescription>{t("settings.qbittorrentDesc")}</CardDescription>
      </CardHeader>
      <CardContent>
        <div className="space-y-4">
          <div className="flex gap-4">
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentUrl")}</label>
              <FormInput
                type="url"
                placeholder={t("settings.qbittorrentUrlPlaceholder")}
                value={qbUrl}
                onChange={(e) => setQbUrl(e.target.value)}
                onBlur={persistCredentials}
              />
            </FormField>
          </div>
          <div className="flex gap-4">
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentUsername")}</label>
              <FormInput
                type="text"
                value={qbUsername}
                onChange={(e) => setQbUsername(e.target.value)}
                onBlur={persistCredentials}
              />
            </FormField>
            <FormField className="flex-1">
              <label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.qbittorrentPassword")}</label>
              <FormInput
                type="password" autoComplete="off"
                value={qbPassword}
                onChange={(e) => setQbPassword(e.target.value)}
                onBlur={persistCredentials}
              />
            </FormField>
          </div>
          <div className="flex items-center gap-3">
            <Button
              onClick={runConnectionTest}
              disabled={testing || !qbUrl || !qbUsername}
            >
              {testing ? (
                <>
                  <Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
                  {t("settings.testing")}
                </>
              ) : (
                <>
                  <Icon name="refresh" size="sm" className="mr-2" />
                  {t("settings.testConnection")}
                </>
              )}
            </Button>
            {outcome && (
              <span className={`text-sm font-medium ${outcome.success ? "text-success" : "text-destructive"}`}>
                {outcome.message}
              </span>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,228 @@
"use client";
import { useState, useEffect, useCallback, useMemo } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, Icon } from "@/app/components/ui";
import { StatusMappingDto } from "@/lib/api";
import { useTranslation } from "@/lib/i18n/context";
/**
 * Settings card for mapping provider-reported series statuses onto the
 * application's own target statuses.
 *
 * Data comes from three endpoints loaded in parallel:
 *  - /api/settings/status-mappings  — existing mapping rows (mapped_status may be null)
 *  - /api/series/statuses           — target statuses already present in the DB
 *  - /api/series/provider-statuses  — raw statuses observed from providers
 *
 * Mappings are edited optimistically: each POST/DELETE response row replaces
 * the corresponding entry in local state.
 */
export function StatusMappingsCard() {
const { t } = useTranslation();
const [mappings, setMappings] = useState<StatusMappingDto[]>([]);
const [targetStatuses, setTargetStatuses] = useState<string[]>([]);
const [providerStatuses, setProviderStatuses] = useState<string[]>([]);
const [newTargetName, setNewTargetName] = useState("");
const [loading, setLoading] = useState(true);
// Fetch all three data sources in parallel; non-OK responses fall back to [].
const loadData = useCallback(async () => {
try {
const [mRes, sRes, pRes] = await Promise.all([
fetch("/api/settings/status-mappings").then((r) => r.ok ? r.json() : []),
fetch("/api/series/statuses").then((r) => r.ok ? r.json() : []),
fetch("/api/series/provider-statuses").then((r) => r.ok ? r.json() : []),
]);
setMappings(mRes);
setTargetStatuses(sRes);
setProviderStatuses(pRes);
} catch {
// ignore
} finally {
setLoading(false);
}
}, []);
useEffect(() => { loadData(); }, [loadData]);
// Group mappings by target status (only those with a non-null mapped_status)
const grouped = useMemo(() => {
const map = new Map<string, StatusMappingDto[]>();
for (const m of mappings) {
if (m.mapped_status) {
const list = map.get(m.mapped_status) || [];
list.push(m);
map.set(m.mapped_status, list);
}
}
return map;
}, [mappings]);
// Unmapped = mappings with null mapped_status + provider statuses not in status_mappings at all
const knownProviderStatuses = useMemo(
() => new Set(mappings.map((m) => m.provider_status)),
[mappings],
);
const unmappedMappings = useMemo(
() => mappings.filter((m) => !m.mapped_status),
[mappings],
);
// Provider statuses never seen in status_mappings (brand new from providers).
const newProviderStatuses = useMemo(
() => providerStatuses.filter((ps) => !knownProviderStatuses.has(ps)),
[providerStatuses, knownProviderStatuses],
);
// All possible targets = existing statuses from DB + custom ones added locally
// (customTargets live only in this session until something is mapped to them).
const [customTargets, setCustomTargets] = useState<string[]>([]);
const allTargets = useMemo(() => {
const set = new Set([...targetStatuses, ...customTargets]);
return [...set].sort();
}, [targetStatuses, customTargets]);
// POST a (provider_status -> mapped_status) pair; on success the server row
// replaces any existing local row for the same provider_status.
async function handleAssign(providerStatus: string, targetStatus: string) {
if (!providerStatus || !targetStatus) return;
try {
const res = await fetch("/api/settings/status-mappings", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ provider_status: providerStatus, mapped_status: targetStatus }),
});
if (res.ok) {
const created: StatusMappingDto = await res.json();
setMappings((prev) => [...prev.filter((m) => m.provider_status !== created.provider_status), created]);
}
} catch {
// ignore
}
}
// DELETE clears the mapping server-side; the endpoint returns the updated
// (now-unmapped) row, which replaces the old one in local state.
async function handleUnmap(id: string) {
try {
const res = await fetch(`/api/settings/status-mappings/${id}`, { method: "DELETE" });
if (res.ok) {
const updated: StatusMappingDto = await res.json();
setMappings((prev) => prev.map((m) => (m.id === id ? updated : m)));
}
} catch {
// ignore
}
}
// Add a new custom target status locally (lower-cased, deduped against all
// known targets); nothing is persisted until a provider status is mapped to it.
function handleCreateTarget() {
const name = newTargetName.trim().toLowerCase();
if (!name || allTargets.includes(name)) return;
setCustomTargets((prev) => [...prev, name]);
setNewTargetName("");
}
// Translate via the `seriesStatus.*` i18n keys, falling back to the raw
// status when no translation exists (t() returns the key unchanged then).
function statusLabel(status: string) {
const key = `seriesStatus.${status}` as Parameters<typeof t>[0];
const translated = t(key);
return translated !== key ? translated : status;
}
if (loading) {
return (
<Card className="mb-6">
<CardContent><p className="text-muted-foreground py-4">{t("common.loading")}</p></CardContent>
</Card>
);
}
return (
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="settings" size="md" />
{t("settings.statusMappings")}
</CardTitle>
<CardDescription>{t("settings.statusMappingsDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
{/* Create new target status */}
<div className="flex gap-2 items-center">
<FormInput
placeholder={t("settings.newTargetPlaceholder")}
value={newTargetName}
onChange={(e) => setNewTargetName(e.target.value)}
onKeyDown={(e) => { if (e.key === "Enter") handleCreateTarget(); }}
className="max-w-[250px]"
/>
<Button
onClick={handleCreateTarget}
disabled={!newTargetName.trim() || allTargets.includes(newTargetName.trim().toLowerCase())}
>
<Icon name="plus" size="sm" />
{t("settings.createTargetStatus")}
</Button>
</div>
{/* Grouped by target status */}
{allTargets.map((target) => {
const items = grouped.get(target) || [];
return (
<div key={target} className="border border-border/50 rounded-lg p-3">
<div className="flex items-center gap-2 mb-2">
<span className="text-sm font-medium text-foreground">
{statusLabel(target)}
</span>
<span className="text-xs text-muted-foreground font-mono">({target})</span>
</div>
<div className="flex flex-wrap gap-2">
{items.map((m) => (
<span
key={m.id}
className="inline-flex items-center gap-1 px-2 py-1 rounded-md bg-muted/50 text-sm font-mono"
>
{m.provider_status}
<button
type="button"
onClick={() => handleUnmap(m.id)}
className="ml-1 text-muted-foreground hover:text-destructive transition-colors"
title={t("common.delete")}
>
<Icon name="x" size="sm" />
</button>
</span>
))}
{items.length === 0 && (
<span className="text-xs text-muted-foreground italic">{t("settings.noMappings")}</span>
)}
</div>
</div>
);
})}
{/* Unmapped provider statuses (null mapped_status + brand new from providers) */}
{(unmappedMappings.length > 0 || newProviderStatuses.length > 0) && (
<div className="border-t border-border/50 pt-4">
<h4 className="text-sm font-medium text-foreground mb-3">{t("settings.unmappedSection")}</h4>
<div className="space-y-2">
{unmappedMappings.map((m) => (
<div key={m.id} className="flex items-center gap-2">
<span className="text-sm font-mono bg-muted/50 px-2 py-1 rounded-md min-w-[120px]">{m.provider_status}</span>
<Icon name="chevronRight" size="sm" />
<FormSelect
className="w-auto"
value=""
onChange={(e) => { if (e.target.value) handleAssign(m.provider_status, e.target.value); }}
>
<option value="">{t("settings.selectTargetStatus")}</option>
{allTargets.map((s) => (
<option key={s} value={s}>{statusLabel(s)}</option>
))}
</FormSelect>
</div>
))}
{newProviderStatuses.map((ps) => (
<div key={ps} className="flex items-center gap-2">
<span className="text-sm font-mono bg-muted/50 px-2 py-1 rounded-md min-w-[120px]">{ps}</span>
<Icon name="chevronRight" size="sm" />
<FormSelect
className="w-auto"
value=""
onChange={(e) => { if (e.target.value) handleAssign(ps, e.target.value); }}
>
<option value="">{t("settings.selectTargetStatus")}</option>
{allTargets.map((s) => (
<option key={s} value={s}>{statusLabel(s)}</option>
))}
</FormSelect>
</div>
))}
</div>
</div>
)}
</div>
</CardContent>
</Card>
);
}

View File

@@ -0,0 +1,276 @@
"use client";
import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, Icon } from "@/app/components/ui";
import { useTranslation } from "@/lib/i18n/context";
// Default Telegram notification toggles: every event type starts enabled.
// Keys mirror the backend event names; on load these defaults are merged
// with persisted overrides so newly added keys keep their default.
export const DEFAULT_EVENTS = {
scan_completed: true,
scan_failed: true,
scan_cancelled: true,
thumbnail_completed: true,
thumbnail_failed: true,
conversion_completed: true,
conversion_failed: true,
metadata_approved: true,
metadata_batch_completed: true,
metadata_batch_failed: true,
metadata_refresh_completed: true,
metadata_refresh_failed: true,
reading_status_match_completed: true,
reading_status_match_failed: true,
reading_status_push_completed: true,
reading_status_push_failed: true,
download_detection_completed: true,
download_detection_failed: true,
};
/**
 * Settings card for Telegram notifications: bot token, chat id, a master
 * enable toggle, per-event toggles grouped by job category, and a test
 * button.
 *
 * Settings load once from `/api/settings/telegram`; persisted event toggles
 * are merged over DEFAULT_EVENTS, and every change is saved immediately
 * through the parent-supplied `handleUpdateSetting("telegram", ...)`.
 */
export function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
const { t } = useTranslation();
const [botToken, setBotToken] = useState("");
const [chatId, setChatId] = useState("");
const [enabled, setEnabled] = useState(false);
const [events, setEvents] = useState(DEFAULT_EVENTS);
const [isTesting, setIsTesting] = useState(false);
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
const [showHelp, setShowHelp] = useState(false);
// Hydrate from persisted settings once; failures are ignored (defaults stay).
useEffect(() => {
fetch("/api/settings/telegram")
.then((r) => (r.ok ? r.json() : null))
.then((data) => {
if (data) {
if (data.bot_token) setBotToken(data.bot_token);
if (data.chat_id) setChatId(data.chat_id);
if (data.enabled !== undefined) setEnabled(data.enabled);
// Merge over defaults so event keys absent from storage stay enabled.
if (data.events) setEvents({ ...DEFAULT_EVENTS, ...data.events });
}
})
.catch(() => {});
}, []);
// Persist the telegram setting. Optional args override current state so a
// toggle can be saved in the same tick it is set (before re-render);
// `??` (not `||`) keeps an explicit `false` for the `en` flag.
function saveTelegram(token?: string, chat?: string, en?: boolean, ev?: typeof events) {
handleUpdateSetting("telegram", {
bot_token: token ?? botToken,
chat_id: chat ?? chatId,
enabled: en ?? enabled,
events: ev ?? events,
});
}
// Send a test request via /api/telegram/test and show the result inline.
async function handleTestConnection() {
setIsTesting(true);
setTestResult(null);
try {
const resp = await fetch("/api/telegram/test");
const data = await resp.json();
if (data.error) {
setTestResult({ success: false, message: data.error });
} else {
setTestResult(data);
}
} catch {
setTestResult({ success: false, message: "Failed to connect" });
} finally {
setIsTesting(false);
}
}
return (
<Card className="mb-6">
<CardHeader>
<CardTitle className="flex items-center gap-2">
<Icon name="bell" size="md" />
{t("settings.telegram")}
</CardTitle>
<CardDescription>{t("settings.telegramDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="space-y-4">
{/* Setup guide */}
<div>
<button
type="button"
onClick={() => setShowHelp(!showHelp)}
className="text-sm text-primary hover:text-primary/80 flex items-center gap-1 transition-colors"
>
<Icon name={showHelp ? "chevronDown" : "chevronRight"} size="sm" />
{t("settings.telegramHelp")}
</button>
{/* NOTE(review): help texts are injected as raw HTML from i18n strings — safe only while translations remain trusted/static. */}
{showHelp && (
<div className="mt-3 p-4 rounded-lg bg-muted/30 space-y-3 text-sm text-foreground">
<div>
<p className="font-medium mb-1">1. Bot Token</p>
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpBot") }} />
</div>
<div>
<p className="font-medium mb-1">2. Chat ID</p>
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpChat") }} />
</div>
<div>
<p className="font-medium mb-1">3. Group chat</p>
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpGroup") }} />
</div>
</div>
)}
</div>
<div className="flex items-center gap-3">
<label className="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
checked={enabled}
onChange={(e) => {
setEnabled(e.target.checked);
saveTelegram(undefined, undefined, e.target.checked);
}}
className="sr-only peer"
/>
<div className="w-11 h-6 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-primary"></div>
</label>
<span className="text-sm font-medium text-foreground">{t("settings.telegramEnabled")}</span>
</div>
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.botToken")}</label>
<FormInput
type="password" autoComplete="off"
placeholder={t("settings.botTokenPlaceholder")}
value={botToken}
onChange={(e) => setBotToken(e.target.value)}
onBlur={() => saveTelegram()}
/>
</FormField>
</div>
<div className="flex gap-4">
<FormField className="flex-1">
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.chatId")}</label>
<FormInput
type="text"
placeholder={t("settings.chatIdPlaceholder")}
value={chatId}
onChange={(e) => setChatId(e.target.value)}
onBlur={() => saveTelegram()}
/>
</FormField>
</div>
{/* Event toggles grouped by category */}
<div className="border-t border-border/50 pt-4">
<h4 className="text-sm font-medium text-foreground mb-4">{t("settings.telegramEvents")}</h4>
<div className="grid grid-cols-2 gap-x-6 gap-y-5">
{([
{
category: t("settings.eventCategoryScan"),
icon: "search" as const,
items: [
{ key: "scan_completed" as const, label: t("settings.eventCompleted") },
{ key: "scan_failed" as const, label: t("settings.eventFailed") },
{ key: "scan_cancelled" as const, label: t("settings.eventCancelled") },
],
},
{
category: t("settings.eventCategoryThumbnail"),
icon: "image" as const,
items: [
{ key: "thumbnail_completed" as const, label: t("settings.eventCompleted") },
{ key: "thumbnail_failed" as const, label: t("settings.eventFailed") },
],
},
{
category: t("settings.eventCategoryConversion"),
icon: "refresh" as const,
items: [
{ key: "conversion_completed" as const, label: t("settings.eventCompleted") },
{ key: "conversion_failed" as const, label: t("settings.eventFailed") },
],
},
{
category: t("settings.eventCategoryMetadata"),
icon: "tag" as const,
items: [
{ key: "metadata_approved" as const, label: t("settings.eventLinked") },
{ key: "metadata_batch_completed" as const, label: t("settings.eventBatchCompleted") },
{ key: "metadata_batch_failed" as const, label: t("settings.eventBatchFailed") },
{ key: "metadata_refresh_completed" as const, label: t("settings.eventRefreshCompleted") },
{ key: "metadata_refresh_failed" as const, label: t("settings.eventRefreshFailed") },
],
},
{
category: t("settings.eventCategoryReadingStatus"),
icon: "books" as const,
items: [
{ key: "reading_status_match_completed" as const, label: t("settings.eventMatchCompleted") },
{ key: "reading_status_match_failed" as const, label: t("settings.eventMatchFailed") },
{ key: "reading_status_push_completed" as const, label: t("settings.eventPushCompleted") },
{ key: "reading_status_push_failed" as const, label: t("settings.eventPushFailed") },
],
},
{
category: t("settings.eventCategoryDownloadDetection"),
icon: "download" as const,
items: [
{ key: "download_detection_completed" as const, label: t("settings.eventCompleted") },
{ key: "download_detection_failed" as const, label: t("settings.eventFailed") },
],
},
]).map(({ category, icon, items }) => (
<div key={category}>
<p className="text-xs font-medium text-muted-foreground uppercase tracking-wide mb-2 flex items-center gap-1.5">
<Icon name={icon} size="sm" className="text-muted-foreground" />
{category}
</p>
<div className="space-y-1">
{items.map(({ key, label }) => (
<label key={key} className="flex items-center justify-between py-1.5 cursor-pointer group">
<span className="text-sm text-foreground group-hover:text-foreground/80">{label}</span>
<div className="relative">
<input
type="checkbox"
checked={events[key]}
onChange={(e) => {
const updated = { ...events, [key]: e.target.checked };
setEvents(updated);
saveTelegram(undefined, undefined, undefined, updated);
}}
className="sr-only peer"
/>
<div className="w-9 h-5 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-4 after:w-4 after:transition-all peer-checked:bg-primary" />
</div>
</label>
))}
</div>
</div>
))}
</div>
</div>
<div className="flex items-center gap-3">
<Button
onClick={handleTestConnection}
disabled={isTesting || !botToken || !chatId || !enabled}
>
{isTesting ? (
<>
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
{t("settings.testing")}
</>
) : (
<>
<Icon name="refresh" size="sm" className="mr-2" />
{t("settings.testConnection")}
</>
)}
</Button>
{testResult && (
<span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
{testResult.message}
</span>
)}
</div>
</div>
</CardContent>
</Card>
);
}

View File

@@ -1,9 +1,10 @@
import { getSettings, getCacheStats, getThumbnailStats } from "../../lib/api";
import { getSettings, getCacheStats, getThumbnailStats, fetchUsers } from "@/lib/api";
import SettingsPage from "./SettingsPage";
export const dynamic = "force-dynamic";
export default async function SettingsPageWrapper() {
export default async function SettingsPageWrapper({ searchParams }: { searchParams: Promise<{ tab?: string }> }) {
const { tab } = await searchParams;
const settings = await getSettings().catch(() => ({
image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
@@ -23,5 +24,7 @@ export default async function SettingsPageWrapper() {
directory: "/data/thumbnails"
}));
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} />;
const users = await fetchUsers().catch(() => []);
return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} users={users} initialTab={tab} />;
}

View File

@@ -0,0 +1,316 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listTokens, createToken, revokeToken, deleteToken, updateToken, fetchUsers, createUser, deleteUser, updateUser, TokenDto, UserDto } from "@/lib/api";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, Badge, FormField, FormInput, FormSelect, FormRow } from "@/app/components/ui";
import { TokenUserSelect } from "@/app/components/TokenUserSelect";
import { UsernameEdit } from "@/app/components/UsernameEdit";
import { getServerTranslations } from "@/lib/i18n/server";
export const dynamic = "force-dynamic";

/**
 * Tokens & users administration page (server component).
 *
 * Lists readers (users) and API tokens, and exposes Server Actions for
 * creating/renaming/deleting users and creating/revoking/deleting/
 * reassigning tokens. A freshly created token value is surfaced exactly
 * once via the `created` search param after the redirect.
 */
export default async function TokensPage({
  searchParams
}: {
  searchParams: Promise<{ created?: string }>;
}) {
  const { t } = await getServerTranslations();
  const params = await searchParams;
  // Tokens and users are independent queries — fetch them in parallel.
  // Each falls back to an empty list so a backend outage still renders the page.
  const [tokens, users] = await Promise.all([
    listTokens().catch(() => [] as TokenDto[]),
    fetchUsers().catch(() => [] as UserDto[]),
  ]);
  // Creates an API token, then redirects so the plaintext token is shown once.
  async function createTokenAction(formData: FormData) {
    "use server";
    const name = formData.get("name") as string;
    const scope = formData.get("scope") as string;
    const userId = (formData.get("user_id") as string) || undefined;
    if (name) {
      const result = await createToken(name, scope, userId);
      revalidatePath("/tokens");
      redirect(`/tokens?created=${encodeURIComponent(result.token)}`);
    }
  }
  // Revokes a token (kept in the list, marked revoked).
  async function revokeTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await revokeToken(id);
    revalidatePath("/tokens");
  }
  // Permanently deletes an already-revoked token.
  async function deleteTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteToken(id);
    revalidatePath("/tokens");
  }
  // Creates a reader account from the username field.
  async function createUserAction(formData: FormData) {
    "use server";
    const username = formData.get("username") as string;
    if (username) {
      await createUser(username);
      revalidatePath("/tokens");
    }
  }
  // Deletes a reader account.
  async function deleteUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteUser(id);
    revalidatePath("/tokens");
  }
  // Renames a reader; ignores blank submissions.
  async function renameUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const username = formData.get("username") as string;
    if (username?.trim()) {
      await updateUser(id, username.trim());
      revalidatePath("/tokens");
    }
  }
  // Reassigns a token to another user (or detaches it when user_id is empty).
  async function reassignTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const userId = (formData.get("user_id") as string) || null;
    await updateToken(id, userId);
    revalidatePath("/tokens");
  }
  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-destructive" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z" />
          </svg>
          {t("tokens.title")}
        </h1>
      </div>
      {/* ── Readers ─────────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("users.title")}</h2>
      </div>
      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("users.createNew")}</CardTitle>
          <CardDescription>{t("users.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createUserAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="username" placeholder={t("users.username")} required autoComplete="off" />
              </FormField>
              <Button type="submit">{t("users.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>
      <Card className="overflow-hidden mb-10">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.tokenCount")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.read")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.reading")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.createdAt")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.actions")}</th>
              </tr>
            </thead>
            <tbody className="divide-y divide-border/60">
              {/* Synthetic admin row (the admin is env-configured, not a DB user) */}
              <tr className="hover:bg-accent/50 transition-colors bg-destructive/5">
                <td className="px-4 py-3 text-sm font-medium text-foreground flex items-center gap-2">
                  {process.env.ADMIN_USERNAME ?? "admin"}
                  <Badge variant="destructive">{t("tokens.scopeAdmin")}</Badge>
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground">
                  {tokens.filter(tok => tok.scope === "admin" && !tok.revoked_at).length}
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
              </tr>
              {/* Row summarizing active read tokens not attached to any user */}
              {(() => {
                const unassigned = tokens.filter(tok => tok.scope === "read" && !tok.user_id && !tok.revoked_at);
                if (unassigned.length === 0) return null;
                return (
                  <tr className="hover:bg-accent/50 transition-colors bg-warning/5">
                    <td className="px-4 py-3 text-sm font-medium text-muted-foreground italic">
                      {t("tokens.noUser")}
                    </td>
                    <td className="px-4 py-3 text-sm text-warning font-medium">{unassigned.length}</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50"></td>
                  </tr>
                );
              })()}
              {users.map((user) => (
                <tr key={user.id} className="hover:bg-accent/50 transition-colors">
                  <td className="px-4 py-3">
                    <UsernameEdit userId={user.id} currentUsername={user.username} action={renameUserAction} />
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">{user.token_count}</td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_read > 0
                      ? <span className="font-medium text-success">{user.books_read}</span>
                      : <span className="text-muted-foreground/50"></span>}
                  </td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_reading > 0
                      ? <span className="font-medium text-amber-500">{user.books_reading}</span>
                      : <span className="text-muted-foreground/50"></span>}
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">
                    {new Date(user.created_at).toLocaleDateString()}
                  </td>
                  <td className="px-4 py-3">
                    <form action={deleteUserAction}>
                      <input type="hidden" name="id" value={user.id} />
                      <Button type="submit" variant="destructive" size="xs">
                        <svg className="w-3.5 h-3.5 mr-1.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                        </svg>
                        {t("common.delete")}
                      </Button>
                    </form>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </Card>
      {/* ── API tokens ───────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("tokens.apiTokens")}</h2>
      </div>
      {params.created ? (
        <Card className="mb-6 border-success/50 bg-success/5">
          <CardHeader>
            <CardTitle className="text-success">{t("tokens.created")}</CardTitle>
            <CardDescription>{t("tokens.createdDescription")}</CardDescription>
          </CardHeader>
          <CardContent>
            <pre className="p-4 bg-background rounded-lg text-sm font-mono text-foreground overflow-x-auto border">{params.created}</pre>
          </CardContent>
        </Card>
      ) : null}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("tokens.createNew")}</CardTitle>
          <CardDescription>{t("tokens.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createTokenAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="name" placeholder={t("tokens.tokenName")} required autoComplete="off" />
              </FormField>
              <FormField className="w-32">
                <FormSelect name="scope" defaultValue="read">
                  <option value="read">{t("tokens.scopeRead")}</option>
                  <option value="admin">{t("tokens.scopeAdmin")}</option>
                </FormSelect>
              </FormField>
              <FormField className="w-48">
                <FormSelect name="user_id" defaultValue="">
                  <option value="">{t("tokens.noUser")}</option>
                  {users.map((user) => (
                    <option key={user.id} value={user.id}>{user.username}</option>
                  ))}
                </FormSelect>
              </FormField>
              <Button type="submit">{t("tokens.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>
      <Card className="overflow-hidden">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.user")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.scope")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.prefix")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.status")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.actions")}</th>
              </tr>
            </thead>
            <tbody className="divide-y divide-border/60">
              {tokens.map((token) => (
                <tr key={token.id} className="hover:bg-accent/50 transition-colors">
                  <td className="px-4 py-3 text-sm text-foreground">{token.name}</td>
                  <td className="px-4 py-3 text-sm">
                    <TokenUserSelect
                      tokenId={token.id}
                      currentUserId={token.user_id}
                      users={users}
                      action={reassignTokenAction}
                      noUserLabel={t("tokens.noUser")}
                    />
                  </td>
                  <td className="px-4 py-3 text-sm">
                    <Badge variant={token.scope === "admin" ? "destructive" : "secondary"}>
                      {token.scope}
                    </Badge>
                  </td>
                  <td className="px-4 py-3 text-sm">
                    <code className="px-2 py-1 bg-muted rounded font-mono text-foreground">{token.prefix}</code>
                  </td>
                  <td className="px-4 py-3 text-sm">
                    {token.revoked_at ? (
                      <Badge variant="error">{t("tokens.revoked")}</Badge>
                    ) : (
                      <Badge variant="success">{t("tokens.active")}</Badge>
                    )}
                  </td>
                  <td className="px-4 py-3">
                    {!token.revoked_at ? (
                      <form action={revokeTokenAction}>
                        <input type="hidden" name="id" value={token.id} />
                        <Button type="submit" variant="destructive" size="xs">
                          <svg className="w-3.5 h-3.5 mr-1.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
                          </svg>
                          {t("tokens.revoke")}
                        </Button>
                      </form>
                    ) : (
                      <form action={deleteTokenAction}>
                        <input type="hidden" name="id" value={token.id} />
                        <Button type="submit" variant="destructive" size="xs">
                          <svg className="w-3.5 h-3.5 mr-1.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                          </svg>
                          {t("common.delete")}
                        </Button>
                      </form>
                    )}
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </Card>
    </>
  );
}

View File

@@ -0,0 +1,20 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";
/** PATCH /api/anilist/libraries/[id] — proxies a per-library AniList settings update. */
export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> },
) {
  try {
    const { id } = await params;
    const payload = await request.json();
    return NextResponse.json(
      await apiFetch(`/anilist/libraries/${id}`, {
        method: "PATCH",
        body: JSON.stringify(payload),
      }),
    );
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to update library AniList setting";
    return NextResponse.json({ error: reason }, { status: 500 });
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/anilist/links — proxies the list of AniList series links. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/anilist/links"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to fetch AniList links" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** POST /api/anilist/pull — triggers a pull of reading statuses from AniList. */
export async function POST() {
  try {
    // Backend expects a POST with an (empty) JSON body.
    return NextResponse.json(await apiFetch("/anilist/pull", { method: "POST", body: "{}" }));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to pull from AniList" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";
/** POST /api/anilist/search — forwards a search query to the AniList backend proxy. */
export async function POST(request: NextRequest) {
  try {
    const payload = await request.json();
    const result = await apiFetch("/anilist/search", {
      method: "POST",
      body: JSON.stringify(payload),
    });
    return NextResponse.json(result);
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to search AniList";
    return NextResponse.json({ error: reason }, { status: 500 });
  }
}

View File

@@ -0,0 +1,46 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";
type Params = Promise<{ libraryId: string; seriesName: string }>;

/** Builds the backend path for one series, URL-encoding the series name segment. */
const seriesPath = (libraryId: string, seriesName: string, suffix = "") =>
  `/anilist/series/${libraryId}/${encodeURIComponent(seriesName)}${suffix}`;

/** Wraps an unknown error into a JSON error response with a fallback message. */
const errorJson = (err: unknown, fallback: string, status: number) =>
  NextResponse.json({ error: err instanceof Error ? err.message : fallback }, { status });

/** GET — fetches the AniList link details for one series. */
export async function GET(request: NextRequest, { params }: { params: Params }) {
  try {
    const { libraryId, seriesName } = await params;
    return NextResponse.json(await apiFetch(seriesPath(libraryId, seriesName)));
  } catch (err) {
    // Any failure is surfaced as a 404, mirroring the "series not linked" case.
    return errorJson(err, "Not found", 404);
  }
}

/** POST — links the series to an AniList entry chosen in the request body. */
export async function POST(request: NextRequest, { params }: { params: Params }) {
  try {
    const { libraryId, seriesName } = await params;
    const payload = await request.json();
    return NextResponse.json(
      await apiFetch(seriesPath(libraryId, seriesName, "/link"), {
        method: "POST",
        body: JSON.stringify(payload),
      }),
    );
  } catch (err) {
    return errorJson(err, "Failed to link series", 500);
  }
}

/** DELETE — removes the AniList link for the series. */
export async function DELETE(request: NextRequest, { params }: { params: Params }) {
  try {
    const { libraryId, seriesName } = await params;
    return NextResponse.json(
      await apiFetch(seriesPath(libraryId, seriesName, "/unlink"), { method: "DELETE" }),
    );
  } catch (err) {
    return errorJson(err, "Failed to unlink series", 500);
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/anilist/status — proxies the AniList integration status. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/anilist/status"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to get AniList status" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/anilist/sync/preview — proxies a dry-run preview of the AniList sync. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/anilist/sync/preview"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to preview sync" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** POST /api/anilist/sync — triggers a push of local reading statuses to AniList. */
export async function POST() {
  try {
    // Backend expects a POST with an (empty) JSON body.
    return NextResponse.json(await apiFetch("/anilist/sync", { method: "POST", body: "{}" }));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to sync to AniList" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/anilist/unlinked — proxies the list of series with no AniList link. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/anilist/unlinked"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to fetch unlinked series" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,31 @@
import { createHash, timingSafeEqual } from "node:crypto";
import { NextRequest, NextResponse } from "next/server";
import { createSessionToken, SESSION_COOKIE } from "@/lib/session";
/**
 * POST /api/auth/login — validates admin credentials and sets the session cookie.
 *
 * Expects a JSON body { username, password }. On success, issues a signed
 * session token as an httpOnly cookie valid for 7 days.
 */
export async function POST(req: NextRequest) {
  const body = await req.json().catch(() => null);
  if (!body || typeof body.username !== "string" || typeof body.password !== "string") {
    return NextResponse.json({ error: "Invalid request" }, { status: 400 });
  }
  const expectedUsername = process.env.ADMIN_USERNAME || "admin";
  const expectedPassword = process.env.ADMIN_PASSWORD;
  if (!expectedPassword) {
    // Refuse to authenticate when no admin password is configured rather than
    // effectively allowing empty-password login.
    return NextResponse.json({ error: "Server misconfiguration" }, { status: 500 });
  }
  // Compare SHA-256 digests with timingSafeEqual: hashing yields equal-length
  // buffers (required by timingSafeEqual) and the constant-time comparison
  // avoids the early-exit timing leak of a plain string !==.
  const matches = (a: string, b: string) =>
    timingSafeEqual(
      createHash("sha256").update(a).digest(),
      createHash("sha256").update(b).digest(),
    );
  // Evaluate both checks unconditionally so the response time does not reveal
  // whether the username alone was correct.
  const userOk = matches(body.username, expectedUsername);
  const passOk = matches(body.password, expectedPassword);
  if (!userOk || !passOk) {
    return NextResponse.json({ error: "Invalid credentials" }, { status: 401 });
  }
  const token = await createSessionToken();
  const response = NextResponse.json({ success: true });
  response.cookies.set(SESSION_COOKIE, token, {
    httpOnly: true,
    secure: process.env.NODE_ENV === "production",
    sameSite: "lax",
    maxAge: 7 * 24 * 60 * 60, // 7 days, in seconds
    path: "/",
  });
  return response;
}

View File

@@ -0,0 +1,8 @@
import { NextResponse } from "next/server";
import { SESSION_COOKIE } from "@/lib/session";
export async function POST() {
const response = NextResponse.json({ success: true });
response.cookies.delete(SESSION_COOKIE);
return response;
}

View File

@@ -28,12 +28,9 @@ export async function GET(
});
}
// Récupérer le content-type et les données
const contentType = response.headers.get("content-type") || "image/webp";
const imageBuffer = await response.arrayBuffer();
// Retourner l'image avec le bon content-type
return new NextResponse(imageBuffer, {
return new NextResponse(response.body, {
headers: {
"Content-Type": contentType,
"Cache-Control": "public, max-age=300",

View File

@@ -9,10 +9,25 @@ export async function GET(
try {
const { baseUrl, token } = config();
const ifNoneMatch = request.headers.get("if-none-match");
const fetchHeaders: Record<string, string> = {
Authorization: `Bearer ${token}`,
};
if (ifNoneMatch) {
fetchHeaders["If-None-Match"] = ifNoneMatch;
}
const response = await fetch(`${baseUrl}/books/${bookId}/thumbnail`, {
headers: { Authorization: `Bearer ${token}` },
headers: fetchHeaders,
next: { revalidate: 86400 },
});
// Forward 304 Not Modified as-is
if (response.status === 304) {
return new NextResponse(null, { status: 304 });
}
if (!response.ok) {
return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
status: response.status
@@ -20,14 +35,17 @@ export async function GET(
}
const contentType = response.headers.get("content-type") || "image/webp";
const imageBuffer = await response.arrayBuffer();
const etag = response.headers.get("etag");
return new NextResponse(imageBuffer, {
headers: {
const headers: Record<string, string> = {
"Content-Type": contentType,
"Cache-Control": "public, max-age=31536000, immutable",
},
});
};
if (etag) {
headers["ETag"] = etag;
}
return new NextResponse(response.body, { headers });
} catch (error) {
console.error("Error fetching thumbnail:", error);
return new NextResponse("Failed to fetch thumbnail", { status: 500 });

View File

@@ -0,0 +1,47 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, IndexJobDto, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, startReadingStatusMatch, startReadingStatusPush, startDownloadDetection } from "@/lib/api";
/**
 * POST /api/jobs/[id]/replay — re-launches an index job with the same type
 * and library scope as the original job.
 *
 * Looks up the original job by id, then dispatches to the matching start
 * helper. Job types that require a concrete library (metadata, reading
 * status, download detection) return 400 when the original job had no
 * library_id; unknown job types also return 400.
 */
export async function POST(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  try {
    const job = await apiFetch<IndexJobDto>(`/index/jobs/${id}`);
    // library_id is null for "all libraries" jobs; normalize to undefined for the helpers.
    const libraryId = job.library_id ?? undefined;
    switch (job.type) {
      // "scan" and "rebuild" both map to a plain incremental rebuild.
      case "rebuild":
      case "scan":
        return NextResponse.json(await rebuildIndex(libraryId));
      case "full_rebuild":
        return NextResponse.json(await rebuildIndex(libraryId, true));
      case "rescan":
        return NextResponse.json(await rebuildIndex(libraryId, false, true));
      case "thumbnail_rebuild":
        return NextResponse.json(await rebuildThumbnails(libraryId));
      case "thumbnail_regenerate":
        return NextResponse.json(await regenerateThumbnails(libraryId));
      case "metadata_batch":
        if (!libraryId) return NextResponse.json({ error: "Library ID required for metadata batch" }, { status: 400 });
        return NextResponse.json(await startMetadataBatch(libraryId));
      case "metadata_refresh":
        if (!libraryId) return NextResponse.json({ error: "Library ID required for metadata refresh" }, { status: 400 });
        return NextResponse.json(await startMetadataRefresh(libraryId));
      case "reading_status_match":
        if (!libraryId) return NextResponse.json({ error: "Library ID required for reading status match" }, { status: 400 });
        return NextResponse.json(await startReadingStatusMatch(libraryId));
      case "reading_status_push":
        if (!libraryId) return NextResponse.json({ error: "Library ID required for reading status push" }, { status: 400 });
        return NextResponse.json(await startReadingStatusPush(libraryId));
      case "download_detection":
        if (!libraryId) return NextResponse.json({ error: "Library ID required for download detection" }, { status: 400 });
        return NextResponse.json(await startDownloadDetection(libraryId));
      default:
        return NextResponse.json({ error: `Cannot replay job type: ${job.type}` }, { status: 400 });
    }
  } catch (error) {
    // Log the underlying cause server-side; the client only needs a generic message.
    console.error(`Failed to replay job ${id}:`, error);
    return NextResponse.json({ error: "Failed to replay job" }, { status: 500 });
  }
}

View File

@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { listJobs } from "@/lib/api";
/** GET /api/jobs — returns the current list of index jobs. */
export async function GET() {
  try {
    return NextResponse.json(await listJobs());
  } catch (error) {
    // Previously the error was silently discarded; log the cause server-side
    // while keeping the generic client-facing message.
    console.error("Failed to fetch jobs:", error);
    return NextResponse.json({ error: "Failed to fetch jobs" }, { status: 500 });
  }
}

View File

@@ -11,6 +11,7 @@ export async function GET(request: NextRequest) {
let lastData: string | null = null;
let isActive = true;
let consecutiveErrors = 0;
let intervalId: ReturnType<typeof setInterval> | null = null;
const fetchJobs = async () => {
if (!isActive) return;
@@ -25,23 +26,28 @@ export async function GET(request: NextRequest) {
const data = await response.json();
const dataStr = JSON.stringify(data);
// Send if data changed
// Send only if data changed
if (dataStr !== lastData && isActive) {
lastData = dataStr;
try {
controller.enqueue(
new TextEncoder().encode(`data: ${dataStr}\n\n`)
);
} catch (err) {
// Controller closed, ignore
} catch {
isActive = false;
}
}
// Adapt interval: 2s when active jobs exist, 15s when idle
const hasActiveJobs = data.some((j: { status: string }) =>
j.status === "running" || j.status === "pending" || j.status === "extracting_pages" || j.status === "generating_thumbnails"
);
const nextInterval = hasActiveJobs ? 2000 : 15000;
restartInterval(nextInterval);
}
} catch (error) {
if (isActive) {
consecutiveErrors++;
// Only log first failure and every 30th to avoid spam
if (consecutiveErrors === 1 || consecutiveErrors % 30 === 0) {
console.warn(`SSE fetch error (${consecutiveErrors} consecutive):`, error);
}
@@ -49,22 +55,18 @@ export async function GET(request: NextRequest) {
}
};
// Initial fetch
await fetchJobs();
const restartInterval = (ms: number) => {
if (intervalId !== null) clearInterval(intervalId);
intervalId = setInterval(fetchJobs, ms);
};
// Poll every 2 seconds
const interval = setInterval(async () => {
if (!isActive) {
clearInterval(interval);
return;
}
// Initial fetch + start polling
await fetchJobs();
}, 2000);
// Cleanup
request.signal.addEventListener("abort", () => {
isActive = false;
clearInterval(interval);
if (intervalId !== null) clearInterval(intervalId);
controller.close();
});
},

View File

@@ -1,3 +1,4 @@
import { revalidatePath } from "next/cache";
import { NextRequest, NextResponse } from "next/server";
import { apiFetch, LibraryDto } from "@/lib/api";
@@ -12,6 +13,7 @@ export async function PATCH(
method: "PATCH",
body: JSON.stringify(body),
});
revalidatePath("/libraries");
return NextResponse.json(data);
} catch (error) {
const message = error instanceof Error ? error.message : "Failed to update metadata provider";

View File

@@ -1,3 +1,4 @@
import { revalidatePath } from "next/cache";
import { NextRequest, NextResponse } from "next/server";
import { updateLibraryMonitoring } from "@/lib/api";
@@ -7,8 +8,9 @@ export async function PATCH(
) {
const { id } = await params;
try {
const { monitor_enabled, scan_mode, watcher_enabled } = await request.json();
const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled);
const { monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode, download_detection_mode } = await request.json();
const data = await updateLibraryMonitoring(id, monitor_enabled, scan_mode, watcher_enabled, metadata_refresh_mode, download_detection_mode);
revalidatePath("/libraries");
return NextResponse.json(data);
} catch (error) {
const message = error instanceof Error ? error.message : "Failed to update monitoring settings";

View File

@@ -0,0 +1,23 @@
import { revalidatePath } from "next/cache";
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/**
 * PATCH /api/libraries/[id]/reading-status-provider — updates which provider
 * supplies reading statuses for a library, then revalidates the libraries page.
 */
export async function PATCH(
  request: Request,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  try {
    const payload = await request.json();
    const result = await apiFetch(`/libraries/${id}/reading-status-provider`, {
      method: "PATCH",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload),
    });
    revalidatePath("/libraries");
    return NextResponse.json(result);
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to update reading status provider";
    return NextResponse.json({ error: reason }, { status: 500 });
  }
}

View File

@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";
/** POST /api/prowlarr/search — forwards a release search to the Prowlarr backend proxy. */
export async function POST(request: NextRequest) {
  try {
    const payload = await request.json();
    return NextResponse.json(
      await apiFetch("/prowlarr/search", {
        method: "POST",
        body: JSON.stringify(payload),
      }),
    );
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to search Prowlarr";
    return NextResponse.json({ error: reason }, { status: 500 });
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/prowlarr/test — proxies a Prowlarr connectivity check. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/prowlarr/test"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to test Prowlarr connection" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,16 @@
import { NextResponse, NextRequest } from "next/server";
import { apiFetch } from "@/lib/api";
/** POST /api/qbittorrent/add — forwards a torrent-add request to the backend. */
export async function POST(request: NextRequest) {
  try {
    const payload = await request.json();
    return NextResponse.json(
      await apiFetch("/qbittorrent/add", {
        method: "POST",
        body: JSON.stringify(payload),
      }),
    );
  } catch (err) {
    const reason = err instanceof Error ? err.message : "Failed to add torrent";
    return NextResponse.json({ error: reason }, { status: 500 });
  }
}

View File

@@ -0,0 +1,12 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** GET /api/qbittorrent/test — proxies a qBittorrent connectivity check. */
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/qbittorrent/test"));
  } catch (err) {
    return NextResponse.json(
      { error: err instanceof Error ? err.message : "Failed to test qBittorrent" },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
// GET /api/series/provider-statuses — distinct provider status values.
// Best-effort by design: any backend failure yields an empty array with
// HTTP 200 so filter dropdowns degrade gracefully instead of erroring.
export async function GET() {
  try {
    const data = await apiFetch<string[]>("/series/provider-statuses");
    return NextResponse.json(data);
  } catch {
    return NextResponse.json([], { status: 200 });
  }
}

View File

@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
// GET /api/series/statuses — distinct series status values.
// Best-effort by design: any backend failure yields an empty array with
// HTTP 200 so filter dropdowns degrade gracefully instead of erroring.
export async function GET() {
  try {
    const data = await apiFetch<string[]>("/series/statuses");
    return NextResponse.json(data);
  } catch {
    return NextResponse.json([], { status: 200 });
  }
}

View File

@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
/** DELETE /api/settings/status-mappings/[id] — removes one status mapping. */
export async function DELETE(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params;
  try {
    const result = await apiFetch<unknown>(`/settings/status-mappings/${id}`, {
      method: "DELETE",
    });
    return NextResponse.json(result);
  } catch {
    return NextResponse.json({ error: "Failed to delete status mapping" }, { status: 500 });
  }
}

Some files were not shown because too many files have changed in this diff Show More